From 9d769708667f8408fadbae2e20e6f4a3cbaa1f02 Mon Sep 17 00:00:00 2001
From: Anton Tayanovskyy
Date: Thu, 19 May 2022 10:50:43 -0400
Subject: [PATCH 1/4] Implement opt-in caching via actions/cache

---
 README.md    | 22 +++++++++++++++++
 package.json |  1 +
 src/main.ts  | 70 +++++++++++++++++++++++++++++++++++++++++++++++++---
 3 files changed, 89 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 188f5fc6..fbc8da7b 100644
--- a/README.md
+++ b/README.md
@@ -56,6 +56,28 @@ jobs:
           arch: x86-64
 ```
 
+### Caching
+
+This action can use [actions/cache](https://github.com/actions/cache) under the hood. Caching must be enabled explicitly and only works when a specific tag is requested (not `latest`).
+
+```yaml
+# ...
+jobs:
+  my_job:
+    env:
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # GitHub token scoped to the job
+    steps:
+      - name: Install tf2pulumi
+        uses: jaxxstorm/action-install-gh-release@v1.5.0
+        with: # Grab a specific tag with caching
+          repo: pulumi/tf2pulumi
+          tag: v0.7.0
+          cache: enable
+```
+
+Caching helps avoid
+[rate limiting](https://docs.github.com/en/rest/overview/resources-in-the-rest-api#requests-from-github-actions), since on a cache hit this action does not need to scan tags and releases. Caching is currently not expected to speed up installation.
+
 ## Finding a release
 
 By default, this action will lookup the Platform and Architecture of the runner and use those values to interpolate and match a release package. **The release package name is first converted to lowercase**. The match pattern is:

diff --git a/package.json b/package.json
index e71a3a4f..fc3f6dd9 100644
--- a/package.json
+++ b/package.json
@@ -19,6 +19,7 @@
     "@actions/github": "^5.0.1",
     "@actions/tool-cache": "^1.7.1",
     "@octokit/plugin-throttling": "^3.6.2",
+    "cache": "github:actions/cache",
     "minimist": "^1.2.6",
     "mkdirp-promise": "^5.0.1",
     "node-fetch": "^2.6.7"

diff --git a/src/main.ts b/src/main.ts
index 33320814..290953c3 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -1,11 +1,22 @@
+import * as os from "os";
+import * as path from "path";
+
+import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 import * as tc from "@actions/tool-cache";
 import { GitHub, getOctokitOptions} from "@actions/github/lib/utils";
-import * as os from "os";
 import { throttling } from "@octokit/plugin-throttling";
 
 const ThrottlingOctokit = GitHub.plugin(throttling);
 
+interface ToolInfo {
+    owner: string;
+    project: string;
+    tag: string;
+    osPlatform: string;
+    osArch: string;
+}
+
 async function run() {
     try {
 
@@ -50,6 +61,10 @@ async function run() {
             )
         }
 
+        const cacheEnabled = (core.getInput("cache") === "enable")
+            && tag !== "latest"
+            && tag !== "";
+
         const [owner, project] = repo.split("/")
 
         let osMatch : string[] = []
@@ -94,6 +109,26 @@ async function run() {
         core.info(`==> System reported arch: ${os.arch()}`)
         core.info(`==> Using arch: ${osArch}`)
 
+        let toolInfo: ToolInfo = {
+            owner: owner,
+            project: project,
+            tag: tag,
+            osArch: osArch,
+            osPlatform: osPlatform
+        };
+        let dest = toolPath(toolInfo);
+
+        // Look in the cache first.
+        let cacheKey = cachePrimaryKey(toolInfo);
+        if (cacheEnabled && cacheKey !== undefined) {
+            let ok = await cache.restoreCache([dest], cacheKey);
+            if (ok !== undefined) {
+                core.info(`Found ${project} in the cache: ${dest}`)
+                core.addPath(dest);
+                return;
+            }
+        }
+
         let getReleaseUrl;
         if (tag === "latest") {
            getReleaseUrl = await octokit.rest.repos.getLatestRelease({
@@ -129,10 +164,14 @@ async function run() {
         core.info(`Downloading ${project} from ${url}`)
         const binPath = await tc.downloadTool(url);
-        const extractedPath = await extractFn(binPath);
-        core.info(`Successfully extracted ${project} to ${extractedPath}`)
+        await extractFn(binPath, dest);
 
-        core.addPath(extractedPath);
+        if (cacheEnabled && cacheKey !== undefined) {
+            await cache.saveCache([dest], cacheKey);
+        }
+
+        core.addPath(dest);
+        core.info(`Successfully extracted ${project} to ${dest}`)
 
     } catch (error) {
         if (error instanceof Error) {
             core.setFailed(error.message);
@@ -142,6 +181,29 @@ async function run() {
     }
 }
 
+function cachePrimaryKey(info: ToolInfo): string|undefined {
+    // Currently not caching "latest" versions of the tool.
+    if (info.tag === "latest") {
+        return undefined;
+    }
+    return "action-install-gh-release/" +
+        `${info.owner}/${info.project}/${info.tag}/${info.osPlatform}-${info.osArch}`;
+}
+
+function toolPath(info: ToolInfo): string {
+    return path.join(getCacheDirectory(),
+        info.owner, info.project, info.tag,
+        `${info.osPlatform}-${info.osArch}`);
+}
+
+function getCacheDirectory() {
+    const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
+    if (cacheDirectory === '') {
+        core.warning('Expected RUNNER_TOOL_CACHE to be defined');
+    }
+    return cacheDirectory;
+}
+
 function getExtractFn(assetName: any) {
     if (assetName.endsWith('.tar.gz')) {
         return tc.extractTar;
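
For context on how the pieces above fit together, here is a rough TypeScript sketch of the key and destination shapes produced by `cachePrimaryKey`/`toolPath` and of the restore-then-save flow in `run()`, using the README's `pulumi/tf2pulumi` example. The concrete platform/arch values, the `RUNNER_TOOL_CACHE` location, and the `exampleFlow` helper are illustrative assumptions, not part of the patch.

```ts
// Illustrative sketch only; the values below are assumptions for a hosted Linux runner.
import * as path from "path";
import * as cache from "@actions/cache";

const info = {
    owner: "pulumi",
    project: "tf2pulumi",
    tag: "v0.7.0",
    osPlatform: "linux",  // assumed resolved platform
    osArch: "amd64",      // assumed resolved arch
};

// Key shape produced by cachePrimaryKey(info):
//   action-install-gh-release/pulumi/tf2pulumi/v0.7.0/linux-amd64
const key = "action-install-gh-release/" +
    `${info.owner}/${info.project}/${info.tag}/${info.osPlatform}-${info.osArch}`;

// Destination shape produced by toolPath(info), rooted at RUNNER_TOOL_CACHE
// (typically /opt/hostedtoolcache on hosted Linux runners):
const dest = path.join(process.env["RUNNER_TOOL_CACHE"] || "",
    info.owner, info.project, info.tag, `${info.osPlatform}-${info.osArch}`);

// Restore-then-save flow mirroring run(): restoreCache resolves to undefined on
// a miss, in which case the release is downloaded into `dest` and then saved.
async function exampleFlow(): Promise<void> {
    const hit = await cache.restoreCache([dest], key);
    if (hit === undefined) {
        // ... download and extract the release into `dest`, then:
        await cache.saveCache([dest], key);
    }
}
```

Because the key includes owner, project, tag, platform, and arch, a cached entry is only reused for the exact same release on the same kind of runner, which is why `latest` (and an empty tag) is excluded from caching.
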
From 9c80b9072cc31ccba09b36f39425af055773cfc3 Mon Sep 17 00:00:00 2001
From: Anton Tayanovskyy
Date: Thu, 19 May 2022 10:51:11 -0400
Subject: [PATCH 2/4] Run npm install

---
 .../tool-cache/node_modules => }/.bin/semver | 0
 node_modules/.package-lock.json | 527 +-
 node_modules/@actions/cache/LICENSE.md | 9 +
 node_modules/@actions/cache/README.md | 44 +
 node_modules/@actions/cache/lib/cache.d.ts | 32 +
 node_modules/@actions/cache/lib/cache.js | 183 +
 node_modules/@actions/cache/lib/cache.js.map | 1 +
 .../cache/lib/internal/cacheHttpClient.d.ts | 8 +
 .../cache/lib/internal/cacheHttpClient.js | 213 +
 .../cache/lib/internal/cacheHttpClient.js.map | 1 +
 .../cache/lib/internal/cacheUtils.d.ts | 12 +
 .../@actions/cache/lib/internal/cacheUtils.js | 175 +
 .../cache/lib/internal/cacheUtils.js.map | 1 +
 .../cache/lib/internal/constants.d.ts | 12 +
 .../@actions/cache/lib/internal/constants.js | 24 +
 .../cache/lib/internal/constants.js.map | 1 +
 .../cache/lib/internal/downloadUtils.d.ts | 74 +
 .../cache/lib/internal/downloadUtils.js | 231 +
 .../cache/lib/internal/downloadUtils.js.map | 1 +
 .../cache/lib/internal/requestUtils.d.ts | 8 +
 .../cache/lib/internal/requestUtils.js | 120 +
 .../cache/lib/internal/requestUtils.js.map | 1 +
 .../@actions/cache/lib/internal/tar.d.ts | 4 +
 .../@actions/cache/lib/internal/tar.js | 164 +
 .../@actions/cache/lib/internal/tar.js.map | 1 +
 node_modules/@actions/cache/lib/options.d.ts | 56 +
 node_modules/@actions/cache/lib/options.js | 62 +
 .../@actions/cache/lib/options.js.map | 1 +
 .../node_modules/@actions/http-client/LICENSE | 21 +
 .../@actions/http-client/README.md | 73 +
 .../@actions/http-client/lib/auth.d.ts | 26 +
 .../@actions/http-client/lib/auth.js | 81 +
 .../@actions/http-client/lib/auth.js.map | 1 +
.../@actions/http-client/lib/index.d.ts | 123 + .../@actions/http-client/lib/index.js | 605 + .../@actions/http-client/lib/index.js.map | 1 + .../@actions/http-client/lib/interfaces.d.ts | 44 + .../@actions/http-client/lib/interfaces.js | 3 + .../http-client/lib/interfaces.js.map | 1 + .../@actions/http-client/lib/proxy.d.ts | 2 + .../@actions/http-client/lib/proxy.js | 61 + .../@actions/http-client/lib/proxy.js.map | 1 + .../@actions/http-client/package.json | 48 + node_modules/@actions/cache/package.json | 55 + node_modules/@actions/core/lib/core.d.ts | 8 + node_modules/@actions/core/lib/core.js | 10 + node_modules/@actions/core/lib/core.js.map | 2 +- node_modules/@actions/core/lib/oidc-utils.js | 2 +- .../@actions/core/lib/oidc-utils.js.map | 2 +- node_modules/@actions/core/lib/summary.d.ts | 202 + node_modules/@actions/core/lib/summary.js | 283 + node_modules/@actions/core/lib/summary.js.map | 1 + .../node_modules/@actions/http-client/LICENSE | 21 + .../@actions/http-client/README.md | 73 + .../@actions/http-client/lib/auth.d.ts | 26 + .../@actions/http-client/lib/auth.js | 81 + .../@actions/http-client/lib/auth.js.map | 1 + .../@actions/http-client/lib/index.d.ts | 123 + .../@actions/http-client/lib/index.js | 605 + .../@actions/http-client/lib/index.js.map | 1 + .../@actions/http-client/lib/interfaces.d.ts | 44 + .../@actions/http-client/lib/interfaces.js | 3 + .../http-client/lib/interfaces.js.map | 1 + .../@actions/http-client/lib/proxy.d.ts | 2 + .../@actions/http-client/lib/proxy.js | 61 + .../@actions/http-client/lib/proxy.js.map | 1 + .../@actions/http-client/package.json | 48 + node_modules/@actions/core/package.json | 4 +- node_modules/@actions/exec/package.json | 2 +- node_modules/@actions/glob/LICENSE.md | 9 + node_modules/@actions/glob/README.md | 113 + node_modules/@actions/glob/lib/glob.d.ts | 10 + node_modules/@actions/glob/lib/glob.js | 26 + node_modules/@actions/glob/lib/glob.js.map | 1 + .../lib/internal-glob-options-helper.d.ts | 5 + .../glob/lib/internal-glob-options-helper.js | 50 + .../lib/internal-glob-options-helper.js.map | 1 + .../glob/lib/internal-glob-options.d.ts | 29 + .../glob/lib/internal-glob-options.js | 3 + .../glob/lib/internal-glob-options.js.map | 1 + .../@actions/glob/lib/internal-globber.d.ts | 42 + .../@actions/glob/lib/internal-globber.js | 235 + .../@actions/glob/lib/internal-globber.js.map | 1 + .../glob/lib/internal-match-kind.d.ts | 13 + .../@actions/glob/lib/internal-match-kind.js | 18 + .../glob/lib/internal-match-kind.js.map | 1 + .../glob/lib/internal-path-helper.d.ts | 42 + .../@actions/glob/lib/internal-path-helper.js | 198 + .../glob/lib/internal-path-helper.js.map | 1 + .../@actions/glob/lib/internal-path.d.ts | 15 + .../@actions/glob/lib/internal-path.js | 113 + .../@actions/glob/lib/internal-path.js.map | 1 + .../glob/lib/internal-pattern-helper.d.ts | 15 + .../glob/lib/internal-pattern-helper.js | 94 + .../glob/lib/internal-pattern-helper.js.map | 1 + .../@actions/glob/lib/internal-pattern.d.ts | 64 + .../@actions/glob/lib/internal-pattern.js | 255 + .../@actions/glob/lib/internal-pattern.js.map | 1 + .../glob/lib/internal-search-state.d.ts | 5 + .../glob/lib/internal-search-state.js | 11 + .../glob/lib/internal-search-state.js.map | 1 + node_modules/@actions/glob/package.json | 43 + node_modules/@actions/io/package.json | 2 +- .../@azure/abort-controller/CHANGELOG.md | 34 + node_modules/@azure/abort-controller/LICENSE | 21 + .../@azure/abort-controller/README.md | 110 + .../dist-esm/src/AbortController.js | 118 + 
.../dist-esm/src/AbortController.js.map | 1 + .../dist-esm/src/AbortSignal.js | 115 + .../dist-esm/src/AbortSignal.js.map | 1 + .../abort-controller/dist-esm/src/index.js | 12 + .../dist-esm/src/index.js.map | 1 + .../@azure/abort-controller/dist/index.js | 239 + .../@azure/abort-controller/dist/index.js.map | 1 + .../node_modules/tslib/CopyrightNotice.txt | 15 + .../node_modules/tslib/LICENSE.txt | 12 + .../node_modules/tslib/README.md | 164 + .../node_modules/tslib/modules/index.js | 55 + .../node_modules/tslib/modules/package.json | 3 + .../node_modules/tslib/package.json | 38 + .../node_modules/tslib/tslib.d.ts | 398 + .../node_modules/tslib/tslib.es6.html | 1 + .../node_modules/tslib/tslib.es6.js | 248 + .../node_modules/tslib/tslib.html | 1 + .../node_modules/tslib/tslib.js | 317 + .../@azure/abort-controller/package.json | 104 + .../@azure/abort-controller/shims-public.d.ts | 5 + .../types/3.1/AbortController.d.ts | 85 + .../types/3.1/AbortSignal.d.ts | 80 + .../abort-controller/types/3.1/index.d.ts | 3 + .../types/src/AbortController.d.ts | 85 + .../types/src/AbortController.d.ts.map | 1 + .../types/src/AbortSignal.d.ts | 80 + .../types/src/AbortSignal.d.ts.map | 1 + .../abort-controller/types/src/index.d.ts | 3 + .../abort-controller/types/src/index.d.ts.map | 1 + .../types/src/tsdoc-metadata.json | 11 + node_modules/@azure/core-auth/CHANGELOG.md | 61 + node_modules/@azure/core-auth/LICENSE | 21 + node_modules/@azure/core-auth/README.md | 78 + .../dist-esm/src/azureKeyCredential.js | 38 + .../dist-esm/src/azureKeyCredential.js.map | 1 + .../dist-esm/src/azureNamedKeyCredential.js | 62 + .../src/azureNamedKeyCredential.js.map | 1 + .../dist-esm/src/azureSASCredential.js | 50 + .../dist-esm/src/azureSASCredential.js.map | 1 + .../@azure/core-auth/dist-esm/src/index.js | 7 + .../core-auth/dist-esm/src/index.js.map | 1 + .../core-auth/dist-esm/src/tokenCredential.js | 19 + .../dist-esm/src/tokenCredential.js.map | 1 + .../@azure/core-auth/dist-esm/src/tracing.js | 4 + .../core-auth/dist-esm/src/tracing.js.map | 1 + .../core-auth/dist-esm/src/typeguards.js | 39 + .../core-auth/dist-esm/src/typeguards.js.map | 1 + node_modules/@azure/core-auth/dist/index.js | 215 + .../@azure/core-auth/dist/index.js.map | 1 + .../node_modules/tslib/CopyrightNotice.txt | 15 + .../core-auth/node_modules/tslib/LICENSE.txt | 12 + .../core-auth/node_modules/tslib/README.md | 164 + .../node_modules/tslib/modules/index.js | 55 + .../node_modules/tslib/modules/package.json | 3 + .../core-auth/node_modules/tslib/package.json | 38 + .../core-auth/node_modules/tslib/tslib.d.ts | 398 + .../node_modules/tslib/tslib.es6.html | 1 + .../core-auth/node_modules/tslib/tslib.es6.js | 248 + .../core-auth/node_modules/tslib/tslib.html | 1 + .../core-auth/node_modules/tslib/tslib.js | 317 + node_modules/@azure/core-auth/package.json | 99 + .../@azure/core-auth/types/3.1/core-auth.d.ts | 258 + .../core-auth/types/latest/core-auth.d.ts | 288 + node_modules/@azure/core-http/CHANGELOG.md | 262 + node_modules/@azure/core-http/LICENSE | 21 + node_modules/@azure/core-http/README.md | 72 + .../@azure/core-http/dist-esm/src/coreHttp.js | 44 + .../core-http/dist-esm/src/coreHttp.js.map | 1 + .../dist-esm/src/createSpanLegacy.js | 19 + .../dist-esm/src/createSpanLegacy.js.map | 1 + .../src/credentials/accessTokenCache.js | 41 + .../src/credentials/accessTokenCache.js.map | 1 + .../src/credentials/accessTokenRefresher.js | 46 + .../credentials/accessTokenRefresher.js.map | 1 + .../src/credentials/apiKeyCredentials.js | 53 + 
.../src/credentials/apiKeyCredentials.js.map | 1 + .../basicAuthenticationCredentials.js | 50 + .../basicAuthenticationCredentials.js.map | 1 + .../dist-esm/src/credentials/credentials.js | 4 + .../src/credentials/credentials.js.map | 1 + .../credentials/serviceClientCredentials.js | 4 + .../serviceClientCredentials.js.map | 1 + .../src/credentials/topicCredentials.js | 25 + .../src/credentials/topicCredentials.js.map | 1 + .../dist-esm/src/defaultHttpClient.browser.js | 4 + .../src/defaultHttpClient.browser.js.map | 1 + .../dist-esm/src/defaultHttpClient.js | 4 + .../dist-esm/src/defaultHttpClient.js.map | 1 + .../core-http/dist-esm/src/httpClient.js | 4 + .../core-http/dist-esm/src/httpClient.js.map | 1 + .../core-http/dist-esm/src/httpClientCache.js | 11 + .../dist-esm/src/httpClientCache.js.map | 1 + .../core-http/dist-esm/src/httpHeaders.js | 151 + .../core-http/dist-esm/src/httpHeaders.js.map | 1 + .../dist-esm/src/httpOperationResponse.js | 4 + .../dist-esm/src/httpOperationResponse.js.map | 1 + .../dist-esm/src/httpPipelineLogLevel.js | 25 + .../dist-esm/src/httpPipelineLogLevel.js.map | 1 + .../dist-esm/src/httpPipelineLogger.js | 35 + .../dist-esm/src/httpPipelineLogger.js.map | 1 + .../@azure/core-http/dist-esm/src/log.js | 5 + .../@azure/core-http/dist-esm/src/log.js.map | 1 + .../dist-esm/src/nodeFetchHttpClient.js | 313 + .../dist-esm/src/nodeFetchHttpClient.js.map | 1 + .../dist-esm/src/operationArguments.js | 4 + .../dist-esm/src/operationArguments.js.map | 1 + .../dist-esm/src/operationOptions.js | 22 + .../dist-esm/src/operationOptions.js.map | 1 + .../dist-esm/src/operationParameter.js | 24 + .../dist-esm/src/operationParameter.js.map | 1 + .../dist-esm/src/operationResponse.js | 4 + .../dist-esm/src/operationResponse.js.map | 1 + .../core-http/dist-esm/src/operationSpec.js | 19 + .../dist-esm/src/operationSpec.js.map | 1 + .../core-http/dist-esm/src/pipelineOptions.js | 4 + .../dist-esm/src/pipelineOptions.js.map | 1 + .../bearerTokenAuthenticationPolicy.js | 183 + .../bearerTokenAuthenticationPolicy.js.map | 1 + .../src/policies/deserializationPolicy.js | 239 + .../src/policies/deserializationPolicy.js.map | 1 + ...ableResponseDecompressionPolicy.browser.js | 28 + ...ResponseDecompressionPolicy.browser.js.map | 1 + .../disableResponseDecompressionPolicy.js | 42 + .../disableResponseDecompressionPolicy.js.map | 1 + .../src/policies/exponentialRetryPolicy.js | 106 + .../policies/exponentialRetryPolicy.js.map | 1 + .../policies/generateClientRequestIdPolicy.js | 27 + .../generateClientRequestIdPolicy.js.map | 1 + .../dist-esm/src/policies/keepAlivePolicy.js | 48 + .../src/policies/keepAlivePolicy.js.map | 1 + .../dist-esm/src/policies/logPolicy.js | 78 + .../dist-esm/src/policies/logPolicy.js.map | 1 + .../policies/msRestUserAgentPolicy.browser.js | 14 + .../msRestUserAgentPolicy.browser.js.map | 1 + .../src/policies/msRestUserAgentPolicy.js | 19 + .../src/policies/msRestUserAgentPolicy.js.map | 1 + .../policies/msRestUserAgentPolicy.native.js | 19 + .../msRestUserAgentPolicy.native.js.map | 1 + .../dist-esm/src/policies/ndJsonPolicy.js | 37 + .../dist-esm/src/policies/ndJsonPolicy.js.map | 1 + .../src/policies/proxyPolicy.browser.js | 24 + .../src/policies/proxyPolicy.browser.js.map | 1 + .../dist-esm/src/policies/proxyPolicy.js | 150 + .../dist-esm/src/policies/proxyPolicy.js.map | 1 + .../dist-esm/src/policies/redirectPolicy.js | 64 + .../src/policies/redirectPolicy.js.map | 1 + .../dist-esm/src/policies/requestPolicy.js | 70 + .../src/policies/requestPolicy.js.map | 1 
+ .../src/policies/rpRegistrationPolicy.js | 153 + .../src/policies/rpRegistrationPolicy.js.map | 1 + .../dist-esm/src/policies/signingPolicy.js | 31 + .../src/policies/signingPolicy.js.map | 1 + .../src/policies/systemErrorRetryPolicy.js | 78 + .../policies/systemErrorRetryPolicy.js.map | 1 + .../src/policies/throttlingRetryPolicy.js | 97 + .../src/policies/throttlingRetryPolicy.js.map | 1 + .../dist-esm/src/policies/tracingPolicy.js | 126 + .../src/policies/tracingPolicy.js.map | 1 + .../dist-esm/src/policies/userAgentPolicy.js | 78 + .../src/policies/userAgentPolicy.js.map | 1 + .../core-http/dist-esm/src/proxyAgent.js | 57 + .../core-http/dist-esm/src/proxyAgent.js.map | 1 + .../dist-esm/src/queryCollectionFormat.js | 29 + .../dist-esm/src/queryCollectionFormat.js.map | 1 + .../core-http/dist-esm/src/restError.js | 34 + .../core-http/dist-esm/src/restError.js.map | 1 + .../core-http/dist-esm/src/serializer.js | 906 + .../core-http/dist-esm/src/serializer.js.map | 1 + .../core-http/dist-esm/src/serviceClient.js | 622 + .../dist-esm/src/serviceClient.js.map | 1 + .../@azure/core-http/dist-esm/src/url.js | 597 + .../@azure/core-http/dist-esm/src/url.js.map | 1 + .../dist-esm/src/util/base64.browser.js | 33 + .../dist-esm/src/util/base64.browser.js.map | 1 + .../core-http/dist-esm/src/util/base64.js | 27 + .../core-http/dist-esm/src/util/base64.js.map | 1 + .../core-http/dist-esm/src/util/constants.js | 74 + .../dist-esm/src/util/constants.js.map | 1 + .../core-http/dist-esm/src/util/delay.js | 46 + .../core-http/dist-esm/src/util/delay.js.map | 1 + .../src/util/exponentialBackoffStrategy.js | 51 + .../util/exponentialBackoffStrategy.js.map | 1 + .../dist-esm/src/util/inspect.browser.js | 4 + .../dist-esm/src/util/inspect.browser.js.map | 1 + .../core-http/dist-esm/src/util/inspect.js | 5 + .../dist-esm/src/util/inspect.js.map | 1 + .../core-http/dist-esm/src/util/sanitizer.js | 137 + .../dist-esm/src/util/sanitizer.js.map | 1 + .../dist-esm/src/util/serializer.common.js | 11 + .../src/util/serializer.common.js.map | 1 + .../src/util/throttlingRetryStrategy.js | 7 + .../src/util/throttlingRetryStrategy.js.map | 1 + .../core-http/dist-esm/src/util/typeguards.js | 11 + .../dist-esm/src/util/typeguards.js.map | 1 + .../core-http/dist-esm/src/util/utils.js | 213 + .../core-http/dist-esm/src/util/utils.js.map | 1 + .../dist-esm/src/util/xml.browser.js | 195 + .../dist-esm/src/util/xml.browser.js.map | 1 + .../@azure/core-http/dist-esm/src/util/xml.js | 96 + .../core-http/dist-esm/src/util/xml.js.map | 1 + .../core-http/dist-esm/src/webResource.js | 264 + .../core-http/dist-esm/src/webResource.js.map | 1 + .../core-http/dist-esm/src/xhrHttpClient.js | 176 + .../dist-esm/src/xhrHttpClient.js.map | 1 + node_modules/@azure/core-http/dist/index.js | 5536 ++ .../@azure/core-http/dist/index.js.map | 1 + node_modules/@azure/core-http/dom-shim.d.ts | 10 + .../@azure/core-http/node_modules/.bin/uuid | 1 + .../core-http/node_modules/form-data/License | 19 + .../node_modules/form-data/README.md.bak | 358 + .../node_modules/form-data/Readme.md | 358 + .../node_modules/form-data/index.d.ts | 62 + .../node_modules/form-data/lib/browser.js | 2 + .../node_modules/form-data/lib/form_data.js | 501 + .../node_modules/form-data/lib/populate.js | 10 + .../node_modules/form-data/package.json | 68 + .../node_modules/tough-cookie/LICENSE | 12 + .../node_modules/tough-cookie/README.md | 582 + .../node_modules/tough-cookie/lib/cookie.js | 1671 + .../node_modules/tough-cookie/lib/memstore.js | 190 + 
.../tough-cookie/lib/pathMatch.js | 61 + .../tough-cookie/lib/permuteDomain.js | 70 + .../tough-cookie/lib/pubsuffix-psl.js | 38 + .../node_modules/tough-cookie/lib/store.js | 76 + .../node_modules/tough-cookie/lib/version.js | 2 + .../node_modules/tough-cookie/package.json | 109 + .../node_modules/tslib/CopyrightNotice.txt | 15 + .../core-http/node_modules/tslib/LICENSE.txt | 12 + .../core-http/node_modules/tslib/README.md | 164 + .../node_modules/tslib/modules/index.js | 55 + .../node_modules/tslib/modules/package.json | 3 + .../core-http/node_modules/tslib/package.json | 38 + .../core-http/node_modules/tslib/tslib.d.ts | 398 + .../node_modules/tslib/tslib.es6.html | 1 + .../core-http/node_modules/tslib/tslib.es6.js | 248 + .../core-http/node_modules/tslib/tslib.html | 1 + .../core-http/node_modules/tslib/tslib.js | 317 + .../core-http/node_modules/uuid/CHANGELOG.md | 229 + .../node_modules/uuid/CONTRIBUTING.md | 18 + .../core-http/node_modules/uuid/LICENSE.md | 9 + .../core-http/node_modules/uuid/README.md | 505 + .../core-http/node_modules/uuid/dist/bin/uuid | 2 + .../uuid/dist/esm-browser/index.js | 9 + .../node_modules/uuid/dist/esm-browser/md5.js | 215 + .../node_modules/uuid/dist/esm-browser/nil.js | 1 + .../uuid/dist/esm-browser/parse.js | 35 + .../uuid/dist/esm-browser/regex.js | 1 + .../node_modules/uuid/dist/esm-browser/rng.js | 19 + .../uuid/dist/esm-browser/sha1.js | 96 + .../uuid/dist/esm-browser/stringify.js | 30 + .../node_modules/uuid/dist/esm-browser/v1.js | 95 + .../node_modules/uuid/dist/esm-browser/v3.js | 4 + .../node_modules/uuid/dist/esm-browser/v35.js | 64 + .../node_modules/uuid/dist/esm-browser/v4.js | 24 + .../node_modules/uuid/dist/esm-browser/v5.js | 4 + .../uuid/dist/esm-browser/validate.js | 7 + .../uuid/dist/esm-browser/version.js | 11 + .../node_modules/uuid/dist/esm-node/index.js | 9 + .../node_modules/uuid/dist/esm-node/md5.js | 13 + .../node_modules/uuid/dist/esm-node/nil.js | 1 + .../node_modules/uuid/dist/esm-node/parse.js | 35 + .../node_modules/uuid/dist/esm-node/regex.js | 1 + .../node_modules/uuid/dist/esm-node/rng.js | 12 + .../node_modules/uuid/dist/esm-node/sha1.js | 13 + .../uuid/dist/esm-node/stringify.js | 29 + .../node_modules/uuid/dist/esm-node/v1.js | 95 + .../node_modules/uuid/dist/esm-node/v3.js | 4 + .../node_modules/uuid/dist/esm-node/v35.js | 64 + .../node_modules/uuid/dist/esm-node/v4.js | 24 + .../node_modules/uuid/dist/esm-node/v5.js | 4 + .../uuid/dist/esm-node/validate.js | 7 + .../uuid/dist/esm-node/version.js | 11 + .../core-http/node_modules/uuid/dist/index.js | 79 + .../node_modules/uuid/dist/md5-browser.js | 223 + .../core-http/node_modules/uuid/dist/md5.js | 23 + .../core-http/node_modules/uuid/dist/nil.js | 8 + .../core-http/node_modules/uuid/dist/parse.js | 45 + .../core-http/node_modules/uuid/dist/regex.js | 8 + .../node_modules/uuid/dist/rng-browser.js | 26 + .../core-http/node_modules/uuid/dist/rng.js | 24 + .../node_modules/uuid/dist/sha1-browser.js | 104 + .../core-http/node_modules/uuid/dist/sha1.js | 23 + .../node_modules/uuid/dist/stringify.js | 39 + .../node_modules/uuid/dist/umd/uuid.min.js | 1 + .../node_modules/uuid/dist/umd/uuidNIL.min.js | 1 + .../uuid/dist/umd/uuidParse.min.js | 1 + .../uuid/dist/umd/uuidStringify.min.js | 1 + .../uuid/dist/umd/uuidValidate.min.js | 1 + .../uuid/dist/umd/uuidVersion.min.js | 1 + .../node_modules/uuid/dist/umd/uuidv1.min.js | 1 + .../node_modules/uuid/dist/umd/uuidv3.min.js | 1 + .../node_modules/uuid/dist/umd/uuidv4.min.js | 1 + 
.../node_modules/uuid/dist/umd/uuidv5.min.js | 1 + .../node_modules/uuid/dist/uuid-bin.js | 85 + .../core-http/node_modules/uuid/dist/v1.js | 107 + .../core-http/node_modules/uuid/dist/v3.js | 16 + .../core-http/node_modules/uuid/dist/v35.js | 78 + .../core-http/node_modules/uuid/dist/v4.js | 37 + .../core-http/node_modules/uuid/dist/v5.js | 16 + .../node_modules/uuid/dist/validate.js | 17 + .../node_modules/uuid/dist/version.js | 21 + .../core-http/node_modules/uuid/package.json | 135 + .../core-http/node_modules/uuid/wrapper.mjs | 10 + node_modules/@azure/core-http/package.json | 178 + .../core-http/types/3.1/src/coreHttp.d.ts | 51 + .../types/3.1/src/createSpanLegacy.d.ts | 32 + .../3.1/src/credentials/accessTokenCache.d.ts | 48 + .../src/credentials/accessTokenRefresher.d.ts | 32 + .../src/credentials/apiKeyCredentials.d.ts | 44 + .../basicAuthenticationCredentials.d.ts | 36 + .../3.1/src/credentials/credentials.d.ts | 6 + .../credentials/serviceClientCredentials.d.ts | 14 + .../3.1/src/credentials/topicCredentials.d.ts | 13 + .../3.1/src/defaultHttpClient.browser.d.ts | 2 + .../types/3.1/src/defaultHttpClient.d.ts | 2 + .../core-http/types/3.1/src/httpClient.d.ts | 7 + .../types/3.1/src/httpClientCache.d.ts | 3 + .../core-http/types/3.1/src/httpHeaders.d.ts | 136 + .../types/3.1/src/httpOperationResponse.d.ts | 78 + .../types/3.1/src/httpPipelineLogLevel.d.ts | 22 + .../types/3.1/src/httpPipelineLogger.d.ts | 35 + .../@azure/core-http/types/3.1/src/log.d.ts | 2 + .../types/3.1/src/nodeFetchHttpClient.d.ts | 64 + .../types/3.1/src/operationArguments.d.ts | 15 + .../types/3.1/src/operationOptions.d.ts | 56 + .../types/3.1/src/operationParameter.d.ts | 54 + .../types/3.1/src/operationResponse.d.ts | 19 + .../types/3.1/src/operationSpec.d.ts | 77 + .../types/3.1/src/pipelineOptions.d.ts | 63 + .../bearerTokenAuthenticationPolicy.d.ts | 33 + .../src/policies/deserializationPolicy.d.ts | 59 + ...leResponseDecompressionPolicy.browser.d.ts | 13 + .../disableResponseDecompressionPolicy.d.ts | 29 + .../src/policies/exponentialRetryPolicy.d.ts | 73 + .../generateClientRequestIdPolicy.d.ts | 14 + .../3.1/src/policies/keepAlivePolicy.d.ts | 46 + .../types/3.1/src/policies/logPolicy.d.ts | 77 + .../msRestUserAgentPolicy.browser.d.ts | 4 + .../src/policies/msRestUserAgentPolicy.d.ts | 4 + .../msRestUserAgentPolicy.native.d.ts | 4 + .../types/3.1/src/policies/ndJsonPolicy.d.ts | 3 + .../3.1/src/policies/proxyPolicy.browser.d.ts | 11 + .../types/3.1/src/policies/proxyPolicy.d.ts | 37 + .../3.1/src/policies/redirectPolicy.d.ts | 33 + .../types/3.1/src/policies/requestPolicy.d.ts | 102 + .../src/policies/rpRegistrationPolicy.d.ts | 10 + .../types/3.1/src/policies/signingPolicy.d.ts | 20 + .../src/policies/systemErrorRetryPolicy.d.ts | 28 + .../src/policies/throttlingRetryPolicy.d.ts | 35 + .../types/3.1/src/policies/tracingPolicy.d.ts | 31 + .../3.1/src/policies/userAgentPolicy.d.ts | 49 + .../core-http/types/3.1/src/proxyAgent.d.ts | 14 + .../types/3.1/src/queryCollectionFormat.d.ts | 26 + .../core-http/types/3.1/src/restError.d.ts | 37 + .../core-http/types/3.1/src/serializer.d.ts | 356 + .../types/3.1/src/serviceClient.d.ts | 168 + .../@azure/core-http/types/3.1/src/url.d.ts | 160 + .../types/3.1/src/util/base64.browser.d.ts | 16 + .../core-http/types/3.1/src/util/base64.d.ts | 16 + .../types/3.1/src/util/constants.d.ts | 72 + .../core-http/types/3.1/src/util/delay.d.ts | 15 + .../src/util/exponentialBackoffStrategy.d.ts | 40 + .../types/3.1/src/util/inspect.browser.d.ts | 2 + 
.../core-http/types/3.1/src/util/inspect.d.ts | 2 + .../types/3.1/src/util/sanitizer.d.ts | 25 + .../types/3.1/src/util/serializer.common.d.ts | 26 + .../3.1/src/util/throttlingRetryStrategy.d.ts | 5 + .../types/3.1/src/util/typeguards.d.ts | 7 + .../core-http/types/3.1/src/util/utils.d.ts | 131 + .../types/3.1/src/util/xml.browser.d.ts | 4 + .../core-http/types/3.1/src/util/xml.d.ts | 14 + .../core-http/types/3.1/src/webResource.d.ts | 445 + .../types/3.1/src/xhrHttpClient.d.ts | 12 + .../core-http/types/latest/src/coreHttp.d.ts | 51 + .../types/latest/src/coreHttp.d.ts.map | 1 + .../types/latest/src/createSpanLegacy.d.ts | 32 + .../latest/src/createSpanLegacy.d.ts.map | 1 + .../src/credentials/accessTokenCache.d.ts | 48 + .../src/credentials/accessTokenCache.d.ts.map | 1 + .../src/credentials/accessTokenRefresher.d.ts | 32 + .../credentials/accessTokenRefresher.d.ts.map | 1 + .../src/credentials/apiKeyCredentials.d.ts | 44 + .../credentials/apiKeyCredentials.d.ts.map | 1 + .../basicAuthenticationCredentials.d.ts | 36 + .../basicAuthenticationCredentials.d.ts.map | 1 + .../latest/src/credentials/credentials.d.ts | 6 + .../src/credentials/credentials.d.ts.map | 1 + .../credentials/serviceClientCredentials.d.ts | 14 + .../serviceClientCredentials.d.ts.map | 1 + .../src/credentials/topicCredentials.d.ts | 13 + .../src/credentials/topicCredentials.d.ts.map | 1 + .../latest/src/defaultHttpClient.browser.d.ts | 2 + .../src/defaultHttpClient.browser.d.ts.map | 1 + .../types/latest/src/defaultHttpClient.d.ts | 2 + .../latest/src/defaultHttpClient.d.ts.map | 1 + .../types/latest/src/httpClient.d.ts | 7 + .../types/latest/src/httpClient.d.ts.map | 1 + .../types/latest/src/httpClientCache.d.ts | 3 + .../types/latest/src/httpClientCache.d.ts.map | 1 + .../types/latest/src/httpHeaders.d.ts | 136 + .../types/latest/src/httpHeaders.d.ts.map | 1 + .../latest/src/httpOperationResponse.d.ts | 78 + .../latest/src/httpOperationResponse.d.ts.map | 1 + .../latest/src/httpPipelineLogLevel.d.ts | 22 + .../latest/src/httpPipelineLogLevel.d.ts.map | 1 + .../types/latest/src/httpPipelineLogger.d.ts | 35 + .../latest/src/httpPipelineLogger.d.ts.map | 1 + .../core-http/types/latest/src/log.d.ts | 2 + .../core-http/types/latest/src/log.d.ts.map | 1 + .../types/latest/src/nodeFetchHttpClient.d.ts | 64 + .../latest/src/nodeFetchHttpClient.d.ts.map | 1 + .../types/latest/src/operationArguments.d.ts | 15 + .../latest/src/operationArguments.d.ts.map | 1 + .../types/latest/src/operationOptions.d.ts | 56 + .../latest/src/operationOptions.d.ts.map | 1 + .../types/latest/src/operationParameter.d.ts | 54 + .../latest/src/operationParameter.d.ts.map | 1 + .../types/latest/src/operationResponse.d.ts | 19 + .../latest/src/operationResponse.d.ts.map | 1 + .../types/latest/src/operationSpec.d.ts | 77 + .../types/latest/src/operationSpec.d.ts.map | 1 + .../types/latest/src/pipelineOptions.d.ts | 63 + .../types/latest/src/pipelineOptions.d.ts.map | 1 + .../bearerTokenAuthenticationPolicy.d.ts | 33 + .../bearerTokenAuthenticationPolicy.d.ts.map | 1 + .../src/policies/deserializationPolicy.d.ts | 59 + .../policies/deserializationPolicy.d.ts.map | 1 + ...leResponseDecompressionPolicy.browser.d.ts | 13 + ...sponseDecompressionPolicy.browser.d.ts.map | 1 + .../disableResponseDecompressionPolicy.d.ts | 29 + ...isableResponseDecompressionPolicy.d.ts.map | 1 + .../src/policies/exponentialRetryPolicy.d.ts | 73 + .../policies/exponentialRetryPolicy.d.ts.map | 1 + .../generateClientRequestIdPolicy.d.ts | 14 + 
.../generateClientRequestIdPolicy.d.ts.map | 1 + .../latest/src/policies/keepAlivePolicy.d.ts | 46 + .../src/policies/keepAlivePolicy.d.ts.map | 1 + .../types/latest/src/policies/logPolicy.d.ts | 79 + .../latest/src/policies/logPolicy.d.ts.map | 1 + .../msRestUserAgentPolicy.browser.d.ts | 4 + .../msRestUserAgentPolicy.browser.d.ts.map | 1 + .../src/policies/msRestUserAgentPolicy.d.ts | 4 + .../policies/msRestUserAgentPolicy.d.ts.map | 1 + .../msRestUserAgentPolicy.native.d.ts | 4 + .../msRestUserAgentPolicy.native.d.ts.map | 1 + .../latest/src/policies/ndJsonPolicy.d.ts | 3 + .../latest/src/policies/ndJsonPolicy.d.ts.map | 1 + .../src/policies/proxyPolicy.browser.d.ts | 11 + .../src/policies/proxyPolicy.browser.d.ts.map | 1 + .../latest/src/policies/proxyPolicy.d.ts | 37 + .../latest/src/policies/proxyPolicy.d.ts.map | 1 + .../latest/src/policies/redirectPolicy.d.ts | 33 + .../src/policies/redirectPolicy.d.ts.map | 1 + .../latest/src/policies/requestPolicy.d.ts | 102 + .../src/policies/requestPolicy.d.ts.map | 1 + .../src/policies/rpRegistrationPolicy.d.ts | 10 + .../policies/rpRegistrationPolicy.d.ts.map | 1 + .../latest/src/policies/signingPolicy.d.ts | 20 + .../src/policies/signingPolicy.d.ts.map | 1 + .../src/policies/systemErrorRetryPolicy.d.ts | 28 + .../policies/systemErrorRetryPolicy.d.ts.map | 1 + .../src/policies/throttlingRetryPolicy.d.ts | 35 + .../policies/throttlingRetryPolicy.d.ts.map | 1 + .../latest/src/policies/tracingPolicy.d.ts | 31 + .../src/policies/tracingPolicy.d.ts.map | 1 + .../latest/src/policies/userAgentPolicy.d.ts | 49 + .../src/policies/userAgentPolicy.d.ts.map | 1 + .../types/latest/src/proxyAgent.d.ts | 14 + .../types/latest/src/proxyAgent.d.ts.map | 1 + .../latest/src/queryCollectionFormat.d.ts | 26 + .../latest/src/queryCollectionFormat.d.ts.map | 1 + .../core-http/types/latest/src/restError.d.ts | 37 + .../types/latest/src/restError.d.ts.map | 1 + .../types/latest/src/serializer.d.ts | 356 + .../types/latest/src/serializer.d.ts.map | 1 + .../types/latest/src/serviceClient.d.ts | 168 + .../types/latest/src/serviceClient.d.ts.map | 1 + .../core-http/types/latest/src/url.d.ts | 160 + .../core-http/types/latest/src/url.d.ts.map | 1 + .../types/latest/src/util/base64.browser.d.ts | 16 + .../latest/src/util/base64.browser.d.ts.map | 1 + .../types/latest/src/util/base64.d.ts | 16 + .../types/latest/src/util/base64.d.ts.map | 1 + .../types/latest/src/util/constants.d.ts | 72 + .../types/latest/src/util/constants.d.ts.map | 1 + .../types/latest/src/util/delay.d.ts | 15 + .../types/latest/src/util/delay.d.ts.map | 1 + .../src/util/exponentialBackoffStrategy.d.ts | 40 + .../util/exponentialBackoffStrategy.d.ts.map | 1 + .../latest/src/util/inspect.browser.d.ts | 2 + .../latest/src/util/inspect.browser.d.ts.map | 1 + .../types/latest/src/util/inspect.d.ts | 2 + .../types/latest/src/util/inspect.d.ts.map | 1 + .../types/latest/src/util/sanitizer.d.ts | 25 + .../types/latest/src/util/sanitizer.d.ts.map | 1 + .../latest/src/util/serializer.common.d.ts | 26 + .../src/util/serializer.common.d.ts.map | 1 + .../src/util/throttlingRetryStrategy.d.ts | 5 + .../src/util/throttlingRetryStrategy.d.ts.map | 1 + .../types/latest/src/util/typeguards.d.ts | 7 + .../types/latest/src/util/typeguards.d.ts.map | 1 + .../types/latest/src/util/utils.d.ts | 131 + .../types/latest/src/util/utils.d.ts.map | 1 + .../types/latest/src/util/xml.browser.d.ts | 4 + .../latest/src/util/xml.browser.d.ts.map | 1 + .../core-http/types/latest/src/util/xml.d.ts | 14 + 
.../types/latest/src/util/xml.d.ts.map | 1 + .../types/latest/src/webResource.d.ts | 445 + .../types/latest/src/webResource.d.ts.map | 1 + .../types/latest/src/xhrHttpClient.d.ts | 12 + .../types/latest/src/xhrHttpClient.d.ts.map | 1 + node_modules/@azure/core-lro/CHANGELOG.md | 82 + node_modules/@azure/core-lro/LICENSE | 21 + node_modules/@azure/core-lro/README.md | 92 + .../@azure/core-lro/dist-esm/src/index.js | 6 + .../@azure/core-lro/dist-esm/src/index.js.map | 1 + .../dist-esm/src/lroEngine/bodyPolling.js | 25 + .../dist-esm/src/lroEngine/bodyPolling.js.map | 1 + .../core-lro/dist-esm/src/lroEngine/index.js | 4 + .../dist-esm/src/lroEngine/index.js.map | 1 + .../dist-esm/src/lroEngine/locationPolling.js | 47 + .../src/lroEngine/locationPolling.js.map | 1 + .../core-lro/dist-esm/src/lroEngine/logger.js | 9 + .../dist-esm/src/lroEngine/logger.js.map | 1 + .../dist-esm/src/lroEngine/lroEngine.js | 34 + .../dist-esm/src/lroEngine/lroEngine.js.map | 1 + .../core-lro/dist-esm/src/lroEngine/models.js | 9 + .../dist-esm/src/lroEngine/models.js.map | 1 + .../dist-esm/src/lroEngine/operation.js | 93 + .../dist-esm/src/lroEngine/operation.js.map | 1 + .../dist-esm/src/lroEngine/passthrough.js | 6 + .../dist-esm/src/lroEngine/passthrough.js.map | 1 + .../dist-esm/src/lroEngine/requestUtils.js | 78 + .../src/lroEngine/requestUtils.js.map | 1 + .../dist-esm/src/lroEngine/stateMachine.js | 74 + .../src/lroEngine/stateMachine.js.map | 1 + .../core-lro/dist-esm/src/pollOperation.js | 4 + .../dist-esm/src/pollOperation.js.map | 1 + .../@azure/core-lro/dist-esm/src/poller.js | 390 + .../core-lro/dist-esm/src/poller.js.map | 1 + node_modules/@azure/core-lro/dist/index.js | 751 + .../@azure/core-lro/dist/index.js.map | 1 + .../node_modules/tslib/CopyrightNotice.txt | 15 + .../core-lro/node_modules/tslib/LICENSE.txt | 12 + .../core-lro/node_modules/tslib/README.md | 164 + .../node_modules/tslib/modules/index.js | 55 + .../node_modules/tslib/modules/package.json | 3 + .../core-lro/node_modules/tslib/package.json | 38 + .../core-lro/node_modules/tslib/tslib.d.ts | 398 + .../node_modules/tslib/tslib.es6.html | 1 + .../core-lro/node_modules/tslib/tslib.es6.js | 248 + .../core-lro/node_modules/tslib/tslib.html | 1 + .../core-lro/node_modules/tslib/tslib.js | 317 + node_modules/@azure/core-lro/package.json | 133 + .../@azure/core-lro/types/core-lro.d.ts | 565 + node_modules/@azure/core-paging/LICENSE | 21 + node_modules/@azure/core-paging/README.md | 61 + .../dist-esm/src/getPagedAsyncIterator.js | 72 + .../dist-esm/src/getPagedAsyncIterator.js.map | 1 + .../@azure/core-paging/dist-esm/src/index.js | 5 + .../core-paging/dist-esm/src/index.js.map | 1 + .../@azure/core-paging/dist-esm/src/models.js | 4 + .../core-paging/dist-esm/src/models.js.map | 1 + node_modules/@azure/core-paging/dist/index.js | 78 + .../@azure/core-paging/dist/index.js.map | 1 + .../node_modules/tslib/CopyrightNotice.txt | 15 + .../node_modules/tslib/LICENSE.txt | 12 + .../core-paging/node_modules/tslib/README.md | 164 + .../node_modules/tslib/modules/index.js | 55 + .../node_modules/tslib/modules/package.json | 3 + .../node_modules/tslib/package.json | 38 + .../core-paging/node_modules/tslib/tslib.d.ts | 398 + .../node_modules/tslib/tslib.es6.html | 1 + .../node_modules/tslib/tslib.es6.js | 248 + .../core-paging/node_modules/tslib/tslib.html | 1 + .../core-paging/node_modules/tslib/tslib.js | 317 + node_modules/@azure/core-paging/package.json | 108 + .../core-paging/types/3.1/core-paging.d.ts | 60 + .../getPagedAsyncIteratorSample.d.ts | 2 
+ .../types/3.1/src/getPagedAsyncIterator.d.ts | 10 + .../core-paging/types/3.1/src/index.d.ts | 3 + .../core-paging/types/3.1/src/models.d.ts | 52 + .../3.1/test/getPagedAsyncIterator.spec.d.ts | 2 + .../core-paging/types/latest/core-paging.d.ts | 64 + node_modules/@azure/core-tracing/CHANGELOG.md | 81 + node_modules/@azure/core-tracing/LICENSE | 21 + node_modules/@azure/core-tracing/README.md | 72 + .../core-tracing/dist-esm/src/createSpan.js | 63 + .../dist-esm/src/createSpan.js.map | 1 + .../@azure/core-tracing/dist-esm/src/index.js | 9 + .../core-tracing/dist-esm/src/index.js.map | 1 + .../core-tracing/dist-esm/src/interfaces.js | 102 + .../dist-esm/src/interfaces.js.map | 1 + .../dist-esm/src/utils/traceParentHeader.js | 48 + .../src/utils/traceParentHeader.js.map | 1 + .../@azure/core-tracing/dist/index.js | 219 + .../@azure/core-tracing/dist/index.js.map | 1 + .../node_modules/tslib/CopyrightNotice.txt | 15 + .../node_modules/tslib/LICENSE.txt | 12 + .../core-tracing/node_modules/tslib/README.md | 164 + .../node_modules/tslib/modules/index.js | 55 + .../node_modules/tslib/modules/package.json | 3 + .../node_modules/tslib/package.json | 38 + .../node_modules/tslib/tslib.d.ts | 398 + .../node_modules/tslib/tslib.es6.html | 1 + .../node_modules/tslib/tslib.es6.js | 248 + .../node_modules/tslib/tslib.html | 1 + .../core-tracing/node_modules/tslib/tslib.js | 317 + node_modules/@azure/core-tracing/package.json | 99 + .../core-tracing/types/core-tracing.d.ts | 531 + node_modules/@azure/logger/CHANGELOG.md | 27 + node_modules/@azure/logger/LICENSE | 21 + node_modules/@azure/logger/README.md | 88 + .../@azure/logger/dist-esm/src/debug.js | 93 + .../@azure/logger/dist-esm/src/debug.js.map | 1 + .../@azure/logger/dist-esm/src/index.js | 103 + .../@azure/logger/dist-esm/src/index.js.map | 1 + .../@azure/logger/dist-esm/src/log.browser.js | 23 + .../logger/dist-esm/src/log.browser.js.map | 1 + .../@azure/logger/dist-esm/src/log.js | 8 + .../@azure/logger/dist-esm/src/log.js.map | 1 + node_modules/@azure/logger/dist/index.js | 210 + node_modules/@azure/logger/dist/index.js.map | 1 + .../node_modules/tslib/CopyrightNotice.txt | 15 + .../logger/node_modules/tslib/LICENSE.txt | 12 + .../logger/node_modules/tslib/README.md | 164 + .../node_modules/tslib/modules/index.js | 55 + .../node_modules/tslib/modules/package.json | 3 + .../logger/node_modules/tslib/package.json | 38 + .../logger/node_modules/tslib/tslib.d.ts | 398 + .../logger/node_modules/tslib/tslib.es6.html | 1 + .../logger/node_modules/tslib/tslib.es6.js | 248 + .../logger/node_modules/tslib/tslib.html | 1 + .../@azure/logger/node_modules/tslib/tslib.js | 317 + node_modules/@azure/logger/package.json | 109 + node_modules/@azure/logger/types/logger.d.ts | 104 + node_modules/@azure/ms-rest-js/LICENSE | 21 + node_modules/@azure/ms-rest-js/README.md | 41 + .../@azure/ms-rest-js/ThirdPartyNotices.txt | 35 + .../@azure/ms-rest-js/dist/msRest.browser.js | 4405 ++ .../ms-rest-js/dist/msRest.browser.js.map | 1 + .../ms-rest-js/dist/msRest.browser.min.js | 19 + .../ms-rest-js/dist/msRest.browser.min.js.map | 1 + .../@azure/ms-rest-js/dist/msRest.node.js | 5430 ++ .../@azure/ms-rest-js/dist/msRest.node.js.map | 1 + node_modules/@azure/ms-rest-js/dom-shim.d.ts | 10 + .../es/lib/browserFetchHttpClient.d.ts | 9 + .../es/lib/browserFetchHttpClient.d.ts.map | 1 + .../es/lib/browserFetchHttpClient.js | 22 + .../es/lib/browserFetchHttpClient.js.map | 1 + .../es/lib/credentials/apiKeyCredentials.d.ts | 46 + .../credentials/apiKeyCredentials.d.ts.map | 1 + 
.../es/lib/credentials/apiKeyCredentials.js | 56 + .../lib/credentials/apiKeyCredentials.js.map | 1 + .../azureIdentityTokenCredentialAdapter.d.ts | 20 + ...ureIdentityTokenCredentialAdapter.d.ts.map | 1 + .../azureIdentityTokenCredentialAdapter.js | 66 + ...azureIdentityTokenCredentialAdapter.js.map | 1 + .../basicAuthenticationCredentials.d.ts | 24 + .../basicAuthenticationCredentials.d.ts.map | 1 + .../basicAuthenticationCredentials.js | 47 + .../basicAuthenticationCredentials.js.map | 1 + .../es/lib/credentials/credentials.d.ts | 2 + .../es/lib/credentials/credentials.d.ts.map | 1 + .../es/lib/credentials/credentials.js | 3 + .../es/lib/credentials/credentials.js.map | 1 + .../es/lib/credentials/domainCredentials.d.ts | 11 + .../credentials/domainCredentials.d.ts.map | 1 + .../es/lib/credentials/domainCredentials.js | 29 + .../lib/credentials/domainCredentials.js.map | 1 + .../credentials/serviceClientCredentials.d.ts | 11 + .../serviceClientCredentials.d.ts.map | 1 + .../credentials/serviceClientCredentials.js | 3 + .../serviceClientCredentials.js.map | 1 + .../es/lib/credentials/tokenCredentials.d.ts | 25 + .../lib/credentials/tokenCredentials.d.ts.map | 1 + .../es/lib/credentials/tokenCredentials.js | 42 + .../lib/credentials/tokenCredentials.js.map | 1 + .../es/lib/credentials/tokenResponse.d.ts | 10 + .../es/lib/credentials/tokenResponse.d.ts.map | 1 + .../es/lib/credentials/tokenResponse.js | 3 + .../es/lib/credentials/tokenResponse.js.map | 1 + .../es/lib/credentials/topicCredentials.d.ts | 11 + .../lib/credentials/topicCredentials.d.ts.map | 1 + .../es/lib/credentials/topicCredentials.js | 29 + .../lib/credentials/topicCredentials.js.map | 1 + .../es/lib/defaultHttpClient.browser.d.ts | 2 + .../es/lib/defaultHttpClient.browser.d.ts.map | 1 + .../es/lib/defaultHttpClient.browser.js | 4 + .../es/lib/defaultHttpClient.browser.js.map | 1 + .../ms-rest-js/es/lib/defaultHttpClient.d.ts | 2 + .../es/lib/defaultHttpClient.d.ts.map | 1 + .../ms-rest-js/es/lib/defaultHttpClient.js | 4 + .../es/lib/defaultHttpClient.js.map | 1 + .../ms-rest-js/es/lib/fetchHttpClient.d.ts | 23 + .../es/lib/fetchHttpClient.d.ts.map | 1 + .../ms-rest-js/es/lib/fetchHttpClient.js | 214 + .../ms-rest-js/es/lib/fetchHttpClient.js.map | 1 + .../@azure/ms-rest-js/es/lib/httpClient.d.ts | 7 + .../ms-rest-js/es/lib/httpClient.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/httpClient.js | 3 + .../ms-rest-js/es/lib/httpClient.js.map | 1 + .../@azure/ms-rest-js/es/lib/httpHeaders.d.ts | 132 + .../ms-rest-js/es/lib/httpHeaders.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/httpHeaders.js | 140 + .../ms-rest-js/es/lib/httpHeaders.js.map | 1 + .../es/lib/httpOperationResponse.d.ts | 83 + .../es/lib/httpOperationResponse.d.ts.map | 1 + .../es/lib/httpOperationResponse.js | 3 + .../es/lib/httpOperationResponse.js.map | 1 + .../es/lib/httpPipelineLogLevel.d.ts | 22 + .../es/lib/httpPipelineLogLevel.d.ts.map | 1 + .../ms-rest-js/es/lib/httpPipelineLogLevel.js | 25 + .../es/lib/httpPipelineLogLevel.js.map | 1 + .../ms-rest-js/es/lib/httpPipelineLogger.d.ts | 35 + .../es/lib/httpPipelineLogger.d.ts.map | 1 + .../ms-rest-js/es/lib/httpPipelineLogger.js | 37 + .../es/lib/httpPipelineLogger.js.map | 1 + .../@azure/ms-rest-js/es/lib/msRest.d.ts | 42 + .../@azure/ms-rest-js/es/lib/msRest.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/msRest.js | 34 + .../@azure/ms-rest-js/es/lib/msRest.js.map | 1 + .../es/lib/nodeFetchHttpClient.d.ts | 10 + .../es/lib/nodeFetchHttpClient.d.ts.map | 1 + .../ms-rest-js/es/lib/nodeFetchHttpClient.js | 109 + 
.../es/lib/nodeFetchHttpClient.js.map | 1 + .../ms-rest-js/es/lib/operationArguments.d.ts | 15 + .../es/lib/operationArguments.d.ts.map | 1 + .../ms-rest-js/es/lib/operationArguments.js | 3 + .../es/lib/operationArguments.js.map | 1 + .../ms-rest-js/es/lib/operationParameter.d.ts | 51 + .../es/lib/operationParameter.d.ts.map | 1 + .../ms-rest-js/es/lib/operationParameter.js | 24 + .../es/lib/operationParameter.js.map | 1 + .../ms-rest-js/es/lib/operationResponse.d.ts | 15 + .../es/lib/operationResponse.d.ts.map | 1 + .../ms-rest-js/es/lib/operationResponse.js | 3 + .../es/lib/operationResponse.js.map | 1 + .../ms-rest-js/es/lib/operationSpec.d.ts | 68 + .../ms-rest-js/es/lib/operationSpec.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/operationSpec.js | 16 + .../ms-rest-js/es/lib/operationSpec.js.map | 1 + .../es/lib/policies/agentPolicy.browser.d.ts | 10 + .../lib/policies/agentPolicy.browser.d.ts.map | 1 + .../es/lib/policies/agentPolicy.browser.js | 26 + .../lib/policies/agentPolicy.browser.js.map | 1 + .../es/lib/policies/agentPolicy.d.ts | 11 + .../es/lib/policies/agentPolicy.d.ts.map | 1 + .../ms-rest-js/es/lib/policies/agentPolicy.js | 28 + .../es/lib/policies/agentPolicy.js.map | 1 + .../lib/policies/deserializationPolicy.d.ts | 38 + .../policies/deserializationPolicy.d.ts.map | 1 + .../es/lib/policies/deserializationPolicy.js | 213 + .../lib/policies/deserializationPolicy.js.map | 1 + .../lib/policies/exponentialRetryPolicy.d.ts | 48 + .../policies/exponentialRetryPolicy.d.ts.map | 1 + .../es/lib/policies/exponentialRetryPolicy.js | 132 + .../policies/exponentialRetryPolicy.js.map | 1 + .../generateClientRequestIdPolicy.d.ts | 10 + .../generateClientRequestIdPolicy.d.ts.map | 1 + .../policies/generateClientRequestIdPolicy.js | 30 + .../generateClientRequestIdPolicy.js.map | 1 + .../ms-rest-js/es/lib/policies/logPolicy.d.ts | 10 + .../es/lib/policies/logPolicy.d.ts.map | 1 + .../ms-rest-js/es/lib/policies/logPolicy.js | 35 + .../es/lib/policies/logPolicy.js.map | 1 + .../msRestUserAgentPolicy.browser.d.ts | 4 + .../msRestUserAgentPolicy.browser.d.ts.map | 1 + .../policies/msRestUserAgentPolicy.browser.js | 14 + .../msRestUserAgentPolicy.browser.js.map | 1 + .../lib/policies/msRestUserAgentPolicy.d.ts | 4 + .../policies/msRestUserAgentPolicy.d.ts.map | 1 + .../es/lib/policies/msRestUserAgentPolicy.js | 19 + .../lib/policies/msRestUserAgentPolicy.js.map | 1 + .../es/lib/policies/proxyPolicy.browser.d.ts | 11 + .../lib/policies/proxyPolicy.browser.d.ts.map | 1 + .../es/lib/policies/proxyPolicy.browser.js | 29 + .../lib/policies/proxyPolicy.browser.js.map | 1 + .../es/lib/policies/proxyPolicy.d.ts | 24 + .../es/lib/policies/proxyPolicy.d.ts.map | 1 + .../ms-rest-js/es/lib/policies/proxyPolicy.js | 146 + .../es/lib/policies/proxyPolicy.js.map | 1 + .../es/lib/policies/redirectPolicy.d.ts | 18 + .../es/lib/policies/redirectPolicy.d.ts.map | 1 + .../es/lib/policies/redirectPolicy.js | 72 + .../es/lib/policies/redirectPolicy.js.map | 1 + .../es/lib/policies/requestPolicy.d.ts | 71 + .../es/lib/policies/requestPolicy.d.ts.map | 1 + .../es/lib/policies/requestPolicy.js | 60 + .../es/lib/policies/requestPolicy.js.map | 1 + .../es/lib/policies/rpRegistrationPolicy.d.ts | 10 + .../policies/rpRegistrationPolicy.d.ts.map | 1 + .../es/lib/policies/rpRegistrationPolicy.js | 163 + .../lib/policies/rpRegistrationPolicy.js.map | 1 + .../es/lib/policies/signingPolicy.d.ts | 12 + .../es/lib/policies/signingPolicy.d.ts.map | 1 + .../es/lib/policies/signingPolicy.js | 31 + 
.../es/lib/policies/signingPolicy.js.map | 1 + .../lib/policies/systemErrorRetryPolicy.d.ts | 37 + .../policies/systemErrorRetryPolicy.d.ts.map | 1 + .../es/lib/policies/systemErrorRetryPolicy.js | 135 + .../policies/systemErrorRetryPolicy.js.map | 1 + .../lib/policies/throttlingRetryPolicy.d.ts | 28 + .../policies/throttlingRetryPolicy.d.ts.map | 1 + .../es/lib/policies/throttlingRetryPolicy.js | 88 + .../lib/policies/throttlingRetryPolicy.js.map | 1 + .../es/lib/policies/userAgentPolicy.d.ts | 21 + .../es/lib/policies/userAgentPolicy.d.ts.map | 1 + .../es/lib/policies/userAgentPolicy.js | 68 + .../es/lib/policies/userAgentPolicy.js.map | 1 + .../@azure/ms-rest-js/es/lib/proxyAgent.d.ts | 33 + .../ms-rest-js/es/lib/proxyAgent.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/proxyAgent.js | 43 + .../ms-rest-js/es/lib/proxyAgent.js.map | 1 + .../es/lib/queryCollectionFormat.d.ts | 11 + .../es/lib/queryCollectionFormat.d.ts.map | 1 + .../es/lib/queryCollectionFormat.js | 14 + .../es/lib/queryCollectionFormat.js.map | 1 + .../@azure/ms-rest-js/es/lib/restError.d.ts | 14 + .../ms-rest-js/es/lib/restError.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/restError.js | 22 + .../@azure/ms-rest-js/es/lib/restError.js.map | 1 + .../@azure/ms-rest-js/es/lib/serializer.d.ts | 130 + .../ms-rest-js/es/lib/serializer.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/serializer.js | 789 + .../ms-rest-js/es/lib/serializer.js.map | 1 + .../ms-rest-js/es/lib/serviceClient.d.ts | 168 + .../ms-rest-js/es/lib/serviceClient.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/serviceClient.js | 488 + .../ms-rest-js/es/lib/serviceClient.js.map | 1 + .../@azure/ms-rest-js/es/lib/url.d.ts | 149 + .../@azure/ms-rest-js/es/lib/url.d.ts.map | 1 + node_modules/@azure/ms-rest-js/es/lib/url.js | 595 + .../@azure/ms-rest-js/es/lib/url.js.map | 1 + .../es/lib/util/base64.browser.d.ts | 16 + .../es/lib/util/base64.browser.d.ts.map | 1 + .../ms-rest-js/es/lib/util/base64.browser.js | 33 + .../es/lib/util/base64.browser.js.map | 1 + .../@azure/ms-rest-js/es/lib/util/base64.d.ts | 16 + .../ms-rest-js/es/lib/util/base64.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/util/base64.js | 27 + .../ms-rest-js/es/lib/util/base64.js.map | 1 + .../ms-rest-js/es/lib/util/constants.d.ts | 94 + .../ms-rest-js/es/lib/util/constants.d.ts.map | 1 + .../ms-rest-js/es/lib/util/constants.js | 96 + .../ms-rest-js/es/lib/util/constants.js.map | 1 + .../@azure/ms-rest-js/es/lib/util/utils.d.ts | 157 + .../ms-rest-js/es/lib/util/utils.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/util/utils.js | 229 + .../ms-rest-js/es/lib/util/utils.js.map | 1 + .../ms-rest-js/es/lib/util/xml.browser.d.ts | 5 + .../es/lib/util/xml.browser.d.ts.map | 1 + .../ms-rest-js/es/lib/util/xml.browser.js | 145 + .../ms-rest-js/es/lib/util/xml.browser.js.map | 1 + .../@azure/ms-rest-js/es/lib/util/xml.d.ts | 5 + .../ms-rest-js/es/lib/util/xml.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/util/xml.js | 35 + .../@azure/ms-rest-js/es/lib/util/xml.js.map | 1 + .../@azure/ms-rest-js/es/lib/webResource.d.ts | 337 + .../ms-rest-js/es/lib/webResource.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/webResource.js | 257 + .../ms-rest-js/es/lib/webResource.js.map | 1 + .../ms-rest-js/es/lib/xhrHttpClient.d.ts | 12 + .../ms-rest-js/es/lib/xhrHttpClient.d.ts.map | 1 + .../@azure/ms-rest-js/es/lib/xhrHttpClient.js | 149 + .../ms-rest-js/es/lib/xhrHttpClient.js.map | 1 + .../ms-rest-js/lib/browserFetchHttpClient.ts | 25 + .../lib/credentials/apiKeyCredentials.ts | 89 + .../azureIdentityTokenCredentialAdapter.ts | 60 + 
.../basicAuthenticationCredentials.ts | 54 + .../ms-rest-js/lib/credentials/credentials.ts | 4 + .../lib/credentials/domainCredentials.ts | 24 + .../credentials/serviceClientCredentials.ts | 14 + .../lib/credentials/tokenCredentials.ts | 48 + .../lib/credentials/tokenResponse.ts | 12 + .../lib/credentials/topicCredentials.ts | 24 + .../lib/defaultHttpClient.browser.ts | 4 + .../ms-rest-js/lib/defaultHttpClient.ts | 4 + .../@azure/ms-rest-js/lib/fetchHttpClient.ts | 256 + .../@azure/ms-rest-js/lib/httpClient.ts | 9 + .../@azure/ms-rest-js/lib/httpHeaders.ts | 230 + .../ms-rest-js/lib/httpOperationResponse.ts | 94 + .../ms-rest-js/lib/httpPipelineLogLevel.ts | 27 + .../ms-rest-js/lib/httpPipelineLogger.ts | 55 + node_modules/@azure/ms-rest-js/lib/msRest.ts | 114 + .../ms-rest-js/lib/nodeFetchHttpClient.ts | 95 + .../ms-rest-js/lib/operationArguments.ts | 19 + .../ms-rest-js/lib/operationParameter.ts | 74 + .../ms-rest-js/lib/operationResponse.ts | 19 + .../@azure/ms-rest-js/lib/operationSpec.ts | 98 + .../lib/policies/agentPolicy.browser.ts | 33 + .../ms-rest-js/lib/policies/agentPolicy.ts | 40 + .../lib/policies/deserializationPolicy.ts | 294 + .../lib/policies/exponentialRetryPolicy.ts | 220 + .../policies/generateClientRequestIdPolicy.ts | 39 + .../ms-rest-js/lib/policies/logPolicy.ts | 47 + .../policies/msRestUserAgentPolicy.browser.ts | 28 + .../lib/policies/msRestUserAgentPolicy.ts | 24 + .../lib/policies/proxyPolicy.browser.ts | 37 + .../ms-rest-js/lib/policies/proxyPolicy.ts | 168 + .../ms-rest-js/lib/policies/redirectPolicy.ts | 105 + .../ms-rest-js/lib/policies/requestPolicy.ts | 98 + .../lib/policies/rpRegistrationPolicy.ts | 197 + .../ms-rest-js/lib/policies/signingPolicy.ts | 42 + .../lib/policies/systemErrorRetryPolicy.ts | 187 + .../lib/policies/throttlingRetryPolicy.ts | 105 + .../lib/policies/userAgentPolicy.ts | 88 + .../@azure/ms-rest-js/lib/proxyAgent.ts | 81 + .../ms-rest-js/lib/queryCollectionFormat.ts | 13 + .../@azure/ms-rest-js/lib/restError.ts | 34 + .../@azure/ms-rest-js/lib/serializer.ts | 1063 + .../@azure/ms-rest-js/lib/serviceClient.ts | 852 + node_modules/@azure/ms-rest-js/lib/url.ts | 659 + .../ms-rest-js/lib/util/base64.browser.ts | 35 + .../@azure/ms-rest-js/lib/util/base64.ts | 29 + .../@azure/ms-rest-js/lib/util/constants.ts | 108 + .../@azure/ms-rest-js/lib/util/utils.ts | 288 + .../@azure/ms-rest-js/lib/util/xml.browser.ts | 149 + .../@azure/ms-rest-js/lib/util/xml.ts | 35 + .../@azure/ms-rest-js/lib/webResource.ts | 668 + .../@azure/ms-rest-js/lib/xhrHttpClient.ts | 178 + .../@azure/ms-rest-js/node_modules/.bin/uuid | 1 + .../ms-rest-js/node_modules/uuid/CHANGELOG.md | 229 + .../node_modules/uuid/CONTRIBUTING.md | 18 + .../ms-rest-js/node_modules/uuid/LICENSE.md | 9 + .../ms-rest-js/node_modules/uuid/README.md | 505 + .../node_modules/uuid/dist/bin/uuid | 2 + .../uuid/dist/esm-browser/index.js | 9 + .../node_modules/uuid/dist/esm-browser/md5.js | 215 + .../node_modules/uuid/dist/esm-browser/nil.js | 1 + .../uuid/dist/esm-browser/parse.js | 35 + .../uuid/dist/esm-browser/regex.js | 1 + .../node_modules/uuid/dist/esm-browser/rng.js | 19 + .../uuid/dist/esm-browser/sha1.js | 96 + .../uuid/dist/esm-browser/stringify.js | 30 + .../node_modules/uuid/dist/esm-browser/v1.js | 95 + .../node_modules/uuid/dist/esm-browser/v3.js | 4 + .../node_modules/uuid/dist/esm-browser/v35.js | 64 + .../node_modules/uuid/dist/esm-browser/v4.js | 24 + .../node_modules/uuid/dist/esm-browser/v5.js | 4 + .../uuid/dist/esm-browser/validate.js | 7 + 
.../uuid/dist/esm-browser/version.js | 11 + .../node_modules/uuid/dist/esm-node/index.js | 9 + .../node_modules/uuid/dist/esm-node/md5.js | 13 + .../node_modules/uuid/dist/esm-node/nil.js | 1 + .../node_modules/uuid/dist/esm-node/parse.js | 35 + .../node_modules/uuid/dist/esm-node/regex.js | 1 + .../node_modules/uuid/dist/esm-node/rng.js | 12 + .../node_modules/uuid/dist/esm-node/sha1.js | 13 + .../uuid/dist/esm-node/stringify.js | 29 + .../node_modules/uuid/dist/esm-node/v1.js | 95 + .../node_modules/uuid/dist/esm-node/v3.js | 4 + .../node_modules/uuid/dist/esm-node/v35.js | 64 + .../node_modules/uuid/dist/esm-node/v4.js | 24 + .../node_modules/uuid/dist/esm-node/v5.js | 4 + .../uuid/dist/esm-node/validate.js | 7 + .../uuid/dist/esm-node/version.js | 11 + .../node_modules/uuid/dist/index.js | 79 + .../node_modules/uuid/dist/md5-browser.js | 223 + .../ms-rest-js/node_modules/uuid/dist/md5.js | 23 + .../ms-rest-js/node_modules/uuid/dist/nil.js | 8 + .../node_modules/uuid/dist/parse.js | 45 + .../node_modules/uuid/dist/regex.js | 8 + .../node_modules/uuid/dist/rng-browser.js | 26 + .../ms-rest-js/node_modules/uuid/dist/rng.js | 24 + .../node_modules/uuid/dist/sha1-browser.js | 104 + .../ms-rest-js/node_modules/uuid/dist/sha1.js | 23 + .../node_modules/uuid/dist/stringify.js | 39 + .../node_modules/uuid/dist/umd/uuid.min.js | 1 + .../node_modules/uuid/dist/umd/uuidNIL.min.js | 1 + .../uuid/dist/umd/uuidParse.min.js | 1 + .../uuid/dist/umd/uuidStringify.min.js | 1 + .../uuid/dist/umd/uuidValidate.min.js | 1 + .../uuid/dist/umd/uuidVersion.min.js | 1 + .../node_modules/uuid/dist/umd/uuidv1.min.js | 1 + .../node_modules/uuid/dist/umd/uuidv3.min.js | 1 + .../node_modules/uuid/dist/umd/uuidv4.min.js | 1 + .../node_modules/uuid/dist/umd/uuidv5.min.js | 1 + .../node_modules/uuid/dist/uuid-bin.js | 85 + .../ms-rest-js/node_modules/uuid/dist/v1.js | 107 + .../ms-rest-js/node_modules/uuid/dist/v3.js | 16 + .../ms-rest-js/node_modules/uuid/dist/v35.js | 78 + .../ms-rest-js/node_modules/uuid/dist/v4.js | 37 + .../ms-rest-js/node_modules/uuid/dist/v5.js | 16 + .../node_modules/uuid/dist/validate.js | 17 + .../node_modules/uuid/dist/version.js | 21 + .../ms-rest-js/node_modules/uuid/package.json | 135 + .../ms-rest-js/node_modules/uuid/wrapper.mjs | 10 + node_modules/@azure/ms-rest-js/package.json | 181 + node_modules/@azure/storage-blob/CHANGELOG.md | 517 + node_modules/@azure/storage-blob/LICENSE | 21 + node_modules/@azure/storage-blob/README.md | 533 + .../storage-blob/src/BatchResponse.js | 4 + .../storage-blob/src/BatchResponse.js.map | 1 + .../storage-blob/src/BatchResponseParser.js | 136 + .../src/BatchResponseParser.js.map | 1 + .../storage-blob/src/BatchUtils.browser.js | 11 + .../src/BatchUtils.browser.js.map | 1 + .../dist-esm/storage-blob/src/BatchUtils.js | 15 + .../storage-blob/src/BatchUtils.js.map | 1 + .../dist-esm/storage-blob/src/BlobBatch.js | 282 + .../storage-blob/src/BlobBatch.js.map | 1 + .../storage-blob/src/BlobBatchClient.js | 153 + .../storage-blob/src/BlobBatchClient.js.map | 1 + .../src/BlobDownloadResponse.browser.js | 7 + .../src/BlobDownloadResponse.browser.js.map | 1 + .../storage-blob/src/BlobDownloadResponse.js | 455 + .../src/BlobDownloadResponse.js.map | 1 + .../storage-blob/src/BlobLeaseClient.js | 221 + .../storage-blob/src/BlobLeaseClient.js.map | 1 + .../src/BlobQueryResponse.browser.js | 362 + .../src/BlobQueryResponse.browser.js.map | 1 + .../storage-blob/src/BlobQueryResponse.js | 367 + .../storage-blob/src/BlobQueryResponse.js.map | 1 + 
.../storage-blob/src/BlobServiceClient.js | 786 + .../storage-blob/src/BlobServiceClient.js.map | 1 + .../dist-esm/storage-blob/src/Clients.js | 2751 + .../dist-esm/storage-blob/src/Clients.js.map | 1 + .../storage-blob/src/ContainerClient.js | 1273 + .../storage-blob/src/ContainerClient.js.map | 1 + .../storage-blob/src/PageBlobRangeResponse.js | 24 + .../src/PageBlobRangeResponse.js.map | 1 + .../dist-esm/storage-blob/src/Pipeline.js | 107 + .../dist-esm/storage-blob/src/Pipeline.js.map | 1 + .../dist-esm/storage-blob/src/Range.js | 21 + .../dist-esm/storage-blob/src/Range.js.map | 1 + .../src/StorageBrowserPolicyFactory.js | 19 + .../src/StorageBrowserPolicyFactory.js.map | 1 + .../storage-blob/src/StorageClient.js | 42 + .../storage-blob/src/StorageClient.js.map | 1 + .../src/StorageRetryPolicyFactory.js | 26 + .../src/StorageRetryPolicyFactory.js.map | 1 + .../src/TelemetryPolicyFactory.js | 50 + .../src/TelemetryPolicyFactory.js.map | 1 + .../src/credentials/AnonymousCredential.js | 22 + .../credentials/AnonymousCredential.js.map | 1 + .../src/credentials/Credential.js | 18 + .../src/credentials/Credential.js.map | 1 + .../StorageSharedKeyCredential.browser.js | 5 + .../StorageSharedKeyCredential.browser.js.map | 1 + .../credentials/StorageSharedKeyCredential.js | 40 + .../StorageSharedKeyCredential.js.map | 1 + .../UserDelegationKeyCredential.browser.js | 5 + ...UserDelegationKeyCredential.browser.js.map | 1 + .../UserDelegationKeyCredential.js | 31 + .../UserDelegationKeyCredential.js.map | 1 + .../storage-blob/src/generated/src/index.js | 11 + .../src/generated/src/index.js.map | 1 + .../src/generated/src/models/index.js | 9 + .../src/generated/src/models/index.js.map | 1 + .../src/generated/src/models/mappers.js | 8192 +++ .../src/generated/src/models/mappers.js.map | 1 + .../src/generated/src/models/parameters.js | 1608 + .../generated/src/models/parameters.js.map | 1 + .../generated/src/operations/appendBlob.js | 237 + .../src/operations/appendBlob.js.map | 1 + .../src/generated/src/operations/blob.js | 1079 + .../src/generated/src/operations/blob.js.map | 1 + .../src/generated/src/operations/blockBlob.js | 391 + .../generated/src/operations/blockBlob.js.map | 1 + .../src/generated/src/operations/container.js | 769 + .../generated/src/operations/container.js.map | 1 + .../src/generated/src/operations/index.js | 14 + .../src/generated/src/operations/index.js.map | 1 + .../src/generated/src/operations/pageBlob.js | 494 + .../generated/src/operations/pageBlob.js.map | 1 + .../src/generated/src/operations/service.js | 345 + .../generated/src/operations/service.js.map | 1 + .../src/generated/src/storageClient.js | 27 + .../src/generated/src/storageClient.js.map | 1 + .../src/generated/src/storageClientContext.js | 39 + .../generated/src/storageClientContext.js.map | 1 + .../storage-blob/src/generatedModels.js | 4 + .../storage-blob/src/generatedModels.js.map | 1 + .../storage-blob/src/index.browser.js | 22 + .../storage-blob/src/index.browser.js.map | 1 + .../dist-esm/storage-blob/src/index.js | 32 + .../dist-esm/storage-blob/src/index.js.map | 1 + .../dist-esm/storage-blob/src/log.js | 8 + .../dist-esm/storage-blob/src/log.js.map | 1 + .../dist-esm/storage-blob/src/models.js | 104 + .../dist-esm/storage-blob/src/models.js.map | 1 + .../src/policies/AnonymousCredentialPolicy.js | 20 + .../policies/AnonymousCredentialPolicy.js.map | 1 + .../src/policies/CredentialPolicy.js | 29 + .../src/policies/CredentialPolicy.js.map | 1 + ...earerTokenChallengeAuthenticationPolicy.js | 245 + 
...rTokenChallengeAuthenticationPolicy.js.map | 1 + .../src/policies/StorageBrowserPolicy.js | 46 + .../src/policies/StorageBrowserPolicy.js.map | 1 + .../src/policies/StorageRetryPolicy.js | 214 + .../src/policies/StorageRetryPolicy.js.map | 1 + .../StorageSharedKeyCredentialPolicy.js | 140 + .../StorageSharedKeyCredentialPolicy.js.map | 1 + .../src/policies/TelemetryPolicy.js | 36 + .../src/policies/TelemetryPolicy.js.map | 1 + .../src/pollers/BlobStartCopyFromUrlPoller.js | 130 + .../pollers/BlobStartCopyFromUrlPoller.js.map | 1 + .../src/sas/AccountSASPermissions.js | 227 + .../src/sas/AccountSASPermissions.js.map | 1 + .../src/sas/AccountSASResourceTypes.js | 72 + .../src/sas/AccountSASResourceTypes.js.map | 1 + .../src/sas/AccountSASServices.js | 80 + .../src/sas/AccountSASServices.js.map | 1 + .../src/sas/AccountSASSignatureValues.js | 93 + .../src/sas/AccountSASSignatureValues.js.map | 1 + .../src/sas/BlobSASPermissions.js | 195 + .../src/sas/BlobSASPermissions.js.map | 1 + .../src/sas/BlobSASSignatureValues.js | 555 + .../src/sas/BlobSASSignatureValues.js.map | 1 + .../src/sas/ContainerSASPermissions.js | 221 + .../src/sas/ContainerSASPermissions.js.map | 1 + .../src/sas/SASQueryParameters.js | 234 + .../src/sas/SASQueryParameters.js.map | 1 + .../storage-blob/src/sas/SasIPRange.js | 13 + .../storage-blob/src/sas/SasIPRange.js.map | 1 + .../dist-esm/storage-blob/src/utils/Batch.js | 122 + .../storage-blob/src/utils/Batch.js.map | 1 + .../src/utils/BlobQuickQueryStream.js | 111 + .../src/utils/BlobQuickQueryStream.js.map | 1 + .../dist-esm/storage-blob/src/utils/Mutex.js | 65 + .../storage-blob/src/utils/Mutex.js.map | 1 + .../src/utils/RetriableReadableStream.js | 111 + .../src/utils/RetriableReadableStream.js.map | 1 + .../dist-esm/storage-blob/src/utils/cache.js | 8 + .../storage-blob/src/utils/cache.js.map | 1 + .../storage-blob/src/utils/constants.js | 198 + .../storage-blob/src/utils/constants.js.map | 1 + .../storage-blob/src/utils/tracing.js | 27 + .../storage-blob/src/utils/tracing.js.map | 1 + .../storage-blob/src/utils/utils.browser.js | 48 + .../src/utils/utils.browser.js.map | 1 + .../storage-blob/src/utils/utils.common.js | 992 + .../src/utils/utils.common.js.map | 1 + .../storage-blob/src/utils/utils.node.js | 125 + .../storage-blob/src/utils/utils.node.js.map | 1 + .../src/BufferScheduler.browser.js | 8 + .../src/BufferScheduler.browser.js.map | 1 + .../storage-common/src/BufferScheduler.js | 252 + .../storage-common/src/BufferScheduler.js.map | 1 + .../storage-common/src/BuffersStream.js | 83 + .../storage-common/src/BuffersStream.js.map | 1 + .../storage-common/src/PooledBuffer.js | 87 + .../storage-common/src/PooledBuffer.js.map | 1 + .../storage-common/src/index.browser.js | 4 + .../storage-common/src/index.browser.js.map | 1 + .../dist-esm/storage-common/src/index.js | 4 + .../dist-esm/storage-common/src/index.js.map | 1 + .../src/AvroConstants.js | 7 + .../src/AvroConstants.js.map | 1 + .../storage-internal-avro/src/AvroParser.js | 311 + .../src/AvroParser.js.map | 1 + .../storage-internal-avro/src/AvroReadable.js | 5 + .../src/AvroReadable.js.map | 1 + .../src/AvroReadableFromBlob.js | 47 + .../src/AvroReadableFromBlob.js.map | 1 + .../src/AvroReadableFromStream.js | 84 + .../src/AvroReadableFromStream.js.map | 1 + .../storage-internal-avro/src/AvroReader.js | 107 + .../src/AvroReader.js.map | 1 + .../src/index.browser.js | 6 + .../src/index.browser.js.map | 1 + .../storage-internal-avro/src/index.js | 6 + .../storage-internal-avro/src/index.js.map | 1 + 
.../src/utils/utils.common.js | 17 + .../src/utils/utils.common.js.map | 1 + .../@azure/storage-blob/dist/index.js | 25339 +++++++ .../@azure/storage-blob/dist/index.js.map | 1 + .../node_modules/tslib/CopyrightNotice.txt | 15 + .../node_modules/tslib/LICENSE.txt | 12 + .../storage-blob/node_modules/tslib/README.md | 164 + .../node_modules/tslib/modules/index.js | 55 + .../node_modules/tslib/modules/package.json | 3 + .../node_modules/tslib/package.json | 38 + .../node_modules/tslib/tslib.d.ts | 398 + .../node_modules/tslib/tslib.es6.html | 1 + .../node_modules/tslib/tslib.es6.js | 248 + .../node_modules/tslib/tslib.html | 1 + .../storage-blob/node_modules/tslib/tslib.js | 317 + node_modules/@azure/storage-blob/package.json | 187 + .../storage-blob/types/3.1/storage-blob.d.ts | 9345 +++ .../types/latest/storage-blob.d.ts | 9716 +++ node_modules/@opentelemetry/api/CHANGELOG.md | 204 + node_modules/@opentelemetry/api/LICENSE | 201 + node_modules/@opentelemetry/api/README.md | 140 + .../api/build/esm/api/context.d.ts | 41 + .../api/build/esm/api/context.js | 90 + .../api/build/esm/api/context.js.map | 1 + .../api/build/esm/api/diag.d.ts | 38 + .../@opentelemetry/api/build/esm/api/diag.js | 90 + .../api/build/esm/api/diag.js.map | 1 + .../api/build/esm/api/propagation.d.ts | 48 + .../api/build/esm/api/propagation.js | 88 + .../api/build/esm/api/propagation.js.map | 1 + .../api/build/esm/api/trace.d.ts | 39 + .../@opentelemetry/api/build/esm/api/trace.js | 76 + .../api/build/esm/api/trace.js.map | 1 + .../build/esm/baggage/context-helpers.d.ts | 23 + .../api/build/esm/baggage/context-helpers.js | 47 + .../build/esm/baggage/context-helpers.js.map | 1 + .../esm/baggage/internal/baggage-impl.d.ts | 12 + .../esm/baggage/internal/baggage-impl.js | 61 + .../esm/baggage/internal/baggage-impl.js.map | 1 + .../build/esm/baggage/internal/symbol.d.ts | 5 + .../api/build/esm/baggage/internal/symbol.js | 20 + .../build/esm/baggage/internal/symbol.js.map | 1 + .../api/build/esm/baggage/types.d.ts | 60 + .../api/build/esm/baggage/types.js | 17 + .../api/build/esm/baggage/types.js.map | 1 + .../api/build/esm/baggage/utils.d.ts | 15 + .../api/build/esm/baggage/utils.js | 47 + .../api/build/esm/baggage/utils.js.map | 1 + .../api/build/esm/common/Attributes.d.ts | 15 + .../api/build/esm/common/Attributes.js | 17 + .../api/build/esm/common/Attributes.js.map | 1 + .../api/build/esm/common/Exception.d.ts | 26 + .../api/build/esm/common/Exception.js | 17 + .../api/build/esm/common/Exception.js.map | 1 + .../api/build/esm/common/Time.d.ts | 20 + .../api/build/esm/common/Time.js | 2 + .../api/build/esm/common/Time.js.map | 1 + .../build/esm/context/NoopContextManager.d.ts | 9 + .../build/esm/context/NoopContextManager.js | 47 + .../esm/context/NoopContextManager.js.map | 1 + .../api/build/esm/context/context.d.ts | 6 + .../api/build/esm/context/context.js | 52 + .../api/build/esm/context/context.js.map | 1 + .../api/build/esm/context/types.d.ts | 52 + .../api/build/esm/context/types.js | 17 + .../api/build/esm/context/types.js.map | 1 + .../api/build/esm/diag/ComponentLogger.d.ts | 20 + .../api/build/esm/diag/ComponentLogger.js | 77 + .../api/build/esm/diag/ComponentLogger.js.map | 1 + .../api/build/esm/diag/consoleLogger.d.ts | 38 + .../api/build/esm/diag/consoleLogger.js | 59 + .../api/build/esm/diag/consoleLogger.js.map | 1 + .../api/build/esm/diag/index.d.ts | 3 + .../api/build/esm/diag/index.js | 18 + .../api/build/esm/diag/index.js.map | 1 + .../esm/diag/internal/logLevelLogger.d.ts | 3 + 
.../build/esm/diag/internal/logLevelLogger.js | 41 + .../esm/diag/internal/logLevelLogger.js.map | 1 + .../build/esm/diag/internal/noopLogger.d.ts | 8 + .../api/build/esm/diag/internal/noopLogger.js | 31 + .../build/esm/diag/internal/noopLogger.js.map | 1 + .../api/build/esm/diag/types.d.ts | 70 + .../api/build/esm/diag/types.js | 41 + .../api/build/esm/diag/types.js.map | 1 + .../@opentelemetry/api/build/esm/index.d.ts | 57 + .../@opentelemetry/api/build/esm/index.js | 67 + .../@opentelemetry/api/build/esm/index.js.map | 1 + .../api/build/esm/internal/global-utils.d.ts | 16 + .../api/build/esm/internal/global-utils.js | 59 + .../build/esm/internal/global-utils.js.map | 1 + .../api/build/esm/internal/semver.d.ts | 34 + .../api/build/esm/internal/semver.js | 118 + .../api/build/esm/internal/semver.js.map | 1 + .../esm/platform/browser/globalThis.d.ts | 10 + .../build/esm/platform/browser/globalThis.js | 32 + .../esm/platform/browser/globalThis.js.map | 1 + .../api/build/esm/platform/browser/index.d.ts | 2 + .../api/build/esm/platform/browser/index.js | 17 + .../build/esm/platform/browser/index.js.map | 1 + .../api/build/esm/platform/index.d.ts | 2 + .../api/build/esm/platform/index.js | 17 + .../api/build/esm/platform/index.js.map | 1 + .../build/esm/platform/node/globalThis.d.ts | 3 + .../api/build/esm/platform/node/globalThis.js | 19 + .../build/esm/platform/node/globalThis.js.map | 1 + .../api/build/esm/platform/node/index.d.ts | 2 + .../api/build/esm/platform/node/index.js | 17 + .../api/build/esm/platform/node/index.js.map | 1 + .../propagation/NoopTextMapPropagator.d.ts | 13 + .../esm/propagation/NoopTextMapPropagator.js | 34 + .../propagation/NoopTextMapPropagator.js.map | 1 + .../esm/propagation/TextMapPropagator.d.ts | 84 + .../esm/propagation/TextMapPropagator.js | 38 + .../esm/propagation/TextMapPropagator.js.map | 1 + .../api/build/esm/trace/NonRecordingSpan.d.ts | 25 + .../api/build/esm/trace/NonRecordingSpan.js | 62 + .../build/esm/trace/NonRecordingSpan.js.map | 1 + .../api/build/esm/trace/NoopTracer.d.ts | 14 + .../api/build/esm/trace/NoopTracer.js | 75 + .../api/build/esm/trace/NoopTracer.js.map | 1 + .../build/esm/trace/NoopTracerProvider.d.ts | 13 + .../api/build/esm/trace/NoopTracerProvider.js | 32 + .../build/esm/trace/NoopTracerProvider.js.map | 1 + .../api/build/esm/trace/ProxyTracer.d.ts | 27 + .../api/build/esm/trace/ProxyTracer.js | 53 + .../api/build/esm/trace/ProxyTracer.js.map | 1 + .../build/esm/trace/ProxyTracerProvider.d.ts | 25 + .../build/esm/trace/ProxyTracerProvider.js | 54 + .../esm/trace/ProxyTracerProvider.js.map | 1 + .../api/build/esm/trace/Sampler.d.ts | 30 + .../api/build/esm/trace/Sampler.js | 17 + .../api/build/esm/trace/Sampler.js.map | 1 + .../api/build/esm/trace/SamplingResult.d.ts | 39 + .../api/build/esm/trace/SamplingResult.js | 38 + .../api/build/esm/trace/SamplingResult.js.map | 1 + .../api/build/esm/trace/SpanOptions.d.ts | 23 + .../api/build/esm/trace/SpanOptions.js | 17 + .../api/build/esm/trace/SpanOptions.js.map | 1 + .../api/build/esm/trace/attributes.d.ts | 10 + .../api/build/esm/trace/attributes.js | 17 + .../api/build/esm/trace/attributes.js.map | 1 + .../api/build/esm/trace/context-utils.d.ts | 37 + .../api/build/esm/trace/context-utils.js | 66 + .../api/build/esm/trace/context-utils.js.map | 1 + .../esm/trace/internal/tracestate-impl.d.ts | 22 + .../esm/trace/internal/tracestate-impl.js | 102 + .../esm/trace/internal/tracestate-impl.js.map | 1 + .../trace/internal/tracestate-validators.d.ts | 15 + 
.../trace/internal/tracestate-validators.js | 41 + .../internal/tracestate-validators.js.map | 1 + .../api/build/esm/trace/internal/utils.d.ts | 3 + .../api/build/esm/trace/internal/utils.js | 20 + .../api/build/esm/trace/internal/utils.js.map | 1 + .../esm/trace/invalid-span-constants.d.ts | 5 + .../build/esm/trace/invalid-span-constants.js | 24 + .../esm/trace/invalid-span-constants.js.map | 1 + .../api/build/esm/trace/link.d.ts | 24 + .../api/build/esm/trace/link.js | 17 + .../api/build/esm/trace/link.js.map | 1 + .../api/build/esm/trace/span.d.ts | 101 + .../api/build/esm/trace/span.js | 17 + .../api/build/esm/trace/span.js.map | 1 + .../api/build/esm/trace/span_context.d.ts | 53 + .../api/build/esm/trace/span_context.js | 17 + .../api/build/esm/trace/span_context.js.map | 1 + .../api/build/esm/trace/span_kind.d.ts | 27 + .../api/build/esm/trace/span_kind.js | 43 + .../api/build/esm/trace/span_kind.js.map | 1 + .../build/esm/trace/spancontext-utils.d.ts | 17 + .../api/build/esm/trace/spancontext-utils.js | 42 + .../build/esm/trace/spancontext-utils.js.map | 1 + .../api/build/esm/trace/status.d.ts | 25 + .../api/build/esm/trace/status.js | 20 + .../api/build/esm/trace/status.js.map | 1 + .../api/build/esm/trace/trace_flags.d.ts | 7 + .../api/build/esm/trace/trace_flags.js | 23 + .../api/build/esm/trace/trace_flags.js.map | 1 + .../api/build/esm/trace/trace_state.d.ts | 38 + .../api/build/esm/trace/trace_state.js | 17 + .../api/build/esm/trace/trace_state.js.map | 1 + .../api/build/esm/trace/tracer.d.ts | 71 + .../api/build/esm/trace/tracer.js | 17 + .../api/build/esm/trace/tracer.js.map | 1 + .../api/build/esm/trace/tracer_options.d.ts | 10 + .../api/build/esm/trace/tracer_options.js | 17 + .../api/build/esm/trace/tracer_options.js.map | 1 + .../api/build/esm/trace/tracer_provider.d.ts | 21 + .../api/build/esm/trace/tracer_provider.js | 17 + .../build/esm/trace/tracer_provider.js.map | 1 + .../@opentelemetry/api/build/esm/version.d.ts | 2 + .../@opentelemetry/api/build/esm/version.js | 18 + .../api/build/esm/version.js.map | 1 + .../api/build/src/api/context.d.ts | 41 + .../api/build/src/api/context.js | 93 + .../api/build/src/api/context.js.map | 1 + .../api/build/src/api/diag.d.ts | 38 + .../@opentelemetry/api/build/src/api/diag.js | 93 + .../api/build/src/api/diag.js.map | 1 + .../api/build/src/api/propagation.d.ts | 48 + .../api/build/src/api/propagation.js | 91 + .../api/build/src/api/propagation.js.map | 1 + .../api/build/src/api/trace.d.ts | 39 + .../@opentelemetry/api/build/src/api/trace.js | 79 + .../api/build/src/api/trace.js.map | 1 + .../build/src/baggage/context-helpers.d.ts | 23 + .../api/build/src/baggage/context-helpers.js | 53 + .../build/src/baggage/context-helpers.js.map | 1 + .../src/baggage/internal/baggage-impl.d.ts | 12 + .../src/baggage/internal/baggage-impl.js | 64 + .../src/baggage/internal/baggage-impl.js.map | 1 + .../build/src/baggage/internal/symbol.d.ts | 5 + .../api/build/src/baggage/internal/symbol.js | 23 + .../build/src/baggage/internal/symbol.js.map | 1 + .../api/build/src/baggage/types.d.ts | 60 + .../api/build/src/baggage/types.js | 18 + .../api/build/src/baggage/types.js.map | 1 + .../api/build/src/baggage/utils.d.ts | 15 + .../api/build/src/baggage/utils.js | 52 + .../api/build/src/baggage/utils.js.map | 1 + .../api/build/src/common/Attributes.d.ts | 15 + .../api/build/src/common/Attributes.js | 18 + .../api/build/src/common/Attributes.js.map | 1 + .../api/build/src/common/Exception.d.ts | 26 + .../api/build/src/common/Exception.js | 18 + 
.../api/build/src/common/Exception.js.map | 1 + .../api/build/src/common/Time.d.ts | 20 + .../api/build/src/common/Time.js | 3 + .../api/build/src/common/Time.js.map | 1 + .../build/src/context/NoopContextManager.d.ts | 9 + .../build/src/context/NoopContextManager.js | 50 + .../src/context/NoopContextManager.js.map | 1 + .../api/build/src/context/context.d.ts | 6 + .../api/build/src/context/context.js | 56 + .../api/build/src/context/context.js.map | 1 + .../api/build/src/context/types.d.ts | 52 + .../api/build/src/context/types.js | 18 + .../api/build/src/context/types.js.map | 1 + .../api/build/src/diag/ComponentLogger.d.ts | 20 + .../api/build/src/diag/ComponentLogger.js | 80 + .../api/build/src/diag/ComponentLogger.js.map | 1 + .../api/build/src/diag/consoleLogger.d.ts | 38 + .../api/build/src/diag/consoleLogger.js | 62 + .../api/build/src/diag/consoleLogger.js.map | 1 + .../api/build/src/diag/index.d.ts | 3 + .../api/build/src/diag/index.js | 30 + .../api/build/src/diag/index.js.map | 1 + .../src/diag/internal/logLevelLogger.d.ts | 3 + .../build/src/diag/internal/logLevelLogger.js | 45 + .../src/diag/internal/logLevelLogger.js.map | 1 + .../build/src/diag/internal/noopLogger.d.ts | 8 + .../api/build/src/diag/internal/noopLogger.js | 35 + .../build/src/diag/internal/noopLogger.js.map | 1 + .../api/build/src/diag/types.d.ts | 70 + .../api/build/src/diag/types.js | 44 + .../api/build/src/diag/types.js.map | 1 + .../@opentelemetry/api/build/src/index.d.ts | 57 + .../@opentelemetry/api/build/src/index.js | 88 + .../@opentelemetry/api/build/src/index.js.map | 1 + .../api/build/src/internal/global-utils.d.ts | 16 + .../api/build/src/internal/global-utils.js | 65 + .../build/src/internal/global-utils.js.map | 1 + .../api/build/src/internal/semver.d.ts | 34 + .../api/build/src/internal/semver.js | 122 + .../api/build/src/internal/semver.js.map | 1 + .../src/platform/browser/globalThis.d.ts | 10 + .../build/src/platform/browser/globalThis.js | 35 + .../src/platform/browser/globalThis.js.map | 1 + .../api/build/src/platform/browser/index.d.ts | 2 + .../api/build/src/platform/browser/index.js | 29 + .../build/src/platform/browser/index.js.map | 1 + .../api/build/src/platform/index.d.ts | 2 + .../api/build/src/platform/index.js | 29 + .../api/build/src/platform/index.js.map | 1 + .../build/src/platform/node/globalThis.d.ts | 3 + .../api/build/src/platform/node/globalThis.js | 22 + .../build/src/platform/node/globalThis.js.map | 1 + .../api/build/src/platform/node/index.d.ts | 2 + .../api/build/src/platform/node/index.js | 29 + .../api/build/src/platform/node/index.js.map | 1 + .../propagation/NoopTextMapPropagator.d.ts | 13 + .../src/propagation/NoopTextMapPropagator.js | 37 + .../propagation/NoopTextMapPropagator.js.map | 1 + .../src/propagation/TextMapPropagator.d.ts | 84 + .../src/propagation/TextMapPropagator.js | 41 + .../src/propagation/TextMapPropagator.js.map | 1 + .../api/build/src/trace/NonRecordingSpan.d.ts | 25 + .../api/build/src/trace/NonRecordingSpan.js | 65 + .../build/src/trace/NonRecordingSpan.js.map | 1 + .../api/build/src/trace/NoopTracer.d.ts | 14 + .../api/build/src/trace/NoopTracer.js | 78 + .../api/build/src/trace/NoopTracer.js.map | 1 + .../build/src/trace/NoopTracerProvider.d.ts | 13 + .../api/build/src/trace/NoopTracerProvider.js | 35 + .../build/src/trace/NoopTracerProvider.js.map | 1 + .../api/build/src/trace/ProxyTracer.d.ts | 27 + .../api/build/src/trace/ProxyTracer.js | 56 + .../api/build/src/trace/ProxyTracer.js.map | 1 + 
.../build/src/trace/ProxyTracerProvider.d.ts | 25 + .../build/src/trace/ProxyTracerProvider.js | 57 + .../src/trace/ProxyTracerProvider.js.map | 1 + .../api/build/src/trace/Sampler.d.ts | 30 + .../api/build/src/trace/Sampler.js | 18 + .../api/build/src/trace/Sampler.js.map | 1 + .../api/build/src/trace/SamplingResult.d.ts | 39 + .../api/build/src/trace/SamplingResult.js | 41 + .../api/build/src/trace/SamplingResult.js.map | 1 + .../api/build/src/trace/SpanOptions.d.ts | 23 + .../api/build/src/trace/SpanOptions.js | 18 + .../api/build/src/trace/SpanOptions.js.map | 1 + .../api/build/src/trace/attributes.d.ts | 10 + .../api/build/src/trace/attributes.js | 18 + .../api/build/src/trace/attributes.js.map | 1 + .../api/build/src/trace/context-utils.d.ts | 37 + .../api/build/src/trace/context-utils.js | 74 + .../api/build/src/trace/context-utils.js.map | 1 + .../src/trace/internal/tracestate-impl.d.ts | 22 + .../src/trace/internal/tracestate-impl.js | 105 + .../src/trace/internal/tracestate-impl.js.map | 1 + .../trace/internal/tracestate-validators.d.ts | 15 + .../trace/internal/tracestate-validators.js | 46 + .../internal/tracestate-validators.js.map | 1 + .../api/build/src/trace/internal/utils.d.ts | 3 + .../api/build/src/trace/internal/utils.js | 24 + .../api/build/src/trace/internal/utils.js.map | 1 + .../src/trace/invalid-span-constants.d.ts | 5 + .../build/src/trace/invalid-span-constants.js | 27 + .../src/trace/invalid-span-constants.js.map | 1 + .../api/build/src/trace/link.d.ts | 24 + .../api/build/src/trace/link.js | 18 + .../api/build/src/trace/link.js.map | 1 + .../api/build/src/trace/span.d.ts | 101 + .../api/build/src/trace/span.js | 18 + .../api/build/src/trace/span.js.map | 1 + .../api/build/src/trace/span_context.d.ts | 53 + .../api/build/src/trace/span_context.js | 18 + .../api/build/src/trace/span_context.js.map | 1 + .../api/build/src/trace/span_kind.d.ts | 27 + .../api/build/src/trace/span_kind.js | 46 + .../api/build/src/trace/span_kind.js.map | 1 + .../build/src/trace/spancontext-utils.d.ts | 17 + .../api/build/src/trace/spancontext-utils.js | 49 + .../build/src/trace/spancontext-utils.js.map | 1 + .../api/build/src/trace/status.d.ts | 25 + .../api/build/src/trace/status.js | 23 + .../api/build/src/trace/status.js.map | 1 + .../api/build/src/trace/trace_flags.d.ts | 7 + .../api/build/src/trace/trace_flags.js | 26 + .../api/build/src/trace/trace_flags.js.map | 1 + .../api/build/src/trace/trace_state.d.ts | 38 + .../api/build/src/trace/trace_state.js | 18 + .../api/build/src/trace/trace_state.js.map | 1 + .../api/build/src/trace/tracer.d.ts | 71 + .../api/build/src/trace/tracer.js | 18 + .../api/build/src/trace/tracer.js.map | 1 + .../api/build/src/trace/tracer_options.d.ts | 10 + .../api/build/src/trace/tracer_options.js | 18 + .../api/build/src/trace/tracer_options.js.map | 1 + .../api/build/src/trace/tracer_provider.d.ts | 21 + .../api/build/src/trace/tracer_provider.js | 18 + .../build/src/trace/tracer_provider.js.map | 1 + .../@opentelemetry/api/build/src/version.d.ts | 2 + .../@opentelemetry/api/build/src/version.js | 21 + .../api/build/src/version.js.map | 1 + node_modules/@opentelemetry/api/package.json | 90 + node_modules/@types/node-fetch/LICENSE | 21 + node_modules/@types/node-fetch/README.md | 16 + node_modules/@types/node-fetch/externals.d.ts | 21 + node_modules/@types/node-fetch/index.d.ts | 224 + .../node-fetch/node_modules/form-data/License | 19 + .../node_modules/form-data/README.md.bak | 356 + .../node_modules/form-data/Readme.md | 356 + 
.../node_modules/form-data/index.d.ts | 62 + .../node_modules/form-data/lib/browser.js | 2 + .../node_modules/form-data/lib/form_data.js | 498 + .../node_modules/form-data/lib/populate.js | 10 + .../node_modules/form-data/package.json | 68 + node_modules/@types/node-fetch/package.json | 83 + node_modules/@types/tunnel/LICENSE | 21 + node_modules/@types/tunnel/README.md | 67 + node_modules/@types/tunnel/index.d.ts | 47 + node_modules/@types/tunnel/package.json | 27 + node_modules/abort-controller/LICENSE | 21 + node_modules/abort-controller/README.md | 98 + node_modules/abort-controller/browser.js | 13 + node_modules/abort-controller/browser.mjs | 11 + .../dist/abort-controller.d.ts | 43 + .../abort-controller/dist/abort-controller.js | 127 + .../dist/abort-controller.js.map | 1 + .../dist/abort-controller.mjs | 118 + .../dist/abort-controller.mjs.map | 1 + .../dist/abort-controller.umd.js | 5 + .../dist/abort-controller.umd.js.map | 1 + node_modules/abort-controller/package.json | 97 + node_modules/abort-controller/polyfill.js | 21 + node_modules/abort-controller/polyfill.mjs | 19 + node_modules/asynckit/LICENSE | 21 + node_modules/asynckit/README.md | 233 + node_modules/asynckit/bench.js | 76 + node_modules/asynckit/index.js | 6 + node_modules/asynckit/lib/abort.js | 29 + node_modules/asynckit/lib/async.js | 34 + node_modules/asynckit/lib/defer.js | 26 + node_modules/asynckit/lib/iterate.js | 75 + .../asynckit/lib/readable_asynckit.js | 91 + .../asynckit/lib/readable_parallel.js | 25 + node_modules/asynckit/lib/readable_serial.js | 25 + .../asynckit/lib/readable_serial_ordered.js | 29 + node_modules/asynckit/lib/state.js | 37 + node_modules/asynckit/lib/streamify.js | 141 + node_modules/asynckit/lib/terminator.js | 29 + node_modules/asynckit/package.json | 63 + node_modules/asynckit/parallel.js | 43 + node_modules/asynckit/serial.js | 17 + node_modules/asynckit/serialOrdered.js | 75 + node_modules/asynckit/stream.js | 21 + .../balanced-match/.github/FUNDING.yml | 2 + node_modules/balanced-match/LICENSE.md | 21 + node_modules/balanced-match/README.md | 97 + node_modules/balanced-match/index.js | 62 + node_modules/balanced-match/package.json | 48 + node_modules/brace-expansion/LICENSE | 21 + node_modules/brace-expansion/README.md | 129 + node_modules/brace-expansion/index.js | 201 + node_modules/brace-expansion/package.json | 47 + node_modules/cache/.eslintrc.json | 22 + node_modules/cache/.gitattributes | 2 + node_modules/cache/.github/CODEOWNERS | 1 + node_modules/cache/.github/auto_assign.yml | 17 + .../.github/workflows/auto-assign-issues.yml | 15 + .../cache/.github/workflows/auto-assign.yml | 10 + .../cache/.github/workflows/check-dist.yml | 49 + .../workflows/close-inactive-issues.yml | 22 + .../cache/.github/workflows/codeql.yml | 52 + .../cache/.github/workflows/licensed.yml | 24 + .../cache/.github/workflows/workflow.yml | 143 + node_modules/cache/.licensed.yml | 16 + .../.licenses/npm/@actions/cache.dep.yml | 20 + .../cache/.licenses/npm/@actions/core.dep.yml | 20 + .../cache/.licenses/npm/@actions/exec.dep.yml | 20 + .../cache/.licenses/npm/@actions/glob.dep.yml | 20 + .../npm/@actions/http-client.dep.yml | 32 + .../cache/.licenses/npm/@actions/io.dep.yml | 20 + .../npm/@azure/abort-controller.dep.yml | 32 + .../core-asynciterator-polyfill.dep.yml | 32 + .../.licenses/npm/@azure/core-auth.dep.yml | 33 + .../.licenses/npm/@azure/core-http.dep.yml | 33 + .../.licenses/npm/@azure/core-lro.dep.yml | 32 + .../.licenses/npm/@azure/core-paging.dep.yml | 32 + 
.../.licenses/npm/@azure/core-tracing.dep.yml | 32 + .../cache/.licenses/npm/@azure/logger.dep.yml | 32 + .../.licenses/npm/@azure/ms-rest-js.dep.yml | 32 + .../.licenses/npm/@azure/storage-blob.dep.yml | 32 + .../.licenses/npm/@opentelemetry/api.dep.yml | 225 + .../.licenses/npm/@types/node-fetch.dep.yml | 32 + .../cache/.licenses/npm/@types/node.dep.yml | 32 + .../cache/.licenses/npm/@types/tunnel.dep.yml | 32 + .../.licenses/npm/abort-controller.dep.yml | 32 + .../cache/.licenses/npm/asynckit.dep.yml | 34 + .../.licenses/npm/balanced-match.dep.yml | 55 + .../.licenses/npm/brace-expansion.dep.yml | 55 + .../.licenses/npm/combined-stream.dep.yml | 32 + .../cache/.licenses/npm/concat-map.dep.yml | 31 + .../.licenses/npm/delayed-stream.dep.yml | 32 + .../.licenses/npm/event-target-shim.dep.yml | 33 + .../cache/.licenses/npm/events.dep.yml | 38 + .../.licenses/npm/form-data-2.5.1.dep.yml | 33 + .../.licenses/npm/form-data-3.0.1.dep.yml | 33 + .../.licenses/npm/form-data-4.0.0.dep.yml | 33 + .../cache/.licenses/npm/ip-regex.dep.yml | 34 + .../cache/.licenses/npm/mime-db.dep.yml | 33 + .../cache/.licenses/npm/mime-types.dep.yml | 47 + .../cache/.licenses/npm/minimatch.dep.yml | 26 + .../cache/.licenses/npm/node-fetch.dep.yml | 56 + .../cache/.licenses/npm/process.dep.yml | 33 + node_modules/cache/.licenses/npm/psl.dep.yml | 43 + .../cache/.licenses/npm/punycode.dep.yml | 34 + node_modules/cache/.licenses/npm/sax.dep.yml | 52 + .../cache/.licenses/npm/semver.dep.yml | 26 + .../.licenses/npm/tough-cookie-3.0.1.dep.yml | 23 + .../.licenses/npm/tough-cookie-4.0.0.dep.yml | 23 + node_modules/cache/.licenses/npm/tr46.dep.yml | 30 + .../cache/.licenses/npm/tslib-1.14.1.dep.yml | 35 + .../cache/.licenses/npm/tslib-2.3.1.dep.yml | 35 + .../cache/.licenses/npm/tunnel.dep.yml | 35 + .../cache/.licenses/npm/universalify.dep.yml | 33 + .../cache/.licenses/npm/uuid-3.4.0.dep.yml | 39 + .../cache/.licenses/npm/uuid-8.3.2.dep.yml | 20 + .../.licenses/npm/webidl-conversions.dep.yml | 23 + .../cache/.licenses/npm/whatwg-url.dep.yml | 32 + .../cache/.licenses/npm/xml2js.dep.yml | 30 + .../cache/.licenses/npm/xmlbuilder.dep.yml | 24 + node_modules/cache/.prettierrc.json | 11 + node_modules/cache/CODE_OF_CONDUCT.md | 76 + node_modules/cache/CONTRIBUTING.md | 38 + node_modules/cache/LICENSE | 22 + node_modules/cache/README.md | 184 + node_modules/cache/RELEASES.md | 8 + .../__tests__/__fixtures__/helloWorld.txt | 1 + .../cache/__tests__/actionUtils.test.ts | 274 + .../cache/__tests__/create-cache-files.sh | 17 + node_modules/cache/__tests__/restore.test.ts | 360 + node_modules/cache/__tests__/save.test.ts | 392 + .../cache/__tests__/verify-cache-files.sh | 36 + node_modules/cache/action.yml | 27 + node_modules/cache/dist/restore/index.js | 60410 +++++++++++++++ node_modules/cache/dist/save/index.js | 60413 ++++++++++++++++ node_modules/cache/examples.md | 609 + node_modules/cache/jest.config.js | 23 + node_modules/cache/package.json | 51 + node_modules/cache/src/constants.ts | 23 + node_modules/cache/src/restore.ts | 71 + node_modules/cache/src/save.ts | 69 + node_modules/cache/src/utils/actionUtils.ts | 94 + node_modules/cache/src/utils/testUtils.ts | 30 + node_modules/cache/tsconfig.json | 63 + node_modules/combined-stream/License | 19 + node_modules/combined-stream/Readme.md | 138 + .../combined-stream/lib/combined_stream.js | 208 + node_modules/combined-stream/package.json | 25 + node_modules/combined-stream/yarn.lock | 17 + node_modules/concat-map/.travis.yml | 4 + node_modules/concat-map/LICENSE | 18 + 
node_modules/concat-map/README.markdown | 62 + node_modules/concat-map/example/map.js | 6 + node_modules/concat-map/index.js | 13 + node_modules/concat-map/package.json | 43 + node_modules/concat-map/test/map.js | 39 + node_modules/delayed-stream/.npmignore | 1 + node_modules/delayed-stream/License | 19 + node_modules/delayed-stream/Makefile | 7 + node_modules/delayed-stream/Readme.md | 141 + .../delayed-stream/lib/delayed_stream.js | 107 + node_modules/delayed-stream/package.json | 27 + node_modules/event-target-shim/LICENSE | 22 + node_modules/event-target-shim/README.md | 293 + .../dist/event-target-shim.js | 871 + .../dist/event-target-shim.js.map | 1 + .../dist/event-target-shim.mjs | 862 + .../dist/event-target-shim.mjs.map | 1 + .../dist/event-target-shim.umd.js | 6 + .../dist/event-target-shim.umd.js.map | 1 + node_modules/event-target-shim/index.d.ts | 399 + node_modules/event-target-shim/package.json | 82 + node_modules/events/.airtap.yml | 15 + node_modules/events/.github/FUNDING.yml | 12 + node_modules/events/.travis.yml | 18 + node_modules/events/History.md | 118 + node_modules/events/LICENSE | 22 + node_modules/events/Readme.md | 50 + node_modules/events/events.js | 497 + node_modules/events/package.json | 37 + node_modules/events/security.md | 10 + node_modules/events/tests/add-listeners.js | 111 + .../events/tests/check-listener-leaks.js | 101 + node_modules/events/tests/common.js | 104 + node_modules/events/tests/errors.js | 13 + node_modules/events/tests/events-list.js | 28 + node_modules/events/tests/events-once.js | 234 + node_modules/events/tests/index.js | 64 + node_modules/events/tests/legacy-compat.js | 16 + node_modules/events/tests/listener-count.js | 37 + .../events/tests/listeners-side-effects.js | 56 + node_modules/events/tests/listeners.js | 168 + node_modules/events/tests/max-listeners.js | 47 + node_modules/events/tests/method-names.js | 35 + node_modules/events/tests/modify-in-emit.js | 90 + node_modules/events/tests/num-args.js | 60 + node_modules/events/tests/once.js | 83 + node_modules/events/tests/prepend.js | 31 + .../events/tests/remove-all-listeners.js | 133 + node_modules/events/tests/remove-listeners.js | 212 + .../tests/set-max-listeners-side-effects.js | 31 + .../events/tests/special-event-names.js | 45 + node_modules/events/tests/subclass.js | 66 + node_modules/events/tests/symbols.js | 25 + node_modules/form-data/License | 19 + node_modules/form-data/README.md | 350 + node_modules/form-data/README.md.bak | 350 + node_modules/form-data/index.d.ts | 51 + node_modules/form-data/lib/browser.js | 2 + node_modules/form-data/lib/form_data.js | 483 + node_modules/form-data/lib/populate.js | 10 + node_modules/form-data/package.json | 68 + node_modules/ip-regex/index.js | 24 + node_modules/ip-regex/license | 21 + node_modules/ip-regex/package.json | 45 + node_modules/ip-regex/readme.md | 63 + node_modules/mime-db/HISTORY.md | 507 + node_modules/mime-db/LICENSE | 23 + node_modules/mime-db/README.md | 100 + node_modules/mime-db/db.json | 8519 +++ node_modules/mime-db/index.js | 12 + node_modules/mime-db/package.json | 60 + node_modules/mime-types/HISTORY.md | 397 + node_modules/mime-types/LICENSE | 23 + node_modules/mime-types/README.md | 113 + node_modules/mime-types/index.js | 188 + node_modules/mime-types/package.json | 44 + .../node_modules/semver => minimatch}/LICENSE | 0 node_modules/minimatch/README.md | 230 + node_modules/minimatch/minimatch.js | 947 + node_modules/minimatch/package.json | 33 + node_modules/process/.eslintrc | 21 + 
node_modules/process/LICENSE | 22 + node_modules/process/README.md | 26 + node_modules/process/browser.js | 184 + node_modules/process/index.js | 2 + node_modules/process/package.json | 27 + node_modules/process/test.js | 199 + node_modules/psl/LICENSE | 9 + node_modules/psl/README.md | 215 + node_modules/psl/browserstack-logo.svg | 90 + node_modules/psl/data/rules.json | 8834 +++ node_modules/psl/dist/psl.js | 9645 +++ node_modules/psl/dist/psl.min.js | 1 + node_modules/psl/index.js | 269 + node_modules/psl/package.json | 44 + node_modules/punycode/LICENSE-MIT.txt | 20 + node_modules/punycode/README.md | 122 + node_modules/punycode/package.json | 58 + node_modules/punycode/punycode.es6.js | 441 + node_modules/punycode/punycode.js | 440 + node_modules/sax/LICENSE | 41 + node_modules/sax/README.md | 225 + node_modules/sax/lib/sax.js | 1565 + node_modules/sax/package.json | 25 + .../node_modules => }/semver/CHANGELOG.md | 0 node_modules/semver/LICENSE | 15 + .../node_modules => }/semver/README.md | 0 .../node_modules => }/semver/bin/semver.js | 0 .../node_modules => }/semver/package.json | 0 .../node_modules => }/semver/range.bnf | 0 .../node_modules => }/semver/semver.js | 0 node_modules/tough-cookie/LICENSE | 12 + node_modules/tough-cookie/README.md | 527 + node_modules/tough-cookie/lib/cookie.js | 1488 + node_modules/tough-cookie/lib/memstore.js | 181 + node_modules/tough-cookie/lib/pathMatch.js | 61 + .../tough-cookie/lib/permuteDomain.js | 56 + .../tough-cookie/lib/pubsuffix-psl.js | 38 + node_modules/tough-cookie/lib/store.js | 75 + node_modules/tough-cookie/lib/version.js | 2 + node_modules/tough-cookie/package.json | 79 + node_modules/tslib/CopyrightNotice.txt | 15 + node_modules/tslib/LICENSE.txt | 12 + node_modules/tslib/README.md | 142 + node_modules/tslib/modules/index.js | 51 + node_modules/tslib/modules/package.json | 3 + node_modules/tslib/package.json | 37 + .../index.js | 23 + .../package.json | 6 + node_modules/tslib/tslib.d.ts | 37 + node_modules/tslib/tslib.es6.html | 1 + node_modules/tslib/tslib.es6.js | 218 + node_modules/tslib/tslib.html | 1 + node_modules/tslib/tslib.js | 284 + node_modules/universalify/LICENSE | 20 + node_modules/universalify/README.md | 76 + node_modules/universalify/index.js | 25 + node_modules/universalify/package.json | 34 + node_modules/xml2js/LICENSE | 19 + node_modules/xml2js/README.md | 488 + node_modules/xml2js/lib/bom.js | 12 + node_modules/xml2js/lib/builder.js | 127 + node_modules/xml2js/lib/defaults.js | 72 + node_modules/xml2js/lib/parser.js | 381 + node_modules/xml2js/lib/processors.js | 34 + node_modules/xml2js/lib/xml2js.js | 39 + node_modules/xml2js/package.json | 92 + node_modules/xmlbuilder/CHANGELOG.md | 470 + node_modules/xmlbuilder/LICENSE | 21 + node_modules/xmlbuilder/README.md | 86 + node_modules/xmlbuilder/appveyor.yml | 20 + node_modules/xmlbuilder/lib/Derivation.js | 10 + .../xmlbuilder/lib/DocumentPosition.js | 12 + node_modules/xmlbuilder/lib/NodeType.js | 23 + node_modules/xmlbuilder/lib/OperationType.js | 11 + node_modules/xmlbuilder/lib/Utility.js | 83 + node_modules/xmlbuilder/lib/WriterState.js | 10 + node_modules/xmlbuilder/lib/XMLAttribute.js | 108 + node_modules/xmlbuilder/lib/XMLCData.js | 36 + .../xmlbuilder/lib/XMLCharacterData.js | 79 + node_modules/xmlbuilder/lib/XMLComment.js | 36 + .../xmlbuilder/lib/XMLDOMConfiguration.js | 64 + .../xmlbuilder/lib/XMLDOMErrorHandler.js | 16 + .../xmlbuilder/lib/XMLDOMImplementation.js | 32 + .../xmlbuilder/lib/XMLDOMStringList.js | 28 + 
node_modules/xmlbuilder/lib/XMLDTDAttList.js | 55 + node_modules/xmlbuilder/lib/XMLDTDElement.js | 38 + node_modules/xmlbuilder/lib/XMLDTDEntity.js | 97 + node_modules/xmlbuilder/lib/XMLDTDNotation.js | 52 + node_modules/xmlbuilder/lib/XMLDeclaration.js | 43 + node_modules/xmlbuilder/lib/XMLDocType.js | 186 + node_modules/xmlbuilder/lib/XMLDocument.js | 242 + node_modules/xmlbuilder/lib/XMLDocumentCB.js | 528 + .../xmlbuilder/lib/XMLDocumentFragment.js | 24 + node_modules/xmlbuilder/lib/XMLDummy.js | 31 + node_modules/xmlbuilder/lib/XMLElement.js | 298 + .../xmlbuilder/lib/XMLNamedNodeMap.js | 58 + node_modules/xmlbuilder/lib/XMLNode.js | 785 + node_modules/xmlbuilder/lib/XMLNodeFilter.js | 48 + node_modules/xmlbuilder/lib/XMLNodeList.js | 28 + .../lib/XMLProcessingInstruction.js | 49 + node_modules/xmlbuilder/lib/XMLRaw.js | 35 + .../xmlbuilder/lib/XMLStreamWriter.js | 176 + .../xmlbuilder/lib/XMLStringWriter.js | 35 + node_modules/xmlbuilder/lib/XMLStringifier.js | 240 + node_modules/xmlbuilder/lib/XMLText.js | 69 + node_modules/xmlbuilder/lib/XMLTypeInfo.js | 21 + .../xmlbuilder/lib/XMLUserDataHandler.js | 16 + node_modules/xmlbuilder/lib/XMLWriterBase.js | 428 + node_modules/xmlbuilder/lib/index.js | 65 + node_modules/xmlbuilder/package.json | 39 + package-lock.json | 984 +- 1985 files changed, 339000 insertions(+), 57 deletions(-) rename node_modules/{@actions/tool-cache/node_modules => }/.bin/semver (100%) create mode 100644 node_modules/@actions/cache/LICENSE.md create mode 100644 node_modules/@actions/cache/README.md create mode 100644 node_modules/@actions/cache/lib/cache.d.ts create mode 100644 node_modules/@actions/cache/lib/cache.js create mode 100644 node_modules/@actions/cache/lib/cache.js.map create mode 100644 node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts create mode 100644 node_modules/@actions/cache/lib/internal/cacheHttpClient.js create mode 100644 node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map create mode 100644 node_modules/@actions/cache/lib/internal/cacheUtils.d.ts create mode 100644 node_modules/@actions/cache/lib/internal/cacheUtils.js create mode 100644 node_modules/@actions/cache/lib/internal/cacheUtils.js.map create mode 100644 node_modules/@actions/cache/lib/internal/constants.d.ts create mode 100644 node_modules/@actions/cache/lib/internal/constants.js create mode 100644 node_modules/@actions/cache/lib/internal/constants.js.map create mode 100644 node_modules/@actions/cache/lib/internal/downloadUtils.d.ts create mode 100644 node_modules/@actions/cache/lib/internal/downloadUtils.js create mode 100644 node_modules/@actions/cache/lib/internal/downloadUtils.js.map create mode 100644 node_modules/@actions/cache/lib/internal/requestUtils.d.ts create mode 100644 node_modules/@actions/cache/lib/internal/requestUtils.js create mode 100644 node_modules/@actions/cache/lib/internal/requestUtils.js.map create mode 100644 node_modules/@actions/cache/lib/internal/tar.d.ts create mode 100644 node_modules/@actions/cache/lib/internal/tar.js create mode 100644 node_modules/@actions/cache/lib/internal/tar.js.map create mode 100644 node_modules/@actions/cache/lib/options.d.ts create mode 100644 node_modules/@actions/cache/lib/options.js create mode 100644 node_modules/@actions/cache/lib/options.js.map create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/LICENSE create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/README.md create mode 100644 
node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.d.ts create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.js create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.js.map create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.d.ts create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.js create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.js.map create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.d.ts create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.js create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.js.map create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.d.ts create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.js create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.js.map create mode 100644 node_modules/@actions/cache/node_modules/@actions/http-client/package.json create mode 100644 node_modules/@actions/cache/package.json create mode 100644 node_modules/@actions/core/lib/summary.d.ts create mode 100644 node_modules/@actions/core/lib/summary.js create mode 100644 node_modules/@actions/core/lib/summary.js.map create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/LICENSE create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/README.md create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.d.ts create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.js create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.js.map create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/index.d.ts create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/index.js create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/index.js.map create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.d.ts create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.js create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.js.map create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.d.ts create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.js create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.js.map create mode 100644 node_modules/@actions/core/node_modules/@actions/http-client/package.json create mode 100644 node_modules/@actions/glob/LICENSE.md create mode 100644 node_modules/@actions/glob/README.md create mode 100644 node_modules/@actions/glob/lib/glob.d.ts create mode 100644 node_modules/@actions/glob/lib/glob.js create mode 100644 node_modules/@actions/glob/lib/glob.js.map create mode 100644 node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts create mode 100644 node_modules/@actions/glob/lib/internal-glob-options-helper.js create mode 100644 node_modules/@actions/glob/lib/internal-glob-options-helper.js.map create mode 100644 node_modules/@actions/glob/lib/internal-glob-options.d.ts create mode 100644 
node_modules/@actions/glob/lib/internal-glob-options.js create mode 100644 node_modules/@actions/glob/lib/internal-glob-options.js.map create mode 100644 node_modules/@actions/glob/lib/internal-globber.d.ts create mode 100644 node_modules/@actions/glob/lib/internal-globber.js create mode 100644 node_modules/@actions/glob/lib/internal-globber.js.map create mode 100644 node_modules/@actions/glob/lib/internal-match-kind.d.ts create mode 100644 node_modules/@actions/glob/lib/internal-match-kind.js create mode 100644 node_modules/@actions/glob/lib/internal-match-kind.js.map create mode 100644 node_modules/@actions/glob/lib/internal-path-helper.d.ts create mode 100644 node_modules/@actions/glob/lib/internal-path-helper.js create mode 100644 node_modules/@actions/glob/lib/internal-path-helper.js.map create mode 100644 node_modules/@actions/glob/lib/internal-path.d.ts create mode 100644 node_modules/@actions/glob/lib/internal-path.js create mode 100644 node_modules/@actions/glob/lib/internal-path.js.map create mode 100644 node_modules/@actions/glob/lib/internal-pattern-helper.d.ts create mode 100644 node_modules/@actions/glob/lib/internal-pattern-helper.js create mode 100644 node_modules/@actions/glob/lib/internal-pattern-helper.js.map create mode 100644 node_modules/@actions/glob/lib/internal-pattern.d.ts create mode 100644 node_modules/@actions/glob/lib/internal-pattern.js create mode 100644 node_modules/@actions/glob/lib/internal-pattern.js.map create mode 100644 node_modules/@actions/glob/lib/internal-search-state.d.ts create mode 100644 node_modules/@actions/glob/lib/internal-search-state.js create mode 100644 node_modules/@actions/glob/lib/internal-search-state.js.map create mode 100644 node_modules/@actions/glob/package.json create mode 100644 node_modules/@azure/abort-controller/CHANGELOG.md create mode 100644 node_modules/@azure/abort-controller/LICENSE create mode 100644 node_modules/@azure/abort-controller/README.md create mode 100644 node_modules/@azure/abort-controller/dist-esm/src/AbortController.js create mode 100644 node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map create mode 100644 node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js create mode 100644 node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map create mode 100644 node_modules/@azure/abort-controller/dist-esm/src/index.js create mode 100644 node_modules/@azure/abort-controller/dist-esm/src/index.js.map create mode 100644 node_modules/@azure/abort-controller/dist/index.js create mode 100644 node_modules/@azure/abort-controller/dist/index.js.map create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/CopyrightNotice.txt create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/LICENSE.txt create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/README.md create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/modules/index.js create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/modules/package.json create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/package.json create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/tslib.d.ts create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.html create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.js create mode 100644 node_modules/@azure/abort-controller/node_modules/tslib/tslib.html create mode 100644 
node_modules/@azure/abort-controller/node_modules/tslib/tslib.js create mode 100644 node_modules/@azure/abort-controller/package.json create mode 100644 node_modules/@azure/abort-controller/shims-public.d.ts create mode 100644 node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts create mode 100644 node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts create mode 100644 node_modules/@azure/abort-controller/types/3.1/index.d.ts create mode 100644 node_modules/@azure/abort-controller/types/src/AbortController.d.ts create mode 100644 node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map create mode 100644 node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts create mode 100644 node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map create mode 100644 node_modules/@azure/abort-controller/types/src/index.d.ts create mode 100644 node_modules/@azure/abort-controller/types/src/index.d.ts.map create mode 100644 node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json create mode 100644 node_modules/@azure/core-auth/CHANGELOG.md create mode 100644 node_modules/@azure/core-auth/LICENSE create mode 100644 node_modules/@azure/core-auth/README.md create mode 100644 node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js create mode 100644 node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js.map create mode 100644 node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js create mode 100644 node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js.map create mode 100644 node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js create mode 100644 node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js.map create mode 100644 node_modules/@azure/core-auth/dist-esm/src/index.js create mode 100644 node_modules/@azure/core-auth/dist-esm/src/index.js.map create mode 100644 node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js create mode 100644 node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js.map create mode 100644 node_modules/@azure/core-auth/dist-esm/src/tracing.js create mode 100644 node_modules/@azure/core-auth/dist-esm/src/tracing.js.map create mode 100644 node_modules/@azure/core-auth/dist-esm/src/typeguards.js create mode 100644 node_modules/@azure/core-auth/dist-esm/src/typeguards.js.map create mode 100644 node_modules/@azure/core-auth/dist/index.js create mode 100644 node_modules/@azure/core-auth/dist/index.js.map create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/CopyrightNotice.txt create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/LICENSE.txt create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/README.md create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/modules/index.js create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/modules/package.json create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/package.json create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/tslib.d.ts create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.html create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.js create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/tslib.html create mode 100644 node_modules/@azure/core-auth/node_modules/tslib/tslib.js create mode 100644 node_modules/@azure/core-auth/package.json create mode 100644 node_modules/@azure/core-auth/types/3.1/core-auth.d.ts create mode 100644 
node_modules/@azure/core-auth/types/latest/core-auth.d.ts create mode 100644 node_modules/@azure/core-http/CHANGELOG.md create mode 100644 node_modules/@azure/core-http/LICENSE create mode 100644 node_modules/@azure/core-http/README.md create mode 100644 node_modules/@azure/core-http/dist-esm/src/coreHttp.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/coreHttp.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpClient.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpClient.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpClientCache.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpHeaders.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/log.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/log.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js create mode 100644 
node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationArguments.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationArguments.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationOptions.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationOptions.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationParameter.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationParameter.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationResponse.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationResponse.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationSpec.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/operationSpec.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map create mode 100644 
node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/proxyAgent.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/restError.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/restError.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/serializer.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/serializer.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/serviceClient.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/url.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/url.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/base64.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/base64.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/constants.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/constants.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/delay.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/delay.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js create mode 
100644 node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/inspect.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/typeguards.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/typeguards.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/utils.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/utils.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/xml.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/util/xml.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/webResource.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/webResource.js.map create mode 100644 node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js create mode 100644 node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map create mode 100644 node_modules/@azure/core-http/dist/index.js create mode 100644 node_modules/@azure/core-http/dist/index.js.map create mode 100644 node_modules/@azure/core-http/dom-shim.d.ts create mode 120000 node_modules/@azure/core-http/node_modules/.bin/uuid create mode 100644 node_modules/@azure/core-http/node_modules/form-data/License create mode 100644 node_modules/@azure/core-http/node_modules/form-data/README.md.bak create mode 100644 node_modules/@azure/core-http/node_modules/form-data/Readme.md create mode 100644 node_modules/@azure/core-http/node_modules/form-data/index.d.ts create mode 100644 node_modules/@azure/core-http/node_modules/form-data/lib/browser.js create mode 100644 node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js create mode 100644 node_modules/@azure/core-http/node_modules/form-data/lib/populate.js create mode 100644 node_modules/@azure/core-http/node_modules/form-data/package.json create mode 100644 node_modules/@azure/core-http/node_modules/tough-cookie/LICENSE create mode 100644 node_modules/@azure/core-http/node_modules/tough-cookie/README.md create mode 100644 node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js create mode 100644 node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js create mode 100644 node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js create mode 100644 node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js create mode 100644 node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js create mode 100644 
node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js create mode 100644 node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js create mode 100644 node_modules/@azure/core-http/node_modules/tough-cookie/package.json create mode 100644 node_modules/@azure/core-http/node_modules/tslib/CopyrightNotice.txt create mode 100644 node_modules/@azure/core-http/node_modules/tslib/LICENSE.txt create mode 100644 node_modules/@azure/core-http/node_modules/tslib/README.md create mode 100644 node_modules/@azure/core-http/node_modules/tslib/modules/index.js create mode 100644 node_modules/@azure/core-http/node_modules/tslib/modules/package.json create mode 100644 node_modules/@azure/core-http/node_modules/tslib/package.json create mode 100644 node_modules/@azure/core-http/node_modules/tslib/tslib.d.ts create mode 100644 node_modules/@azure/core-http/node_modules/tslib/tslib.es6.html create mode 100644 node_modules/@azure/core-http/node_modules/tslib/tslib.es6.js create mode 100644 node_modules/@azure/core-http/node_modules/tslib/tslib.html create mode 100644 node_modules/@azure/core-http/node_modules/tslib/tslib.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/CHANGELOG.md create mode 100644 node_modules/@azure/core-http/node_modules/uuid/CONTRIBUTING.md create mode 100644 node_modules/@azure/core-http/node_modules/uuid/LICENSE.md create mode 100644 node_modules/@azure/core-http/node_modules/uuid/README.md create mode 100755 node_modules/@azure/core-http/node_modules/uuid/dist/bin/uuid create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/index.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/md5.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/nil.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/parse.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/regex.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/rng.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/sha1.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/stringify.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v1.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v3.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v35.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v4.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v5.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/validate.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/version.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/index.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/md5.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/nil.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/parse.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/regex.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/rng.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/sha1.js 
create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/stringify.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v1.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v3.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v35.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v4.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v5.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/validate.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/version.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/index.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/md5-browser.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/md5.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/nil.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/parse.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/regex.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/rng-browser.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/rng.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/sha1-browser.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/sha1.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/stringify.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuid.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidNIL.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidParse.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidStringify.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidValidate.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidVersion.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv1.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv3.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv4.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv5.min.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/uuid-bin.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/v1.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/v3.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/v35.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/v4.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/v5.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/validate.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/dist/version.js create mode 100644 node_modules/@azure/core-http/node_modules/uuid/package.json create mode 100644 node_modules/@azure/core-http/node_modules/uuid/wrapper.mjs create mode 100644 node_modules/@azure/core-http/package.json create mode 100644 node_modules/@azure/core-http/types/3.1/src/coreHttp.d.ts create mode 100644 
node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/log.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts create mode 
100644 node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/restError.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/serializer.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/url.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/delay.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/typeguards.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/webResource.d.ts create mode 100644 node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map create 
mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/httpClient.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/log.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/log.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map create 
mode 100644 node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map create mode 100644 
node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/restError.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/restError.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/serializer.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/url.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/url.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/base64.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/constants.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/delay.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/delay.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map create mode 100644 
node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/utils.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/util/xml.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/webResource.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map create mode 100644 node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts create mode 100644 node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map create mode 100644 node_modules/@azure/core-lro/CHANGELOG.md create mode 100644 node_modules/@azure/core-lro/LICENSE create mode 100644 node_modules/@azure/core-lro/README.md create mode 100644 node_modules/@azure/core-lro/dist-esm/src/index.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/index.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js create 
mode 100644 node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/pollOperation.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/pollOperation.js.map create mode 100644 node_modules/@azure/core-lro/dist-esm/src/poller.js create mode 100644 node_modules/@azure/core-lro/dist-esm/src/poller.js.map create mode 100644 node_modules/@azure/core-lro/dist/index.js create mode 100644 node_modules/@azure/core-lro/dist/index.js.map create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/CopyrightNotice.txt create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/LICENSE.txt create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/README.md create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/modules/index.js create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/modules/package.json create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/package.json create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/tslib.d.ts create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.html create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.js create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/tslib.html create mode 100644 node_modules/@azure/core-lro/node_modules/tslib/tslib.js create mode 100644 node_modules/@azure/core-lro/package.json create mode 100644 node_modules/@azure/core-lro/types/core-lro.d.ts create mode 100644 node_modules/@azure/core-paging/LICENSE create mode 100644 node_modules/@azure/core-paging/README.md create mode 100644 node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js create mode 100644 node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js.map create mode 100644 node_modules/@azure/core-paging/dist-esm/src/index.js create mode 100644 node_modules/@azure/core-paging/dist-esm/src/index.js.map create mode 100644 node_modules/@azure/core-paging/dist-esm/src/models.js create mode 100644 node_modules/@azure/core-paging/dist-esm/src/models.js.map create mode 100644 node_modules/@azure/core-paging/dist/index.js create mode 100644 node_modules/@azure/core-paging/dist/index.js.map create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/CopyrightNotice.txt create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/LICENSE.txt create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/README.md create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/modules/index.js create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/modules/package.json create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/package.json create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/tslib.d.ts create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.html create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.js create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/tslib.html create mode 100644 node_modules/@azure/core-paging/node_modules/tslib/tslib.js create mode 100644 node_modules/@azure/core-paging/package.json create mode 100644 node_modules/@azure/core-paging/types/3.1/core-paging.d.ts create mode 100644 node_modules/@azure/core-paging/types/3.1/samples-dev/getPagedAsyncIteratorSample.d.ts create mode 100644 node_modules/@azure/core-paging/types/3.1/src/getPagedAsyncIterator.d.ts create mode 100644 
node_modules/@azure/core-paging/types/3.1/src/index.d.ts create mode 100644 node_modules/@azure/core-paging/types/3.1/src/models.d.ts create mode 100644 node_modules/@azure/core-paging/types/3.1/test/getPagedAsyncIterator.spec.d.ts create mode 100644 node_modules/@azure/core-paging/types/latest/core-paging.d.ts create mode 100644 node_modules/@azure/core-tracing/CHANGELOG.md create mode 100644 node_modules/@azure/core-tracing/LICENSE create mode 100644 node_modules/@azure/core-tracing/README.md create mode 100644 node_modules/@azure/core-tracing/dist-esm/src/createSpan.js create mode 100644 node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map create mode 100644 node_modules/@azure/core-tracing/dist-esm/src/index.js create mode 100644 node_modules/@azure/core-tracing/dist-esm/src/index.js.map create mode 100644 node_modules/@azure/core-tracing/dist-esm/src/interfaces.js create mode 100644 node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map create mode 100644 node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js create mode 100644 node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map create mode 100644 node_modules/@azure/core-tracing/dist/index.js create mode 100644 node_modules/@azure/core-tracing/dist/index.js.map create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/CopyrightNotice.txt create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/LICENSE.txt create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/README.md create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/modules/index.js create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/modules/package.json create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/package.json create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/tslib.d.ts create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.html create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.js create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/tslib.html create mode 100644 node_modules/@azure/core-tracing/node_modules/tslib/tslib.js create mode 100644 node_modules/@azure/core-tracing/package.json create mode 100644 node_modules/@azure/core-tracing/types/core-tracing.d.ts create mode 100644 node_modules/@azure/logger/CHANGELOG.md create mode 100644 node_modules/@azure/logger/LICENSE create mode 100644 node_modules/@azure/logger/README.md create mode 100644 node_modules/@azure/logger/dist-esm/src/debug.js create mode 100644 node_modules/@azure/logger/dist-esm/src/debug.js.map create mode 100644 node_modules/@azure/logger/dist-esm/src/index.js create mode 100644 node_modules/@azure/logger/dist-esm/src/index.js.map create mode 100644 node_modules/@azure/logger/dist-esm/src/log.browser.js create mode 100644 node_modules/@azure/logger/dist-esm/src/log.browser.js.map create mode 100644 node_modules/@azure/logger/dist-esm/src/log.js create mode 100644 node_modules/@azure/logger/dist-esm/src/log.js.map create mode 100644 node_modules/@azure/logger/dist/index.js create mode 100644 node_modules/@azure/logger/dist/index.js.map create mode 100644 node_modules/@azure/logger/node_modules/tslib/CopyrightNotice.txt create mode 100644 node_modules/@azure/logger/node_modules/tslib/LICENSE.txt create mode 100644 node_modules/@azure/logger/node_modules/tslib/README.md create mode 100644 node_modules/@azure/logger/node_modules/tslib/modules/index.js 
create mode 100644 node_modules/@azure/logger/node_modules/tslib/modules/package.json create mode 100644 node_modules/@azure/logger/node_modules/tslib/package.json create mode 100644 node_modules/@azure/logger/node_modules/tslib/tslib.d.ts create mode 100644 node_modules/@azure/logger/node_modules/tslib/tslib.es6.html create mode 100644 node_modules/@azure/logger/node_modules/tslib/tslib.es6.js create mode 100644 node_modules/@azure/logger/node_modules/tslib/tslib.html create mode 100644 node_modules/@azure/logger/node_modules/tslib/tslib.js create mode 100644 node_modules/@azure/logger/package.json create mode 100644 node_modules/@azure/logger/types/logger.d.ts create mode 100644 node_modules/@azure/ms-rest-js/LICENSE create mode 100644 node_modules/@azure/ms-rest-js/README.md create mode 100644 node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt create mode 100644 node_modules/@azure/ms-rest-js/dist/msRest.browser.js create mode 100644 node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map create mode 100644 node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js create mode 100644 node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js.map create mode 100644 node_modules/@azure/ms-rest-js/dist/msRest.node.js create mode 100644 node_modules/@azure/ms-rest-js/dist/msRest.node.js.map create mode 100644 node_modules/@azure/ms-rest-js/dom-shim.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map create mode 100644 
node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpClient.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js create mode 100644 
node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/msRest.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/msRest.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationArguments.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationParameter.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationResponse.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationSpec.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts create mode 100644 node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map create mode 100644 
node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js
 create mode 100644 ... (per-file `create mode 100644` entries for the vendored packages: node_modules/@azure/ms-rest-js including its bundled uuid, node_modules/@azure/storage-blob including its bundled tslib, node_modules/@opentelemetry/api, node_modules/@types/node-fetch including its bundled form-data, node_modules/@types/tunnel, node_modules/abort-controller, node_modules/asynckit, node_modules/balanced-match, node_modules/brace-expansion, and node_modules/cache) ...
 create mode 100644
node_modules/cache/.licenses/npm/@actions/glob.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@actions/http-client.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@actions/io.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/abort-controller.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/core-asynciterator-polyfill.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/core-auth.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/core-http.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/core-lro.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/core-paging.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/core-tracing.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/logger.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/ms-rest-js.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@azure/storage-blob.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@opentelemetry/api.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@types/node-fetch.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@types/node.dep.yml create mode 100644 node_modules/cache/.licenses/npm/@types/tunnel.dep.yml create mode 100644 node_modules/cache/.licenses/npm/abort-controller.dep.yml create mode 100644 node_modules/cache/.licenses/npm/asynckit.dep.yml create mode 100644 node_modules/cache/.licenses/npm/balanced-match.dep.yml create mode 100644 node_modules/cache/.licenses/npm/brace-expansion.dep.yml create mode 100644 node_modules/cache/.licenses/npm/combined-stream.dep.yml create mode 100644 node_modules/cache/.licenses/npm/concat-map.dep.yml create mode 100644 node_modules/cache/.licenses/npm/delayed-stream.dep.yml create mode 100644 node_modules/cache/.licenses/npm/event-target-shim.dep.yml create mode 100644 node_modules/cache/.licenses/npm/events.dep.yml create mode 100644 node_modules/cache/.licenses/npm/form-data-2.5.1.dep.yml create mode 100644 node_modules/cache/.licenses/npm/form-data-3.0.1.dep.yml create mode 100644 node_modules/cache/.licenses/npm/form-data-4.0.0.dep.yml create mode 100644 node_modules/cache/.licenses/npm/ip-regex.dep.yml create mode 100644 node_modules/cache/.licenses/npm/mime-db.dep.yml create mode 100644 node_modules/cache/.licenses/npm/mime-types.dep.yml create mode 100644 node_modules/cache/.licenses/npm/minimatch.dep.yml create mode 100644 node_modules/cache/.licenses/npm/node-fetch.dep.yml create mode 100644 node_modules/cache/.licenses/npm/process.dep.yml create mode 100644 node_modules/cache/.licenses/npm/psl.dep.yml create mode 100644 node_modules/cache/.licenses/npm/punycode.dep.yml create mode 100644 node_modules/cache/.licenses/npm/sax.dep.yml create mode 100644 node_modules/cache/.licenses/npm/semver.dep.yml create mode 100644 node_modules/cache/.licenses/npm/tough-cookie-3.0.1.dep.yml create mode 100644 node_modules/cache/.licenses/npm/tough-cookie-4.0.0.dep.yml create mode 100644 node_modules/cache/.licenses/npm/tr46.dep.yml create mode 100644 node_modules/cache/.licenses/npm/tslib-1.14.1.dep.yml create mode 100644 node_modules/cache/.licenses/npm/tslib-2.3.1.dep.yml create mode 100644 node_modules/cache/.licenses/npm/tunnel.dep.yml create mode 100644 node_modules/cache/.licenses/npm/universalify.dep.yml create mode 100644 node_modules/cache/.licenses/npm/uuid-3.4.0.dep.yml create mode 100644 node_modules/cache/.licenses/npm/uuid-8.3.2.dep.yml create mode 100644 
node_modules/cache/.licenses/npm/webidl-conversions.dep.yml create mode 100644 node_modules/cache/.licenses/npm/whatwg-url.dep.yml create mode 100644 node_modules/cache/.licenses/npm/xml2js.dep.yml create mode 100644 node_modules/cache/.licenses/npm/xmlbuilder.dep.yml create mode 100644 node_modules/cache/.prettierrc.json create mode 100644 node_modules/cache/CODE_OF_CONDUCT.md create mode 100644 node_modules/cache/CONTRIBUTING.md create mode 100644 node_modules/cache/LICENSE create mode 100644 node_modules/cache/README.md create mode 100644 node_modules/cache/RELEASES.md create mode 100644 node_modules/cache/__tests__/__fixtures__/helloWorld.txt create mode 100644 node_modules/cache/__tests__/actionUtils.test.ts create mode 100755 node_modules/cache/__tests__/create-cache-files.sh create mode 100644 node_modules/cache/__tests__/restore.test.ts create mode 100644 node_modules/cache/__tests__/save.test.ts create mode 100755 node_modules/cache/__tests__/verify-cache-files.sh create mode 100644 node_modules/cache/action.yml create mode 100644 node_modules/cache/dist/restore/index.js create mode 100644 node_modules/cache/dist/save/index.js create mode 100644 node_modules/cache/examples.md create mode 100644 node_modules/cache/jest.config.js create mode 100644 node_modules/cache/package.json create mode 100644 node_modules/cache/src/constants.ts create mode 100644 node_modules/cache/src/restore.ts create mode 100644 node_modules/cache/src/save.ts create mode 100644 node_modules/cache/src/utils/actionUtils.ts create mode 100644 node_modules/cache/src/utils/testUtils.ts create mode 100644 node_modules/cache/tsconfig.json create mode 100644 node_modules/combined-stream/License create mode 100644 node_modules/combined-stream/Readme.md create mode 100644 node_modules/combined-stream/lib/combined_stream.js create mode 100644 node_modules/combined-stream/package.json create mode 100644 node_modules/combined-stream/yarn.lock create mode 100644 node_modules/concat-map/.travis.yml create mode 100644 node_modules/concat-map/LICENSE create mode 100644 node_modules/concat-map/README.markdown create mode 100644 node_modules/concat-map/example/map.js create mode 100644 node_modules/concat-map/index.js create mode 100644 node_modules/concat-map/package.json create mode 100644 node_modules/concat-map/test/map.js create mode 100644 node_modules/delayed-stream/.npmignore create mode 100644 node_modules/delayed-stream/License create mode 100644 node_modules/delayed-stream/Makefile create mode 100644 node_modules/delayed-stream/Readme.md create mode 100644 node_modules/delayed-stream/lib/delayed_stream.js create mode 100644 node_modules/delayed-stream/package.json create mode 100644 node_modules/event-target-shim/LICENSE create mode 100644 node_modules/event-target-shim/README.md create mode 100644 node_modules/event-target-shim/dist/event-target-shim.js create mode 100644 node_modules/event-target-shim/dist/event-target-shim.js.map create mode 100644 node_modules/event-target-shim/dist/event-target-shim.mjs create mode 100644 node_modules/event-target-shim/dist/event-target-shim.mjs.map create mode 100644 node_modules/event-target-shim/dist/event-target-shim.umd.js create mode 100644 node_modules/event-target-shim/dist/event-target-shim.umd.js.map create mode 100644 node_modules/event-target-shim/index.d.ts create mode 100644 node_modules/event-target-shim/package.json create mode 100644 node_modules/events/.airtap.yml create mode 100644 node_modules/events/.github/FUNDING.yml create mode 100644 
node_modules/events/.travis.yml create mode 100644 node_modules/events/History.md create mode 100644 node_modules/events/LICENSE create mode 100644 node_modules/events/Readme.md create mode 100644 node_modules/events/events.js create mode 100644 node_modules/events/package.json create mode 100644 node_modules/events/security.md create mode 100644 node_modules/events/tests/add-listeners.js create mode 100644 node_modules/events/tests/check-listener-leaks.js create mode 100644 node_modules/events/tests/common.js create mode 100644 node_modules/events/tests/errors.js create mode 100644 node_modules/events/tests/events-list.js create mode 100644 node_modules/events/tests/events-once.js create mode 100644 node_modules/events/tests/index.js create mode 100644 node_modules/events/tests/legacy-compat.js create mode 100644 node_modules/events/tests/listener-count.js create mode 100644 node_modules/events/tests/listeners-side-effects.js create mode 100644 node_modules/events/tests/listeners.js create mode 100644 node_modules/events/tests/max-listeners.js create mode 100644 node_modules/events/tests/method-names.js create mode 100644 node_modules/events/tests/modify-in-emit.js create mode 100644 node_modules/events/tests/num-args.js create mode 100644 node_modules/events/tests/once.js create mode 100644 node_modules/events/tests/prepend.js create mode 100644 node_modules/events/tests/remove-all-listeners.js create mode 100644 node_modules/events/tests/remove-listeners.js create mode 100644 node_modules/events/tests/set-max-listeners-side-effects.js create mode 100644 node_modules/events/tests/special-event-names.js create mode 100644 node_modules/events/tests/subclass.js create mode 100644 node_modules/events/tests/symbols.js create mode 100644 node_modules/form-data/License create mode 100644 node_modules/form-data/README.md create mode 100644 node_modules/form-data/README.md.bak create mode 100644 node_modules/form-data/index.d.ts create mode 100644 node_modules/form-data/lib/browser.js create mode 100644 node_modules/form-data/lib/form_data.js create mode 100644 node_modules/form-data/lib/populate.js create mode 100644 node_modules/form-data/package.json create mode 100644 node_modules/ip-regex/index.js create mode 100644 node_modules/ip-regex/license create mode 100644 node_modules/ip-regex/package.json create mode 100644 node_modules/ip-regex/readme.md create mode 100644 node_modules/mime-db/HISTORY.md create mode 100644 node_modules/mime-db/LICENSE create mode 100644 node_modules/mime-db/README.md create mode 100644 node_modules/mime-db/db.json create mode 100644 node_modules/mime-db/index.js create mode 100644 node_modules/mime-db/package.json create mode 100644 node_modules/mime-types/HISTORY.md create mode 100644 node_modules/mime-types/LICENSE create mode 100644 node_modules/mime-types/README.md create mode 100644 node_modules/mime-types/index.js create mode 100644 node_modules/mime-types/package.json rename node_modules/{@actions/tool-cache/node_modules/semver => minimatch}/LICENSE (100%) create mode 100644 node_modules/minimatch/README.md create mode 100644 node_modules/minimatch/minimatch.js create mode 100644 node_modules/minimatch/package.json create mode 100644 node_modules/process/.eslintrc create mode 100644 node_modules/process/LICENSE create mode 100644 node_modules/process/README.md create mode 100644 node_modules/process/browser.js create mode 100644 node_modules/process/index.js create mode 100644 node_modules/process/package.json create mode 100644 node_modules/process/test.js 
create mode 100644 node_modules/psl/LICENSE create mode 100644 node_modules/psl/README.md create mode 100644 node_modules/psl/browserstack-logo.svg create mode 100644 node_modules/psl/data/rules.json create mode 100644 node_modules/psl/dist/psl.js create mode 100644 node_modules/psl/dist/psl.min.js create mode 100644 node_modules/psl/index.js create mode 100644 node_modules/psl/package.json create mode 100644 node_modules/punycode/LICENSE-MIT.txt create mode 100644 node_modules/punycode/README.md create mode 100644 node_modules/punycode/package.json create mode 100644 node_modules/punycode/punycode.es6.js create mode 100644 node_modules/punycode/punycode.js create mode 100644 node_modules/sax/LICENSE create mode 100644 node_modules/sax/README.md create mode 100644 node_modules/sax/lib/sax.js create mode 100644 node_modules/sax/package.json rename node_modules/{@actions/tool-cache/node_modules => }/semver/CHANGELOG.md (100%) create mode 100644 node_modules/semver/LICENSE rename node_modules/{@actions/tool-cache/node_modules => }/semver/README.md (100%) rename node_modules/{@actions/tool-cache/node_modules => }/semver/bin/semver.js (100%) rename node_modules/{@actions/tool-cache/node_modules => }/semver/package.json (100%) rename node_modules/{@actions/tool-cache/node_modules => }/semver/range.bnf (100%) rename node_modules/{@actions/tool-cache/node_modules => }/semver/semver.js (100%) create mode 100644 node_modules/tough-cookie/LICENSE create mode 100644 node_modules/tough-cookie/README.md create mode 100644 node_modules/tough-cookie/lib/cookie.js create mode 100644 node_modules/tough-cookie/lib/memstore.js create mode 100644 node_modules/tough-cookie/lib/pathMatch.js create mode 100644 node_modules/tough-cookie/lib/permuteDomain.js create mode 100644 node_modules/tough-cookie/lib/pubsuffix-psl.js create mode 100644 node_modules/tough-cookie/lib/store.js create mode 100644 node_modules/tough-cookie/lib/version.js create mode 100644 node_modules/tough-cookie/package.json create mode 100644 node_modules/tslib/CopyrightNotice.txt create mode 100644 node_modules/tslib/LICENSE.txt create mode 100644 node_modules/tslib/README.md create mode 100644 node_modules/tslib/modules/index.js create mode 100644 node_modules/tslib/modules/package.json create mode 100644 node_modules/tslib/package.json create mode 100644 node_modules/tslib/test/validateModuleExportsMatchCommonJS/index.js create mode 100644 node_modules/tslib/test/validateModuleExportsMatchCommonJS/package.json create mode 100644 node_modules/tslib/tslib.d.ts create mode 100644 node_modules/tslib/tslib.es6.html create mode 100644 node_modules/tslib/tslib.es6.js create mode 100644 node_modules/tslib/tslib.html create mode 100644 node_modules/tslib/tslib.js create mode 100644 node_modules/universalify/LICENSE create mode 100644 node_modules/universalify/README.md create mode 100644 node_modules/universalify/index.js create mode 100644 node_modules/universalify/package.json create mode 100644 node_modules/xml2js/LICENSE create mode 100644 node_modules/xml2js/README.md create mode 100644 node_modules/xml2js/lib/bom.js create mode 100644 node_modules/xml2js/lib/builder.js create mode 100644 node_modules/xml2js/lib/defaults.js create mode 100644 node_modules/xml2js/lib/parser.js create mode 100644 node_modules/xml2js/lib/processors.js create mode 100644 node_modules/xml2js/lib/xml2js.js create mode 100644 node_modules/xml2js/package.json create mode 100644 node_modules/xmlbuilder/CHANGELOG.md create mode 100644 node_modules/xmlbuilder/LICENSE 
create mode 100644 node_modules/xmlbuilder/README.md create mode 100644 node_modules/xmlbuilder/appveyor.yml create mode 100644 node_modules/xmlbuilder/lib/Derivation.js create mode 100644 node_modules/xmlbuilder/lib/DocumentPosition.js create mode 100644 node_modules/xmlbuilder/lib/NodeType.js create mode 100644 node_modules/xmlbuilder/lib/OperationType.js create mode 100644 node_modules/xmlbuilder/lib/Utility.js create mode 100644 node_modules/xmlbuilder/lib/WriterState.js create mode 100644 node_modules/xmlbuilder/lib/XMLAttribute.js create mode 100644 node_modules/xmlbuilder/lib/XMLCData.js create mode 100644 node_modules/xmlbuilder/lib/XMLCharacterData.js create mode 100644 node_modules/xmlbuilder/lib/XMLComment.js create mode 100644 node_modules/xmlbuilder/lib/XMLDOMConfiguration.js create mode 100644 node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js create mode 100644 node_modules/xmlbuilder/lib/XMLDOMImplementation.js create mode 100644 node_modules/xmlbuilder/lib/XMLDOMStringList.js create mode 100644 node_modules/xmlbuilder/lib/XMLDTDAttList.js create mode 100644 node_modules/xmlbuilder/lib/XMLDTDElement.js create mode 100644 node_modules/xmlbuilder/lib/XMLDTDEntity.js create mode 100644 node_modules/xmlbuilder/lib/XMLDTDNotation.js create mode 100644 node_modules/xmlbuilder/lib/XMLDeclaration.js create mode 100644 node_modules/xmlbuilder/lib/XMLDocType.js create mode 100644 node_modules/xmlbuilder/lib/XMLDocument.js create mode 100644 node_modules/xmlbuilder/lib/XMLDocumentCB.js create mode 100644 node_modules/xmlbuilder/lib/XMLDocumentFragment.js create mode 100644 node_modules/xmlbuilder/lib/XMLDummy.js create mode 100644 node_modules/xmlbuilder/lib/XMLElement.js create mode 100644 node_modules/xmlbuilder/lib/XMLNamedNodeMap.js create mode 100644 node_modules/xmlbuilder/lib/XMLNode.js create mode 100644 node_modules/xmlbuilder/lib/XMLNodeFilter.js create mode 100644 node_modules/xmlbuilder/lib/XMLNodeList.js create mode 100644 node_modules/xmlbuilder/lib/XMLProcessingInstruction.js create mode 100644 node_modules/xmlbuilder/lib/XMLRaw.js create mode 100644 node_modules/xmlbuilder/lib/XMLStreamWriter.js create mode 100644 node_modules/xmlbuilder/lib/XMLStringWriter.js create mode 100644 node_modules/xmlbuilder/lib/XMLStringifier.js create mode 100644 node_modules/xmlbuilder/lib/XMLText.js create mode 100644 node_modules/xmlbuilder/lib/XMLTypeInfo.js create mode 100644 node_modules/xmlbuilder/lib/XMLUserDataHandler.js create mode 100644 node_modules/xmlbuilder/lib/XMLWriterBase.js create mode 100644 node_modules/xmlbuilder/lib/index.js create mode 100644 node_modules/xmlbuilder/package.json diff --git a/node_modules/@actions/tool-cache/node_modules/.bin/semver b/node_modules/.bin/semver similarity index 100% rename from node_modules/@actions/tool-cache/node_modules/.bin/semver rename to node_modules/.bin/semver diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index f3a79da1..24676bef 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -4,18 +4,50 @@ "lockfileVersion": 2, "requires": true, "packages": { + "node_modules/@actions/cache": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.4.tgz", + "integrity": "sha512-B6c8JNTcgMUcstwbCSCjgHrSxzJ5ABxX6Y1Q9lfe8lT08ZVrnUetEZfxYRCwTgAf/gHsjdgWHW4O8Z+VvPvUuQ==", + "dependencies": { + "@actions/core": "^1.2.6", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.0.1", + "@actions/io": "^1.0.1", + 
"@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.8.0", + "semver": "^6.1.0", + "uuid": "^3.3.3" + } + }, + "node_modules/@actions/cache/node_modules/@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "dependencies": { + "tunnel": "^0.0.6" + } + }, "node_modules/@actions/core": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz", - "integrity": "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==", + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.8.2.tgz", + "integrity": "sha512-FXcBL7nyik8K5ODeCKlxi+vts7torOkoDAKfeh61EAkAy1HAvwn9uVzZBY0f15YcQTcZZ2/iSGBFHEuioZWfDA==", + "dependencies": { + "@actions/http-client": "^2.0.1" + } + }, + "node_modules/@actions/core/node_modules/@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", "dependencies": { - "@actions/http-client": "^1.0.11" + "tunnel": "^0.0.6" } }, "node_modules/@actions/exec": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.0.tgz", - "integrity": "sha512-LImpN9AY0J1R1mEYJjVJfSZWU4zYOlEcwSTgPve1rFQqK5AwrEs6uWW5Rv70gbDIQIAUwI86z6B+9mPK4w9Sbg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", "dependencies": { "@actions/io": "^1.0.1" } @@ -31,6 +63,15 @@ "@octokit/plugin-rest-endpoint-methods": "^5.13.0" } }, + "node_modules/@actions/glob": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", + "integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==", + "dependencies": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } + }, "node_modules/@actions/http-client": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz", @@ -40,9 +81,9 @@ } }, "node_modules/@actions/io": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.1.tgz", - "integrity": "sha512-Qi4JoKXjmE0O67wAOH6y0n26QXhMKMFo7GD/4IXNVcrtLjUlGjGuVys6pQgwF3ArfGTQu0XpqaNr0YhED2RaRA==" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.2.tgz", + "integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==" }, "node_modules/@actions/tool-cache": { "version": "1.7.1", @@ -57,14 +98,217 @@ "uuid": "^3.3.2" } }, - "node_modules/@actions/tool-cache/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "node_modules/@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "dependencies": { + "tslib": "^2.2.0" + 
}, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/abort-controller/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-auth": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.3.2.tgz", + "integrity": "sha512-7CU6DmCHIZp5ZPiZ9r3J17lTKMmYsm/zGvNkjArQwPkrLlZ1TZ+EUYfGgh2X31OLMVAQCTJZW4cXHJi02EbJnA==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-auth/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-http": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.5.tgz", + "integrity": "sha512-kctMqSQ6zfnlFpuYzfUKadeTyOQYbIQ+3Rj7dzVC3Dk1dOnHroTwR9hLYKX8/n85iJpkyaksaXpuh5L7GJRYuQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tough-cookie": "^4.0.0", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.4.19" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-http/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@azure/core-http/node_modules/tough-cookie": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@azure/core-http/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-http/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", "bin": { - "semver": "bin/semver.js" + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@azure/core-lro": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.2.4.tgz", + "integrity": "sha512-e1I2v2CZM0mQo8+RSix0x091Av493e4bnT22ds2fcQGslTHzM2oTbswkB65nP4iEpCxBrFxOSDPKExmTmjCVtQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": 
">=12.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-paging": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.3.0.tgz", + "integrity": "sha512-H6Tg9eBm0brHqLy0OSAGzxIh1t4UL8eZVrSUMJ60Ra9cwq2pOskFqVpz2pYoHDsBY1jZ4V/P8LRGb5D5pmC6rg==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-paging/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-tracing": { + "version": "1.0.0-preview.13", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", + "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", + "dependencies": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-tracing/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/logger": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.0.3.tgz", + "integrity": "sha512-aK4s3Xxjrx3daZr3VylxejK3vG5ExXck5WOHDJ8in/k9AqlfIyFMMT1uG7u8mNjX+QRILTIn0/Xgschfh/dQ9g==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/logger/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/ms-rest-js": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.1.tgz", + "integrity": "sha512-LLi4jRe/qy5IM8U2CkoDgSZp2OH+MgDe2wePmhz8uY84Svc53EhHaamVyoU6BjjHBxvCRh1vcD1urJDccrxqIw==", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tough-cookie": "^3.0.1", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.4.19" + } + }, + "node_modules/@azure/ms-rest-js/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@azure/storage-blob": { + "version": "12.10.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.10.0.tgz", + "integrity": "sha512-FBEPKGnvtQJS8V8Tg1P9obgmVD9AodrIfwtwhBpsjenClhFyugMp3HPJY0tF7rInUB/CivKBCbnQKrUnKxqxzw==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^2.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + 
"engines": { + "node": ">=12.0.0" } }, + "node_modules/@azure/storage-blob/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, "node_modules/@octokit/auth-token": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", @@ -178,11 +422,69 @@ "@octokit/openapi-types": "^11.2.0" } }, + "node_modules/@opentelemetry/api": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.1.0.tgz", + "integrity": "sha512-hf+3bwuBwtXsugA2ULBc95qxrOqP2pOekLz34BJhcAKawt94vfeNyUKpYc0lZQ/3sCP6LqRa7UAdHA7i5UODzQ==", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/@types/node": { "version": "12.19.4", "resolved": "https://registry.npmjs.org/@types/node/-/node-12.19.4.tgz", - "integrity": "sha512-o3oj1bETk8kBwzz1WlO6JWL/AfAA3Vm6J1B3C9CsdxHYp7XgPiH7OEXPUbZTndHlRaIElrANkQfe6ZmfJb3H2w==", - "dev": true + "integrity": "sha512-o3oj1bETk8kBwzz1WlO6JWL/AfAA3Vm6J1B3C9CsdxHYp7XgPiH7OEXPUbZTndHlRaIElrANkQfe6ZmfJb3H2w==" + }, + "node_modules/@types/node-fetch": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-oMqjURCaxoSIsHSr1E47QHzbmzNR5rK8McHuNb11BOM9cHcIK3Avy0s/b2JlXHoQGTYS3NsvWzV1M0iK7l0wbA==", + "dependencies": { + "@types/node": "*", + "form-data": "^3.0.0" + } + }, + "node_modules/@types/node-fetch/node_modules/form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/tunnel": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", + "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "node_modules/before-after-hook": { "version": "2.2.2", @@ -194,11 +496,92 @@ "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": 
"sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/cache": { + "version": "3.0.2", + "resolved": "git+ssh://git@github.com/actions/cache.git#95f200e41cfa87b8e07f30196c0df17a67e67786", + "license": "MIT", + "dependencies": { + "@actions/cache": "^2.0.2", + "@actions/core": "^1.7.0", + "@actions/exec": "^1.1.1", + "@actions/io": "^1.1.2" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/ip-regex": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", + "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", + "engines": { + "node": ">=4" + } + }, "node_modules/is-plain-object": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", @@ -207,6 +590,36 @@ "node": ">=0.10.0" } }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + 
"version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/minimist": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", @@ -262,11 +675,63 @@ "wrappy": "1" } }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + }, + "node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/tough-cookie": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", + "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "dependencies": { + "ip-regex": "^2.1.0", + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" }, + "node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + }, "node_modules/tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", @@ -293,6 +758,14 @@ "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" }, + "node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "engines": { + "node": ">= 4.0.0" + } + }, "node_modules/uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -320,6 +793,26 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "node_modules/xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": 
"sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } } } } diff --git a/node_modules/@actions/cache/LICENSE.md b/node_modules/@actions/cache/LICENSE.md new file mode 100644 index 00000000..dbae2edb --- /dev/null +++ b/node_modules/@actions/cache/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/cache/README.md b/node_modules/@actions/cache/README.md new file mode 100644 index 00000000..541b1155 --- /dev/null +++ b/node_modules/@actions/cache/README.md @@ -0,0 +1,44 @@ +# `@actions/cache` + +> Functions necessary for caching dependencies and build outputs to improve workflow execution time. + +See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows) for how caching works. + +Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB. + +## Usage + +This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache). + +#### Save Cache + +Saves a cache containing the files in `paths` using the `key` provided. The files would be compressed using zstandard compression algorithm if zstd is installed, otherwise gzip is used. Function returns the cache id if the cache was saved succesfully and throws an error if cache upload fails. + +```js +const cache = require('@actions/cache'); +const paths = [ + 'node_modules', + 'packages/*/node_modules/' +] +const key = 'npm-foobar-d5ea0750' +const cacheId = await cache.saveCache(paths, key) +``` + +#### Restore Cache + +Restores a cache based on `key` and `restoreKeys` to the `paths` provided. 
Function returns the cache key for cache hit and returns undefined if cache not found. + +```js +const cache = require('@actions/cache'); +const paths = [ + 'node_modules', + 'packages/*/node_modules/' +] +const key = 'npm-foobar-d5ea0750' +const restoreKeys = [ + 'npm-foobar-', + 'npm-' +] +const cacheKey = await cache.restoreCache(paths, key, restoreKeys) +``` + diff --git a/node_modules/@actions/cache/lib/cache.d.ts b/node_modules/@actions/cache/lib/cache.d.ts new file mode 100644 index 00000000..16b20f75 --- /dev/null +++ b/node_modules/@actions/cache/lib/cache.d.ts @@ -0,0 +1,32 @@ +import { DownloadOptions, UploadOptions } from './options'; +export declare class ValidationError extends Error { + constructor(message: string); +} +export declare class ReserveCacheError extends Error { + constructor(message: string); +} +/** + * isFeatureAvailable to check the presence of Actions cache service + * + * @returns boolean return true if Actions cache service feature is available, otherwise false + */ +export declare function isFeatureAvailable(): boolean; +/** + * Restores cache from keys + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options + * @returns string returns the key for the cache hit, otherwise returns undefined + */ +export declare function restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[], options?: DownloadOptions): Promise; +/** + * Saves a list of files with the specified key + * + * @param paths a list of file paths to be cached + * @param key an explicit key for restoring the cache + * @param options cache upload options + * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails + */ +export declare function saveCache(paths: string[], key: string, options?: UploadOptions): Promise; diff --git a/node_modules/@actions/cache/lib/cache.js b/node_modules/@actions/cache/lib/cache.js new file mode 100644 index 00000000..0b5a2a81 --- /dev/null +++ b/node_modules/@actions/cache/lib/cache.js @@ -0,0 +1,183 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const path = __importStar(require("path")); +const utils = __importStar(require("./internal/cacheUtils")); +const cacheHttpClient = __importStar(require("./internal/cacheHttpClient")); +const tar_1 = require("./internal/tar"); +class ValidationError extends Error { + constructor(message) { + super(message); + this.name = 'ValidationError'; + Object.setPrototypeOf(this, ValidationError.prototype); + } +} +exports.ValidationError = ValidationError; +class ReserveCacheError extends Error { + constructor(message) { + super(message); + this.name = 'ReserveCacheError'; + Object.setPrototypeOf(this, ReserveCacheError.prototype); + } +} +exports.ReserveCacheError = ReserveCacheError; +function checkPaths(paths) { + if (!paths || paths.length === 0) { + throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); + } +} +function checkKey(key) { + if (key.length > 512) { + throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + } + const regex = /^[^,]*$/; + if (!regex.test(key)) { + throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); + } +} +/** + * isFeatureAvailable to check the presence of Actions cache service + * + * @returns boolean return true if Actions cache service feature is available, otherwise false + */ +function isFeatureAvailable() { + return !!process.env['ACTIONS_CACHE_URL']; +} +exports.isFeatureAvailable = isFeatureAvailable; +/** + * Restores cache from keys + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options + * @returns string returns the key for the cache hit, otherwise returns undefined + */ +function restoreCache(paths, primaryKey, restoreKeys, options) { + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core.debug('Resolved Keys:'); + core.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + const compressionMethod = yield utils.getCompressionMethod(); + // path are needed to compute version + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.archiveLocation)) { + // Cache not found + return undefined; + } + const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + // Download the cache from the cache entry + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); + if (core.isDebug()) { + yield tar_1.listTar(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield tar_1.extractTar(archivePath, compressionMethod); + core.info('Cache restored successfully'); + } + finally { + // Try to delete the archive to save space + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + return cacheEntry.cacheKey; + }); +} +exports.restoreCache = restoreCache; +/** + * Saves a list of files with the specified key + * + * @param paths a list of file paths to be cached + * @param key an explicit key for restoring the cache + * @param options cache upload options + * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails + */ +function saveCache(paths, key, options) { + var _a, _b, _c, _d, _e; + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + checkKey(key); + const compressionMethod = yield utils.getCompressionMethod(); + let cacheId = null; + const cachePaths = yield utils.resolvePaths(paths); + core.debug('Cache Paths:'); + core.debug(`${JSON.stringify(cachePaths)}`); + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod); + if (core.isDebug()) { + yield tar_1.listTar(archivePath, compressionMethod); + } + const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.debug(`File Size: ${archiveFileSize}`); + // For GHES, this check will take place in ReserveCache API with enterprise file size limit + if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + } + core.debug('Reserving Cache'); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } + else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? 
_d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } + else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } + core.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath, options); + } + finally { + // Try to delete the archive to save space + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + return cacheId; + }); +} +exports.saveCache = saveCache; +//# sourceMappingURL=cache.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/cache.js.map b/node_modules/@actions/cache/lib/cache.js.map new file mode 100644 index 00000000..9a065505 --- /dev/null +++ b/node_modules/@actions/cache/lib/cache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cache.js","sourceRoot":"","sources":["../src/cache.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,2CAA4B;AAC5B,6DAA8C;AAC9C,4EAA6D;AAC7D,wCAA6D;AAG7D,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CACF;AAND,0CAMC;AAED,MAAa,iBAAkB,SAAQ,KAAK;IAC1C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,mBAAmB,CAAA;QAC/B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,iBAAiB,CAAC,SAAS,CAAC,CAAA;IAC1D,CAAC;CACF;AAND,8CAMC;AAED,SAAS,UAAU,CAAC,KAAe;IACjC,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAChC,MAAM,IAAI,eAAe,CACvB,wEAAwE,CACzE,CAAA;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,GAAW;IAC3B,IAAI,GAAG,CAAC,MAAM,GAAG,GAAG,EAAE;QACpB,MAAM,IAAI,eAAe,CACvB,yBAAyB,GAAG,wCAAwC,CACrE,CAAA;KACF;IACD,MAAM,KAAK,GAAG,SAAS,CAAA;IACvB,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACpB,MAAM,IAAI,eAAe,CACvB,yBAAyB,GAAG,yBAAyB,CACtD,CAAA;KACF;AACH,CAAC;AAED;;;;GAIG;AAEH,SAAgB,kBAAkB;IAChC,OAAO,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAA;AAC3C,CAAC;AAFD,gDAEC;AAED;;;;;;;;GAQG;AACH,SAAsB,YAAY,CAChC,KAAe,EACf,UAAkB,EAClB,WAAsB,EACtB,OAAyB;;QAEzB,UAAU,CAAC,KAAK,CAAC,CAAA;QAEjB,WAAW,GAAG,WAAW,IAAI,EAAE,CAAA;QAC/B,MAAM,IAAI,GAAG,CAAC,UAAU,EAAE,GAAG,WAAW,CAAC,CAAA;QAEzC,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAA;QAC5B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;QAEhC,IAAI,IAAI,CAAC,MAAM,GAAG,EAAE,EAAE;YACpB,MAAM,IAAI,eAAe,CACvB,4DAA4D,CAC7D,CAAA;SACF;QACD,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;YACtB,QAAQ,CAAC,GAAG,CAAC,CAAA;SACd;QAED,MAAM,iBAAiB,GAAG,MAAM,KAAK,CAAC,oBAAoB,EAAE,CAAA;QAE5D,qCAAqC;QACrC,MAAM,UAAU,GAAG,MAAM,eAAe,CAAC,aAAa,CAAC,IAAI,EAAE,KAAK,EAAE;YAClE,iBAAiB;SAClB,CAAC,CAAA;QACF,IAAI,EAAC,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,eAAe,CAAA,EAAE;YAChC,kBAAkB;YAClB,OAAO,SAAS,CAAA;SACjB;QAED,MAAM,WAAW,GAAG,IAAI,CAAC,IAAI,CAC3B,MAAM,KAAK,CAAC,mBAAmB,EAAE,EACjC,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAC1C,CAAA;QACD,IAAI,CAAC,KAAK,CAAC,iBAAiB,WAAW,EAAE,CAAC,CAAA;QAE1C,IAAI;YACF,0CAA0C;YAC1C,MAAM,eAAe,CAAC,aAAa,CACjC,UAAU,CAAC,eAAe,EAC1B,WAAW,EACX,OAAO,CACR,CAAA;YAED,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;gBAClB,MAAM,aAAO,CAAC,WAAW,EAAE,iBAAiB,CAAC,CAAA;aAC9C;YAED,MAAM,eAAe,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;YACpE,IAAI,CAAC,IAAI,CACP,gBAAgB,IAAI,CAAC,KAAK,CACxB,eAAe,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAChC,QAAQ,eAAe,KAAK,CAC9B,CAAA;YAED,MAAM,gBAAU,CAAC,WAAW,EAAE,iBAAiB,CAAC,CAAA;YAChD,IAA
I,CAAC,IAAI,CAAC,6BAA6B,CAAC,CAAA;SACzC;gBAAS;YACR,0CAA0C;YAC1C,IAAI;gBACF,MAAM,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAA;aACpC;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,KAAK,CAAC,6BAA6B,KAAK,EAAE,CAAC,CAAA;aACjD;SACF;QAED,OAAO,UAAU,CAAC,QAAQ,CAAA;IAC5B,CAAC;CAAA;AAvED,oCAuEC;AAED;;;;;;;GAOG;AACH,SAAsB,SAAS,CAC7B,KAAe,EACf,GAAW,EACX,OAAuB;;;QAEvB,UAAU,CAAC,KAAK,CAAC,CAAA;QACjB,QAAQ,CAAC,GAAG,CAAC,CAAA;QAEb,MAAM,iBAAiB,GAAG,MAAM,KAAK,CAAC,oBAAoB,EAAE,CAAA;QAC5D,IAAI,OAAO,GAAG,IAAI,CAAA;QAElB,MAAM,UAAU,GAAG,MAAM,KAAK,CAAC,YAAY,CAAC,KAAK,CAAC,CAAA;QAClD,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;QAC1B,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,EAAE,CAAC,CAAA;QAE3C,MAAM,aAAa,GAAG,MAAM,KAAK,CAAC,mBAAmB,EAAE,CAAA;QACvD,MAAM,WAAW,GAAG,IAAI,CAAC,IAAI,CAC3B,aAAa,EACb,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAC1C,CAAA;QAED,IAAI,CAAC,KAAK,CAAC,iBAAiB,WAAW,EAAE,CAAC,CAAA;QAE1C,IAAI;YACF,MAAM,eAAS,CAAC,aAAa,EAAE,UAAU,EAAE,iBAAiB,CAAC,CAAA;YAC7D,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;gBAClB,MAAM,aAAO,CAAC,WAAW,EAAE,iBAAiB,CAAC,CAAA;aAC9C;YACD,MAAM,aAAa,GAAG,EAAE,GAAG,IAAI,GAAG,IAAI,GAAG,IAAI,CAAA,CAAC,sBAAsB;YACpE,MAAM,eAAe,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;YACpE,IAAI,CAAC,KAAK,CAAC,cAAc,eAAe,EAAE,CAAC,CAAA;YAE3C,2FAA2F;YAC3F,IAAI,eAAe,GAAG,aAAa,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,EAAE;gBACtD,MAAM,IAAI,KAAK,CACb,kBAAkB,IAAI,CAAC,KAAK,CAC1B,eAAe,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAChC,QAAQ,eAAe,+CAA+C,CACxE,CAAA;aACF;YAED,IAAI,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAA;YAC7B,MAAM,oBAAoB,GAAG,MAAM,eAAe,CAAC,YAAY,CAC7D,GAAG,EACH,KAAK,EACL;gBACE,iBAAiB;gBACjB,SAAS,EAAE,eAAe;aAC3B,CACF,CAAA;YAED,UAAI,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,MAAM,0CAAE,OAAO,EAAE;gBACzC,OAAO,SAAG,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,MAAM,0CAAE,OAAO,CAAA;aAChD;iBAAM,IAAI,CAAA,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,UAAU,MAAK,GAAG,EAAE;gBACnD,MAAM,IAAI,KAAK,aACb,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,KAAK,0CAAE,OAAO,mCAClC,kBAAkB,IAAI,CAAC,KAAK,CAC1B,eAAe,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAChC,QAAQ,eAAe,mDAAmD,CAC9E,CAAA;aACF;iBAAM;gBACL,MAAM,IAAI,iBAAiB,CACzB,oCAAoC,GAAG,2DAA2D,MAAA,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,KAAK,0CAAE,OAAO,EAAE,CACzI,CAAA;aACF;YAED,IAAI,CAAC,KAAK,CAAC,qBAAqB,OAAO,GAAG,CAAC,CAAA;YAC3C,MAAM,eAAe,CAAC,SAAS,CAAC,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,CAAA;SAC/D;gBAAS;YACR,0CAA0C;YAC1C,IAAI;gBACF,MAAM,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAA;aACpC;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,KAAK,CAAC,6BAA6B,KAAK,EAAE,CAAC,CAAA;aACjD;SACF;QAED,OAAO,OAAO,CAAA;;CACf;AA9ED,8BA8EC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts b/node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts new file mode 100644 index 00000000..0497cabd --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts @@ -0,0 +1,8 @@ +import { CompressionMethod } from './constants'; +import { ArtifactCacheEntry, InternalCacheOptions, ReserveCacheResponse, ITypedResponseWithError } from './contracts'; +import { DownloadOptions, UploadOptions } from '../options'; +export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod): string; +export declare function getCacheEntry(keys: string[], paths: string[], options?: InternalCacheOptions): Promise; +export declare function downloadCache(archiveLocation: string, archivePath: string, options?: DownloadOptions): Promise; +export declare function reserveCache(key: string, paths: string[], options?: InternalCacheOptions): Promise>; +export declare function saveCache(cacheId: number, archivePath: string, options?: UploadOptions): Promise; diff 
--git a/node_modules/@actions/cache/lib/internal/cacheHttpClient.js b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js new file mode 100644 index 00000000..cc331468 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js @@ -0,0 +1,213 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const http_client_1 = require("@actions/http-client"); +const auth_1 = require("@actions/http-client/lib/auth"); +const crypto = __importStar(require("crypto")); +const fs = __importStar(require("fs")); +const url_1 = require("url"); +const utils = __importStar(require("./cacheUtils")); +const constants_1 = require("./constants"); +const downloadUtils_1 = require("./downloadUtils"); +const options_1 = require("../options"); +const requestUtils_1 = require("./requestUtils"); +const versionSalt = '1.0'; +function getCacheApiUrl(resource) { + const baseUrl = process.env['ACTIONS_CACHE_URL'] || ''; + if (!baseUrl) { + throw new Error('Cache Service Url not found, unable to restore cache.'); + } + const url = `${baseUrl}_apis/artifactcache/${resource}`; + core.debug(`Resource Url: ${url}`); + return url; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader('application/json', '6.0-preview.1') + } + }; + return requestOptions; +} +function createHttpClient() { + const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); +} +function getCacheVersion(paths, compressionMethod) { + const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip + ? [] + : [compressionMethod]); + // Add salt to cache version to support breaking changes in cache entry + components.push(versionSalt); + return crypto + .createHash('sha256') + .update(components.join('|')) + .digest('hex'); +} +exports.getCacheVersion = getCacheVersion; +function getCacheEntry(keys, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? 
void 0 : options.compressionMethod); + const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; + const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 204) { + return null; + } + if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + throw new Error('Cache not found.'); + } + core.setSecret(cacheDownloadUrl); + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); +} +exports.getCacheEntry = getCacheEntry; +function downloadCache(archiveLocation, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = options_1.getDownloadOptions(options); + if (downloadOptions.useAzureSdk && + archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath); + } + }); +} +exports.downloadCache = downloadCache; +// Reserve Cache +function reserveCache(key, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const reserveCacheRequest = { + key, + version, + cacheSize: options === null || options === void 0 ? 
void 0 : options.cacheSize + }; + const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); + })); + return response; + }); +} +exports.reserveCache = reserveCache; +function getContentRange(start, end) { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*`; +} +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + 'Content-Type': 'application/octet-stream', + 'Content-Range': getContentRange(start, end) + }; + const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { + return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); + })); + if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); + } + }); +} +function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + // Upload Chunks + const fileSize = utils.getArchiveFileSizeInBytes(archivePath); + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs.openSync(archivePath, 'r'); + const uploadOptions = options_1.getUploadOptions(options); + const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + core.debug('Awaiting all uploads'); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on('error', error => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), start, end); + } + }))); + } + finally { + fs.closeSync(fd); + } + return; + }); +} +function commitCache(httpClient, cacheId, filesize) { + return __awaiter(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); +} +function saveCache(cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + core.debug('Upload cache'); + yield uploadFile(httpClient, cacheId, archivePath, options); + // Commit Cache + core.debug('Commiting cache'); + const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(cacheSize / 
(1024 * 1024))} MB (${cacheSize} B)`); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core.info('Cache saved successfully'); + }); +} +exports.saveCache = saveCache; +//# sourceMappingURL=cacheHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map new file mode 100644 index 00000000..de9436a9 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cacheHttpClient.js","sourceRoot":"","sources":["../../src/internal/cacheHttpClient.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAA+C;AAC/C,wDAAqE;AAKrE,+CAAgC;AAChC,uCAAwB;AACxB,6BAAuB;AAEvB,oDAAqC;AACrC,2CAA6C;AAS7C,mDAAgF;AAChF,wCAKmB;AACnB,iDAIuB;AAEvB,MAAM,WAAW,GAAG,KAAK,CAAA;AAEzB,SAAS,cAAc,CAAC,QAAgB;IACtC,MAAM,OAAO,GAAW,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAA;IAC9D,IAAI,CAAC,OAAO,EAAE;QACZ,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAA;KACzE;IAED,MAAM,GAAG,GAAG,GAAG,OAAO,uBAAuB,QAAQ,EAAE,CAAA;IACvD,IAAI,CAAC,KAAK,CAAC,iBAAiB,GAAG,EAAE,CAAC,CAAA;IAClC,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAY,EAAE,UAAkB;IAC1D,OAAO,GAAG,IAAI,gBAAgB,UAAU,EAAE,CAAA;AAC5C,CAAC;AAED,SAAS,iBAAiB;IACxB,MAAM,cAAc,GAAmB;QACrC,OAAO,EAAE;YACP,MAAM,EAAE,kBAAkB,CAAC,kBAAkB,EAAE,eAAe,CAAC;SAChE;KACF,CAAA;IAED,OAAO,cAAc,CAAA;AACvB,CAAC;AAED,SAAS,gBAAgB;IACvB,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,IAAI,EAAE,CAAA;IACxD,MAAM,uBAAuB,GAAG,IAAI,8BAAuB,CAAC,KAAK,CAAC,CAAA;IAElE,OAAO,IAAI,wBAAU,CACnB,eAAe,EACf,CAAC,uBAAuB,CAAC,EACzB,iBAAiB,EAAE,CACpB,CAAA;AACH,CAAC;AAED,SAAgB,eAAe,CAC7B,KAAe,EACf,iBAAqC;IAErC,MAAM,UAAU,GAAG,KAAK,CAAC,MAAM,CAC7B,CAAC,iBAAiB,IAAI,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QAChE,CAAC,CAAC,EAAE;QACJ,CAAC,CAAC,CAAC,iBAAiB,CAAC,CACxB,CAAA;IAED,uEAAuE;IACvE,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAE5B,OAAO,MAAM;SACV,UAAU,CAAC,QAAQ,CAAC;SACpB,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SAC5B,MAAM,CAAC,KAAK,CAAC,CAAA;AAClB,CAAC;AAjBD,0CAiBC;AAED,SAAsB,aAAa,CACjC,IAAc,EACd,KAAe,EACf,OAA8B;;QAE9B,MAAM,UAAU,GAAG,gBAAgB,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,eAAe,CAAC,KAAK,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB,CAAC,CAAA;QAClE,MAAM,QAAQ,GAAG,cAAc,kBAAkB,CAC/C,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CACf,YAAY,OAAO,EAAE,CAAA;QAEtB,MAAM,QAAQ,GAAG,MAAM,iCAAkB,CAAC,eAAe,EAAE,GAAS,EAAE,gDACpE,OAAA,UAAU,CAAC,OAAO,CAAqB,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAA,GAAA,CACjE,CAAA;QACD,IAAI,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;YAC/B,OAAO,IAAI,CAAA;SACZ;QACD,IAAI,CAAC,kCAAmB,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;YAC7C,MAAM,IAAI,KAAK,CAAC,gCAAgC,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAA;SACvE;QAED,MAAM,WAAW,GAAG,QAAQ,CAAC,MAAM,CAAA;QACnC,MAAM,gBAAgB,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,eAAe,CAAA;QACrD,IAAI,CAAC,gBAAgB,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC,CAAA;SACpC;QACD,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAA;QAC3B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC,CAAA;QAEvC,OAAO,WAAW,CAAA;IACpB,CAAC;CAAA;AA/BD,sCA+BC;AAED,SAAsB,aAAa,CACjC,eAAuB,EACvB,WAAmB,EACnB,OAAyB;;QAEzB,MAAM,UAAU,GAAG,IAAI,SAAG,CAAC,eAAe,CAAC,CAAA;QAC3C,MAAM,eAAe,GAAG,4BAAkB,CAAC,OAAO,CAAC,CAAA;QAEnD,IACE,eAAe,CAAC,WAAW;YAC3B,UAAU,CAAC,QAAQ,CAAC,QAAQ,CAAC,wBAAwB,CAAC,EACtD;YACA,6FAA6F;YAC7F,MAAM,uCAAuB,CAAC,eAAe,EAAE,WAAW,EAAE,eAAe,CAAC,CAAA;SAC7E;aAAM;YACL,qDAAqD;YACrD,MAAM
,uCAAuB,CAAC,eAAe,EAAE,WAAW,CAAC,CAAA;SAC5D;IACH,CAAC;CAAA;AAlBD,sCAkBC;AAED,gBAAgB;AAChB,SAAsB,YAAY,CAChC,GAAW,EACX,KAAe,EACf,OAA8B;;QAE9B,MAAM,UAAU,GAAG,gBAAgB,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,eAAe,CAAC,KAAK,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB,CAAC,CAAA;QAElE,MAAM,mBAAmB,GAAwB;YAC/C,GAAG;YACH,OAAO;YACP,SAAS,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS;SAC9B,CAAA;QACD,MAAM,QAAQ,GAAG,MAAM,iCAAkB,CAAC,cAAc,EAAE,GAAS,EAAE;YACnE,OAAA,UAAU,CAAC,QAAQ,CACjB,cAAc,CAAC,QAAQ,CAAC,EACxB,mBAAmB,CACpB,CAAA;UAAA,CACF,CAAA;QACD,OAAO,QAAQ,CAAA;IACjB,CAAC;CAAA;AApBD,oCAoBC;AAED,SAAS,eAAe,CAAC,KAAa,EAAE,GAAW;IACjD,oCAAoC;IACpC,8BAA8B;IAC9B,oBAAoB;IACpB,2CAA2C;IAC3C,+BAA+B;IAC/B,OAAO,SAAS,KAAK,IAAI,GAAG,IAAI,CAAA;AAClC,CAAC;AAED,SAAe,WAAW,CACxB,UAAsB,EACtB,WAAmB,EACnB,UAAuC,EACvC,KAAa,EACb,GAAW;;QAEX,IAAI,CAAC,KAAK,CACR,2BAA2B,GAAG;YAC5B,KAAK;YACL,CAAC,oBAAoB,KAAK,wBAAwB,eAAe,CACjE,KAAK,EACL,GAAG,CACJ,EAAE,CACJ,CAAA;QACD,MAAM,iBAAiB,GAAG;YACxB,cAAc,EAAE,0BAA0B;YAC1C,eAAe,EAAE,eAAe,CAAC,KAAK,EAAE,GAAG,CAAC;SAC7C,CAAA;QAED,MAAM,mBAAmB,GAAG,MAAM,sCAAuB,CACvD,uBAAuB,KAAK,UAAU,GAAG,GAAG,EAC5C,GAAS,EAAE;YACT,OAAA,UAAU,CAAC,UAAU,CACnB,OAAO,EACP,WAAW,EACX,UAAU,EAAE,EACZ,iBAAiB,CAClB,CAAA;UAAA,CACJ,CAAA;QAED,IAAI,CAAC,kCAAmB,CAAC,mBAAmB,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;YAChE,MAAM,IAAI,KAAK,CACb,gCAAgC,mBAAmB,CAAC,OAAO,CAAC,UAAU,uBAAuB,CAC9F,CAAA;SACF;IACH,CAAC;CAAA;AAED,SAAe,UAAU,CACvB,UAAsB,EACtB,OAAe,EACf,WAAmB,EACnB,OAAuB;;QAEvB,gBAAgB;QAChB,MAAM,QAAQ,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;QAC7D,MAAM,WAAW,GAAG,cAAc,CAAC,UAAU,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAA;QAClE,MAAM,EAAE,GAAG,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,GAAG,CAAC,CAAA;QACxC,MAAM,aAAa,GAAG,0BAAgB,CAAC,OAAO,CAAC,CAAA;QAE/C,MAAM,WAAW,GAAG,KAAK,CAAC,aAAa,CACrC,mBAAmB,EACnB,aAAa,CAAC,iBAAiB,CAChC,CAAA;QACD,MAAM,YAAY,GAAG,KAAK,CAAC,aAAa,CACtC,iBAAiB,EACjB,aAAa,CAAC,eAAe,CAC9B,CAAA;QAED,MAAM,eAAe,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;QAC1D,IAAI,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAA;QAClC,IAAI,MAAM,GAAG,CAAC,CAAA;QAEd,IAAI;YACF,MAAM,OAAO,CAAC,GAAG,CACf,eAAe,CAAC,GAAG,CAAC,GAAS,EAAE;gBAC7B,OAAO,MAAM,GAAG,QAAQ,EAAE;oBACxB,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,GAAG,MAAM,EAAE,YAAY,CAAC,CAAA;oBAC3D,MAAM,KAAK,GAAG,MAAM,CAAA;oBACpB,MAAM,GAAG,GAAG,MAAM,GAAG,SAAS,GAAG,CAAC,CAAA;oBAClC,MAAM,IAAI,YAAY,CAAA;oBAEtB,MAAM,WAAW,CACf,UAAU,EACV,WAAW,EACX,GAAG,EAAE,CACH,EAAE;yBACC,gBAAgB,CAAC,WAAW,EAAE;wBAC7B,EAAE;wBACF,KAAK;wBACL,GAAG;wBACH,SAAS,EAAE,KAAK;qBACjB,CAAC;yBACD,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;wBACnB,MAAM,IAAI,KAAK,CACb,qDAAqD,KAAK,CAAC,OAAO,EAAE,CACrE,CAAA;oBACH,CAAC,CAAC,EACN,KAAK,EACL,GAAG,CACJ,CAAA;iBACF;YACH,CAAC,CAAA,CAAC,CACH,CAAA;SACF;gBAAS;YACR,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,CAAA;SACjB;QACD,OAAM;IACR,CAAC;CAAA;AAED,SAAe,WAAW,CACxB,UAAsB,EACtB,OAAe,EACf,QAAgB;;QAEhB,MAAM,kBAAkB,GAAuB,EAAC,IAAI,EAAE,QAAQ,EAAC,CAAA;QAC/D,OAAO,MAAM,iCAAkB,CAAC,aAAa,EAAE,GAAS,EAAE;YACxD,OAAA,UAAU,CAAC,QAAQ,CACjB,cAAc,CAAC,UAAU,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,EAC9C,kBAAkB,CACnB,CAAA;UAAA,CACF,CAAA;IACH,CAAC;CAAA;AAED,SAAsB,SAAS,CAC7B,OAAe,EACf,WAAmB,EACnB,OAAuB;;QAEvB,MAAM,UAAU,GAAG,gBAAgB,EAAE,CAAA;QAErC,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;QAC1B,MAAM,UAAU,CAAC,UAAU,EAAE,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,CAAA;QAE3D,eAAe;QACf,IAAI,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAA;QAC7B,MAAM,SAAS,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;QAC9D,IAAI,CAAC,IAAI,CACP,gBAAgB,IAAI,CAAC,KAAK,CAAC,SAAS,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,QAAQ,SAAS,KAAK,CAC5E,CAAA;QAED,MAAM,mBAAmB,GAAG,MAAM,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,SAAS,CAAC,CAAA;QAC7E,IAAI,CAAC,kCAAmB,CAAC,mBAAmB,CAAC,UAAU,CAAC,EAAE;YACxD,MA
AM,IAAI,KAAK,CACb,gCAAgC,mBAAmB,CAAC,UAAU,uBAAuB,CACtF,CAAA;SACF;QAED,IAAI,CAAC,IAAI,CAAC,0BAA0B,CAAC,CAAA;IACvC,CAAC;CAAA;AAzBD,8BAyBC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheUtils.d.ts b/node_modules/@actions/cache/lib/internal/cacheUtils.d.ts new file mode 100644 index 00000000..ab2e930e --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheUtils.d.ts @@ -0,0 +1,12 @@ +/// +import * as fs from 'fs'; +import { CompressionMethod } from './constants'; +export declare function createTempDirectory(): Promise; +export declare function getArchiveFileSizeInBytes(filePath: string): number; +export declare function resolvePaths(patterns: string[]): Promise; +export declare function unlinkFile(filePath: fs.PathLike): Promise; +export declare function getCompressionMethod(): Promise; +export declare function getCacheFileName(compressionMethod: CompressionMethod): string; +export declare function isGnuTarInstalled(): Promise; +export declare function assertDefined(name: string, value?: T): T; +export declare function isGhes(): boolean; diff --git a/node_modules/@actions/cache/lib/internal/cacheUtils.js b/node_modules/@actions/cache/lib/internal/cacheUtils.js new file mode 100644 index 00000000..9ab8a0b7 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheUtils.js @@ -0,0 +1,175 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const exec = __importStar(require("@actions/exec")); +const glob = __importStar(require("@actions/glob")); +const io = __importStar(require("@actions/io")); +const fs = __importStar(require("fs")); +const path = __importStar(require("path")); +const semver = __importStar(require("semver")); +const util = __importStar(require("util")); +const uuid_1 = require("uuid"); +const constants_1 = require("./constants"); +// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 +function createTempDirectory() { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === 'win32'; + let tempDirectory = process.env['RUNNER_TEMP'] || ''; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + // On Windows use the USERPROFILE env variable + baseLocation = process.env['USERPROFILE'] || 'C:\\'; + } + else { + if (process.platform === 'darwin') { + baseLocation = '/Users'; + } + else { + baseLocation = '/home'; + } + } + tempDirectory = path.join(baseLocation, 'actions', 'temp'); + } + const dest = path.join(tempDirectory, uuid_1.v4()); + yield io.mkdirP(dest); + return dest; + }); +} +exports.createTempDirectory = createTempDirectory; +function getArchiveFileSizeInBytes(filePath) { + return fs.statSync(filePath).size; +} +exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; +function resolvePaths(patterns) { + var e_1, _a; + var _b; + return __awaiter(this, void 0, void 0, function* () { + const paths = []; + const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const globber = yield glob.create(patterns.join('\n'), { + implicitDescendants: false + }); + try { + for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { + const file = _d.value; + const relativeFile = path + .relative(workspace, file) + .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. 
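The comment just above describes how `resolvePaths` rewrites each matched file so the tar entries are relative to the workspace root and use forward slashes. A minimal TypeScript sketch of that rewrite, using a made-up file path rather than anything from this patch:

```typescript
import * as path from "path";

// Workspace root as the cache code sees it; falls back to the current directory.
const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();

// Hypothetical matched file, purely for illustration.
const file = path.join(workspace, "dist", "cache", "index.js");

// Same rewrite as resolvePaths: make the path workspace-relative and replace
// the platform separator with "/" so tar entries look identical on all runners.
const relativeFile = path
  .relative(workspace, file)
  .replace(new RegExp(`\\${path.sep}`, "g"), "/");

console.log(relativeFile); // "dist/cache/index.js" on Linux, macOS, and Windows
```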
+ paths.push(`${relativeFile}`); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + } + finally { if (e_1) throw e_1.error; } + } + return paths; + }); +} +exports.resolvePaths = resolvePaths; +function unlinkFile(filePath) { + return __awaiter(this, void 0, void 0, function* () { + return util.promisify(fs.unlink)(filePath); + }); +} +exports.unlinkFile = unlinkFile; +function getVersion(app) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Checking ${app} --version`); + let versionOutput = ''; + try { + yield exec.exec(`${app} --version`, [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + } + catch (err) { + core.debug(err.message); + } + versionOutput = versionOutput.trim(); + core.debug(versionOutput); + return versionOutput; + }); +} +// Use zstandard if possible to maximize cache performance +function getCompressionMethod() { + return __awaiter(this, void 0, void 0, function* () { + if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { + // Disable zstd due to bug https://github.com/actions/cache/issues/301 + return constants_1.CompressionMethod.Gzip; + } + const versionOutput = yield getVersion('zstd'); + const version = semver.clean(versionOutput); + if (!versionOutput.toLowerCase().includes('zstd command line interface')) { + // zstd is not installed + return constants_1.CompressionMethod.Gzip; + } + else if (!version || semver.lt(version, 'v1.3.2')) { + // zstd is installed but using a version earlier than v1.3.2 + // v1.3.2 is required to use the `--long` options in zstd + return constants_1.CompressionMethod.ZstdWithoutLong; + } + else { + return constants_1.CompressionMethod.Zstd; + } + }); +} +exports.getCompressionMethod = getCompressionMethod; +function getCacheFileName(compressionMethod) { + return compressionMethod === constants_1.CompressionMethod.Gzip + ? 
constants_1.CacheFilename.Gzip + : constants_1.CacheFilename.Zstd; +} +exports.getCacheFileName = getCacheFileName; +function isGnuTarInstalled() { + return __awaiter(this, void 0, void 0, function* () { + const versionOutput = yield getVersion('tar'); + return versionOutput.toLowerCase().includes('gnu tar'); + }); +} +exports.isGnuTarInstalled = isGnuTarInstalled; +function assertDefined(name, value) { + if (value === undefined) { + throw Error(`Expected ${name} but value was undefiend`); + } + return value; +} +exports.assertDefined = assertDefined; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; +//# sourceMappingURL=cacheUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheUtils.js.map b/node_modules/@actions/cache/lib/internal/cacheUtils.js.map new file mode 100644 index 00000000..6d7360b4 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,+BAAiC;AACjC,2CAA4D;AAE5D,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,SAAM,EAAE,CAAC,CAAA;QAC/C,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,SAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,IAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA;gBAArC,MAAM,IAAI,WAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;aAC9B;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAjBD,oCAiBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CAAC,GAAW;;QACnC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC,CAAA;QACvC,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,YAAY,EAAE,EAAE,EAAE;gBACtC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,C
AAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,CAAC,CAAC,MAAM,iBAAiB,EAAE,CAAC,EAAE;YAChE,sEAAsE;YACtE,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;QAED,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,CAAC,CAAA;QAC9C,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAE3C,IAAI,CAAC,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,6BAA6B,CAAC,EAAE;YACxE,wBAAwB;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM,IAAI,CAAC,OAAO,IAAI,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE;YACnD,4DAA4D;YAC5D,yDAAyD;YACzD,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;aAAM;YACL,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;IACH,CAAC;CAAA;AAnBD,oDAmBC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,iBAAiB;;QACrC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAAA;AAHD,8CAGC;AAED,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IACD,OAAO,KAAK,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,YAAY,CAAA;AACtD,CAAC;AALD,wBAKC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/constants.d.ts b/node_modules/@actions/cache/lib/internal/constants.d.ts new file mode 100644 index 00000000..36ac06e3 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/constants.d.ts @@ -0,0 +1,12 @@ +export declare enum CacheFilename { + Gzip = "cache.tgz", + Zstd = "cache.tzst" +} +export declare enum CompressionMethod { + Gzip = "gzip", + ZstdWithoutLong = "zstd-without-long", + Zstd = "zstd" +} +export declare const DefaultRetryAttempts = 2; +export declare const DefaultRetryDelay = 5000; +export declare const SocketTimeout = 5000; diff --git a/node_modules/@actions/cache/lib/internal/constants.js b/node_modules/@actions/cache/lib/internal/constants.js new file mode 100644 index 00000000..301068ff --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/constants.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var CacheFilename; +(function (CacheFilename) { + CacheFilename["Gzip"] = "cache.tgz"; + CacheFilename["Zstd"] = "cache.tzst"; +})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); +var CompressionMethod; +(function (CompressionMethod) { + CompressionMethod["Gzip"] = "gzip"; + // Long range mode was added to zstd in v1.3.2. + // This enum is for earlier version of zstd that does not have --long support + CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; + CompressionMethod["Zstd"] = "zstd"; +})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +// The default number of retry attempts. +exports.DefaultRetryAttempts = 2; +// The default delay in milliseconds between retry attempts. +exports.DefaultRetryDelay = 5000; +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. 
+exports.SocketTimeout = 5000; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/constants.js.map b/node_modules/@actions/cache/lib/internal/constants.js.map new file mode 100644 index 00000000..ccdec94f --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,GAAb,qBAAa,KAAb,qBAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,GAAjB,yBAAiB,KAAjB,yBAAiB,QAM5B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/downloadUtils.d.ts b/node_modules/@actions/cache/lib/internal/downloadUtils.d.ts new file mode 100644 index 00000000..23a38b7e --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/downloadUtils.d.ts @@ -0,0 +1,74 @@ +import { TransferProgressEvent } from '@azure/ms-rest-js'; +import { DownloadOptions } from '../options'; +/** + * Class for tracking the download state and displaying stats. + */ +export declare class DownloadProgress { + contentLength: number; + segmentIndex: number; + segmentSize: number; + segmentOffset: number; + receivedBytes: number; + startTime: number; + displayedComplete: boolean; + timeoutHandle?: ReturnType; + constructor(contentLength: number); + /** + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize: number): void; + /** + * Sets the number of bytes received for the current segment. + * + * @param receivedBytes the number of bytes received + */ + setReceivedBytes(receivedBytes: number): void; + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes(): number; + /** + * Returns true if the download is complete. + */ + isDone(): boolean; + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. + */ + display(): void; + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress(): (progress: TransferProgressEvent) => void; + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs?: number): void; + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer(): void; +} +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +export declare function downloadCacheHttpClient(archiveLocation: string, archivePath: string): Promise; +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. 
+ * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +export declare function downloadCacheStorageSDK(archiveLocation: string, archivePath: string, options: DownloadOptions): Promise; diff --git a/node_modules/@actions/cache/lib/internal/downloadUtils.js b/node_modules/@actions/cache/lib/internal/downloadUtils.js new file mode 100644 index 00000000..7c88504e --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/downloadUtils.js @@ -0,0 +1,231 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const http_client_1 = require("@actions/http-client"); +const storage_blob_1 = require("@azure/storage-blob"); +const buffer = __importStar(require("buffer")); +const fs = __importStar(require("fs")); +const stream = __importStar(require("stream")); +const util = __importStar(require("util")); +const utils = __importStar(require("./cacheUtils")); +const constants_1 = require("./constants"); +const requestUtils_1 = require("./requestUtils"); +/** + * Pipes the body of a HTTP response to a stream + * + * @param response the HTTP response + * @param output the writable stream + */ +function pipeResponseToStream(response, output) { + return __awaiter(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); + }); +} +/** + * Class for tracking the download state and displaying stats. + */ +class DownloadProgress { + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); + } + /** + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + } + /** + * Sets the number of bytes received for the current segment. + * + * @param receivedBytes the number of bytes received + */ + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; + } + /** + * Returns the total number of bytes transferred. 
+ */ + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; + } + /** + * Returns true if the download is complete. + */ + isDone() { + return this.getTransferredBytes() === this.contentLength; + } + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. + */ + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / + (1024 * 1024) / + (elapsedTime / 1000)).toFixed(1); + core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; + } + } + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; + } + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs = 1000) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + }; + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = undefined; + } + this.display(); + } +} +exports.DownloadProgress = DownloadProgress; +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const writeStream = fs.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient('actions/cache'); + const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers['content-length']; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeInBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug('Unable to validate download, no Content-Length header'); + } + }); +} +exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. 
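`downloadCacheHttpClient` above finishes by comparing the `Content-Length` header against the size of the file it just wrote; a mismatch means the download was cut short. A small standalone sketch of that check, with an illustrative helper name:

```typescript
import * as fs from "fs";

// Validate a finished download the same way downloadCacheHttpClient does:
// if the server sent a Content-Length, the on-disk archive must match it exactly.
function assertCompleteDownload(archivePath: string, contentLengthHeader?: string): void {
  if (!contentLengthHeader) {
    // Without a Content-Length header there is nothing to compare against.
    return;
  }
  const expectedLength = parseInt(contentLengthHeader, 10);
  const actualLength = fs.statSync(archivePath).size;
  if (actualLength !== expectedLength) {
    throw new Error(
      `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
    );
  }
}
```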
+ * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; + if (contentLength < 0) { + // We should never hit this condition, but just in case fall back to downloading the + // file as one large stream + core.debug('Unable to determine content length, downloading file with http-client...'); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } + else { + // Use downloadToBuffer for faster downloads, since internally it splits the + // file into 4 MB chunks which can then be parallelized and retried independently + // + // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB + // on 64-bit systems), split the download into multiple segments + // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly. + const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH); + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs.openSync(archivePath, 'w'); + try { + downloadProgress.startDisplayTimer(); + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield client.downloadToBuffer(segmentStart, segmentSize, { + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + }); + fs.writeFileSync(fd, result); + } + } + finally { + downloadProgress.stopDisplayTimer(); + fs.closeSync(fd); + } + } + }); +} +exports.downloadCacheStorageSDK = downloadCacheStorageSDK; +//# sourceMappingURL=downloadUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/downloadUtils.js.map b/node_modules/@actions/cache/lib/internal/downloadUtils.js.map new file mode 100644 index 00000000..2a308f2b --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/downloadUtils.js.map @@ -0,0 +1 @@ 
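`downloadCacheStorageSDK` above reads the blob in segments no larger than roughly 2 GB (the smaller of 2147483647 and Node's buffer limit), shrinking the final segment to whatever remains. A sketch of that segment arithmetic, with an illustrative helper name:

```typescript
// Cap used by the SDK download path: ~2 GB or Node's max buffer, whichever is smaller.
const MAX_SEGMENT_SIZE = 2147483647;

interface Segment {
  start: number;
  size: number;
}

// Plan the segments for an archive of the given total length.
function planSegments(contentLength: number, maxSegment = MAX_SEGMENT_SIZE): Segment[] {
  const segments: Segment[] = [];
  let offset = 0;
  while (offset < contentLength) {
    const size = Math.min(maxSegment, contentLength - offset);
    segments.push({ start: offset, size });
    offset += size;
  }
  return segments;
}

// A 5 GB archive becomes three segments: 2147483647, 2147483647, and 705032706 bytes.
console.log(planSegments(5_000_000_000));
```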
+{"version":3,"file":"downloadUtils.js","sourceRoot":"","sources":["../../src/internal/downloadUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAAmE;AACnE,sDAAmD;AAEnD,+CAAgC;AAChC,uCAAwB;AACxB,+CAAgC;AAChC,2CAA4B;AAE5B,oDAAqC;AACrC,2CAAyC;AAEzC,iDAAsD;AAEtD;;;;;GAKG;AACH,SAAe,oBAAoB,CACjC,QAA4B,EAC5B,MAA6B;;QAE7B,MAAM,QAAQ,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;QAChD,MAAM,QAAQ,CAAC,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;IAC1C,CAAC;CAAA;AAED;;GAEG;AACH,MAAa,gBAAgB;IAU3B,YAAY,aAAqB;QAC/B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAA;QAClC,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAA;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAA;QACtB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAA;QACtB,IAAI,CAAC,iBAAiB,GAAG,KAAK,CAAA;QAC9B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;IAC7B,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,WAAmB;QAC7B,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,WAAW,CAAA;QAC1D,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;QACzC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAA;QAC9B,IAAI,CAAC,aAAa,GAAG,CAAC,CAAA;QAEtB,IAAI,CAAC,KAAK,CACR,iCAAiC,IAAI,CAAC,aAAa,gBAAgB,IAAI,CAAC,WAAW,KAAK,CACzF,CAAA;IACH,CAAC;IAED;;;;OAIG;IACH,gBAAgB,CAAC,aAAqB;QACpC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAA;IACpC,CAAC;IAED;;OAEG;IACH,mBAAmB;QACjB,OAAO,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAA;IAChD,CAAC;IAED;;OAEG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,mBAAmB,EAAE,KAAK,IAAI,CAAC,aAAa,CAAA;IAC1D,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,OAAM;SACP;QAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAA;QAChE,MAAM,UAAU,GAAG,CAAC,GAAG,GAAG,CAAC,gBAAgB,GAAG,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,OAAO,CACxE,CAAC,CACF,CAAA;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;QAC/C,MAAM,aAAa,GAAG,CACpB,gBAAgB;YAChB,CAAC,IAAI,GAAG,IAAI,CAAC;YACb,CAAC,WAAW,GAAG,IAAI,CAAC,CACrB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;QAEZ,IAAI,CAAC,IAAI,CACP,YAAY,gBAAgB,OAAO,IAAI,CAAC,aAAa,KAAK,UAAU,OAAO,aAAa,UAAU,CACnG,CAAA;QAED,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE;YACjB,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAA;SAC9B;IACH,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,QAA+B,EAAE,EAAE;YACzC,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAA;QAC7C,CAAC,CAAA;IACH,CAAC;IAED;;;;OAIG;IACH,iBAAiB,CAAC,SAAS,GAAG,IAAI;QAChC,MAAM,eAAe,GAAG,GAAS,EAAE;YACjC,IAAI,CAAC,OAAO,EAAE,CAAA;YAEd,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;aAC5D;QACH,CAAC,CAAA;QAED,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;IAC7D,CAAC;IAED;;;;OAIG;IACH,gBAAgB;QACd,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,YAAY,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;YAChC,IAAI,CAAC,aAAa,GAAG,SAAS,CAAA;SAC/B;QAED,IAAI,CAAC,OAAO,EAAE,CAAA;IAChB,CAAC;CACF;AAhID,4CAgIC;AAED;;;;;GAKG;AACH,SAAsB,uBAAuB,CAC3C,eAAuB,EACvB,WAAmB;;QAEnB,MAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACrD,MAAM,UAAU,GAAG,IAAI,wBAAU,CAAC,eAAe,CAAC,CAAA;QAClD,MAAM,gBAAgB,GAAG,MAAM,sCAAuB,CACpD,eAAe,EACf,GAAS,EAAE,gDAAC,OAAA,UAAU,CAAC,GAAG,CAAC,eAAe,CAAC,CAAA,GAAA,CAC5C,CAAA;QAED,yDAAyD;QACzD,gBAAgB,CAAC,OAAO,CAAC,MAAM,CAAC,UAAU,CAAC,yBAAa,EAAE,GAAG,EAAE;YAC7D,gBAAgB,CAAC,OAAO,CAAC,OAAO,EAAE,CAAA;YAClC,IAAI,CAAC,KAAK,CAAC,6CAA6C,yBAAa,KAAK,CAAC,CAAA;QAC7E,CAAC,CAAC,CAAA;QAEF,MAAM,oBAAoB,CAAC,gBAAgB,EAAE,WAAW,CAAC,CAAA;QAEzD,0BAA0B;QAC1B,MAAM,mBAAmB,GAAG,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAA;QAE9E,IAAI,mBAAmB,EAAE;YACvB,MAAM,cAAc,GAAG,QAAQ,CAAC,mBAAmB,CAAC,CAAA;YACpD,MAAM,YAAY,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;YAEjE,IAAI,YAAY,KAAK,cAAc,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,4CAA4C,cAAc,uBAAuB,YAAY,EAAE,CAChG,CAAA;aACF;SACF;aAAM;YACL,IAAI,CAAC,KAAK,CAAC,uDAAuD,CAAC,CAAA;SACpE;IACH,CAAC;CAAA;A
AlCD,0DAkCC;AAED;;;;;;;GAOG;AACH,SAAsB,uBAAuB,CAC3C,eAAuB,EACvB,WAAmB,EACnB,OAAwB;;;QAExB,MAAM,MAAM,GAAG,IAAI,8BAAe,CAAC,eAAe,EAAE,SAAS,EAAE;YAC7D,YAAY,EAAE;gBACZ,6DAA6D;gBAC7D,mDAAmD;gBACnD,cAAc,EAAE,OAAO,CAAC,WAAW;aACpC;SACF,CAAC,CAAA;QAEF,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,aAAa,EAAE,CAAA;QAC/C,MAAM,aAAa,SAAG,UAAU,CAAC,aAAa,mCAAI,CAAC,CAAC,CAAA;QAEpD,IAAI,aAAa,GAAG,CAAC,EAAE;YACrB,oFAAoF;YACpF,2BAA2B;YAC3B,IAAI,CAAC,KAAK,CACR,0EAA0E,CAC3E,CAAA;YAED,MAAM,uBAAuB,CAAC,eAAe,EAAE,WAAW,CAAC,CAAA;SAC5D;aAAM;YACL,4EAA4E;YAC5E,iFAAiF;YACjF,EAAE;YACF,mFAAmF;YACnF,gEAAgE;YAChE,oGAAoG;YACpG,MAAM,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,MAAM,CAAC,SAAS,CAAC,UAAU,CAAC,CAAA;YACxE,MAAM,gBAAgB,GAAG,IAAI,gBAAgB,CAAC,aAAa,CAAC,CAAA;YAE5D,MAAM,EAAE,GAAG,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,GAAG,CAAC,CAAA;YAExC,IAAI;gBACF,gBAAgB,CAAC,iBAAiB,EAAE,CAAA;gBAEpC,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,EAAE;oBACjC,MAAM,YAAY,GAChB,gBAAgB,CAAC,aAAa,GAAG,gBAAgB,CAAC,WAAW,CAAA;oBAE/D,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAC1B,cAAc,EACd,aAAa,GAAG,YAAY,CAC7B,CAAA;oBAED,gBAAgB,CAAC,WAAW,CAAC,WAAW,CAAC,CAAA;oBAEzC,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,gBAAgB,CAC1C,YAAY,EACZ,WAAW,EACX;wBACE,WAAW,EAAE,OAAO,CAAC,mBAAmB;wBACxC,UAAU,EAAE,gBAAgB,CAAC,UAAU,EAAE;qBAC1C,CACF,CAAA;oBAED,EAAE,CAAC,aAAa,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;iBAC7B;aACF;oBAAS;gBACR,gBAAgB,CAAC,gBAAgB,EAAE,CAAA;gBACnC,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,CAAA;aACjB;SACF;;CACF;AAlED,0DAkEC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/requestUtils.d.ts b/node_modules/@actions/cache/lib/internal/requestUtils.d.ts new file mode 100644 index 00000000..5ca50849 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/requestUtils.d.ts @@ -0,0 +1,8 @@ +import { HttpClientResponse } from '@actions/http-client'; +import { ITypedResponseWithError } from './contracts'; +export declare function isSuccessStatusCode(statusCode?: number): boolean; +export declare function isServerErrorStatusCode(statusCode?: number): boolean; +export declare function isRetryableStatusCode(statusCode?: number): boolean; +export declare function retry(name: string, method: () => Promise, getStatusCode: (arg0: T) => number | undefined, maxAttempts?: number, delay?: number, onError?: ((arg0: Error) => T | undefined) | undefined): Promise; +export declare function retryTypedResponse(name: string, method: () => Promise>, maxAttempts?: number, delay?: number): Promise>; +export declare function retryHttpClientResponse(name: string, method: () => Promise, maxAttempts?: number, delay?: number): Promise; diff --git a/node_modules/@actions/cache/lib/internal/requestUtils.js b/node_modules/@actions/cache/lib/internal/requestUtils.js new file mode 100644 index 00000000..42c2aba7 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/requestUtils.js @@ -0,0 +1,120 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const http_client_1 = require("@actions/http-client"); +const constants_1 = require("./constants"); +function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode >= 200 && statusCode < 300; +} +exports.isSuccessStatusCode = isSuccessStatusCode; +function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} +exports.isServerErrorStatusCode = isServerErrorStatusCode; +function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); +} +exports.isRetryableStatusCode = isRetryableStatusCode; +function sleep(milliseconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, milliseconds)); + }); +} +function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) { + return __awaiter(this, void 0, void 0, function* () { + let errorMessage = ''; + let attempt = 1; + while (attempt <= maxAttempts) { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + try { + response = yield method(); + } + catch (error) { + if (onError) { + response = onError(error); + } + isRetryable = true; + errorMessage = error.message; + } + if (response) { + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + } + if (statusCode) { + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + yield sleep(delay); + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, + // If the error object contains the statusCode property, extract it and return + // an TypedResponse so it can be processed by the retry logic. 
+ (error) => { + if (error instanceof http_client_1.HttpClientError) { + return { + statusCode: error.statusCode, + result: null, + headers: {}, + error + }; + } + else { + return undefined; + } + }); + }); +} +exports.retryTypedResponse = retryTypedResponse; +function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); + }); +} +exports.retryHttpClientResponse = retryHttpClientResponse; +//# sourceMappingURL=requestUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/requestUtils.js.map b/node_modules/@actions/cache/lib/internal/requestUtils.js.map new file mode 100644 index 00000000..67cfcaa3 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/requestUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAI6B;AAC7B,2CAAmE;AAGnE,SAAgB,mBAAmB,CAAC,UAAmB;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,GAAG,CAAA;AAC9C,CAAC;AALD,kDAKC;AAED,SAAgB,uBAAuB,CAAC,UAAmB;IACzD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,IAAI,CAAA;KACZ;IACD,OAAO,UAAU,IAAI,GAAG,CAAA;AAC1B,CAAC;AALD,0DAKC;AAED,SAAgB,qBAAqB,CAAC,UAAmB;IACvD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,MAAM,oBAAoB,GAAG;QAC3B,uBAAS,CAAC,UAAU;QACpB,uBAAS,CAAC,kBAAkB;QAC5B,uBAAS,CAAC,cAAc;KACzB,CAAA;IACD,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;AAClD,CAAC;AAVD,sDAUC;AAED,SAAe,KAAK,CAAC,YAAoB;;QACvC,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;IAClE,CAAC;CAAA;AAED,SAAsB,KAAK,CACzB,IAAY,EACZ,MAAwB,EACxB,aAA8C,EAC9C,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB,EACzB,UAAwD,SAAS;;QAEjE,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI,QAAQ,GAAkB,SAAS,CAAA;YACvC,IAAI,UAAU,GAAuB,SAAS,CAAA;YAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;YAEvB,IAAI;gBACF,QAAQ,GAAG,MAAM,MAAM,EAAE,CAAA;aAC1B;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,OAAO,EAAE;oBACX,QAAQ,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;iBAC1B;gBAED,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,QAAQ,EAAE;gBACZ,UAAU,GAAG,aAAa,CAAC,QAAQ,CAAC,CAAA;gBAEpC,IAAI,CAAC,uBAAuB,CAAC,UAAU,CAAC,EAAE;oBACxC,OAAO,QAAQ,CAAA;iBAChB;aACF;YAED,IAAI,UAAU,EAAE;gBACd,WAAW,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,gCAAgC,UAAU,EAAE,CAAA;aAC5D;YAED,IAAI,CAAC,KAAK,CACR,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC9C,MAAK;aACN;YAED,MAAM,KAAK,CAAC,KAAK,CAAC,CAAA;YAClB,OAAO,EAAE,CAAA;SACV;QAED,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AAtDD,sBAsDC;AAED,SAAsB,kBAAkB,CACtC,IAAY,EACZ,MAAiD,EACjD,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAAoC,EAAE,EAAE,CAAC,QAAQ,CAAC,UAAU,EAC7D,WAAW,EACX,KAAK;QACL,8EAA8E;QAC9E,iEAAiE;QACjE,CAAC,KAAY,EAAE,EAAE;YACf,IAAI,KAAK,YAAY,6BAAe,EAAE;gBACpC,OAAO;oBACL,UAAU,EAAE,KAAK,CAAC,UAAU;oBAC5B,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;oBACX,KAAK;iBACN,CAAA;aACF;iBAAM;gBACL,OAAO,SAAS,CAAA;aACjB;QACH,CAAC,CACF,CAAA;IACH,CAAC;CAAA;AA3BD,gDA2BC;AAED,SAAsB,uBAAuB,CAC3C,IAAY,EACZ,MAAyC,EACzC,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAA4B,EAAE,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU
,EAC7D,WAAW,EACX,KAAK,CACN,CAAA;IACH,CAAC;CAAA;AAbD,0DAaC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/tar.d.ts b/node_modules/@actions/cache/lib/internal/tar.d.ts new file mode 100644 index 00000000..027b2e17 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/tar.d.ts @@ -0,0 +1,4 @@ +import { CompressionMethod } from './constants'; +export declare function extractTar(archivePath: string, compressionMethod: CompressionMethod): Promise; +export declare function createTar(archiveFolder: string, sourceDirectories: string[], compressionMethod: CompressionMethod): Promise; +export declare function listTar(archivePath: string, compressionMethod: CompressionMethod): Promise; diff --git a/node_modules/@actions/cache/lib/internal/tar.js b/node_modules/@actions/cache/lib/internal/tar.js new file mode 100644 index 00000000..5fbc67f9 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/tar.js @@ -0,0 +1,164 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const exec_1 = require("@actions/exec"); +const io = __importStar(require("@actions/io")); +const fs_1 = require("fs"); +const path = __importStar(require("path")); +const utils = __importStar(require("./cacheUtils")); +const constants_1 = require("./constants"); +function getTarPath(args, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + switch (process.platform) { + case 'win32': { + const systemTar = `${process.env['windir']}\\System32\\tar.exe`; + if (compressionMethod !== constants_1.CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local'); + } + else if (fs_1.existsSync(systemTar)) { + return systemTar; + } + else if (yield utils.isGnuTarInstalled()) { + args.push('--force-local'); + } + break; + } + case 'darwin': { + const gnuTar = yield io.which('gtar', false); + if (gnuTar) { + // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 + args.push('--delay-directory-restore'); + return gnuTar; + } + break; + } + default: + break; + } + return yield io.which('tar', true); + }); +} +function execTar(args, compressionMethod, cwd) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + }); +} +function getWorkingDirectory() { + var _a; + return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); +} +function extractTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + const workingDirectory = getWorkingDirectory(); + yield io.mkdirP(workingDirectory); + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -d --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -d']; + default: + return ['-z']; + } + } + const args = [ + ...getCompressionProgram(), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ]; + yield execTar(args, compressionMethod); + }); +} +exports.extractTar = extractTar; +function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Write source directories to manifest.txt to avoid command length limits + const manifestFilename = 'manifest.txt'; + const cacheFileName = utils.getCacheFileName(compressionMethod); + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); + const workingDirectory = getWorkingDirectory(); + // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -T0 --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -T0']; + default: + return ['-z']; + } + } + const args = [ + '--posix', + ...getCompressionProgram(), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ]; + yield execTar(args, compressionMethod, archiveFolder); + }); +} +exports.createTar = createTar; +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. + // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. 
+ function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -d --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -d']; + default: + return ['-z']; + } + } + const args = [ + ...getCompressionProgram(), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ]; + yield execTar(args, compressionMethod); + }); +} +exports.listTar = listTar; +//# sourceMappingURL=tar.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/tar.js.map b/node_modules/@actions/cache/lib/internal/tar.js.map new file mode 100644 index 00000000..d3bf367b --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/tar.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tar.js","sourceRoot":"","sources":["../../src/internal/tar.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,wCAAkC;AAClC,gDAAiC;AACjC,2BAA4C;AAC5C,2CAA4B;AAC5B,oDAAqC;AACrC,2CAA6C;AAE7C,SAAe,UAAU,CACvB,IAAc,EACd,iBAAoC;;QAEpC,QAAQ,OAAO,CAAC,QAAQ,EAAE;YACxB,KAAK,OAAO,CAAC,CAAC;gBACZ,MAAM,SAAS,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,qBAAqB,CAAA;gBAC/D,IAAI,iBAAiB,KAAK,6BAAiB,CAAC,IAAI,EAAE;oBAChD,gFAAgF;oBAChF,wDAAwD;oBACxD,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;iBAC3B;qBAAM,IAAI,eAAU,CAAC,SAAS,CAAC,EAAE;oBAChC,OAAO,SAAS,CAAA;iBACjB;qBAAM,IAAI,MAAM,KAAK,CAAC,iBAAiB,EAAE,EAAE;oBAC1C,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;iBAC3B;gBACD,MAAK;aACN;YACD,KAAK,QAAQ,CAAC,CAAC;gBACb,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;gBAC5C,IAAI,MAAM,EAAE;oBACV,0HAA0H;oBAC1H,IAAI,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAA;oBACtC,OAAO,MAAM,CAAA;iBACd;gBACD,MAAK;aACN;YACD;gBACE,MAAK;SACR;QACD,OAAO,MAAM,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACpC,CAAC;CAAA;AAED,SAAe,OAAO,CACpB,IAAc,EACd,iBAAoC,EACpC,GAAY;;QAEZ,IAAI;YACF,MAAM,WAAI,CAAC,IAAI,MAAM,UAAU,CAAC,IAAI,EAAE,iBAAiB,CAAC,GAAG,EAAE,IAAI,EAAE,EAAC,GAAG,EAAC,CAAC,CAAA;SAC1E;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,0BAA0B,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EAAE,CAAC,CAAA;SAC5D;IACH,CAAC;CAAA;AAED,SAAS,mBAAmB;;IAC1B,aAAO,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;AACzD,CAAC;AAED,SAAsB,UAAU,CAC9B,WAAmB,EACnB,iBAAoC;;QAEpC,uCAAuC;QACvC,MAAM,gBAAgB,GAAG,mBAAmB,EAAE,CAAA;QAC9C,MAAM,EAAE,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAA;QACjC,mBAAmB;QACnB,iHAAiH;QACjH,oEAAoE;QACpE,SAAS,qBAAqB;YAC5B,QAAQ,iBAAiB,EAAE;gBACzB,KAAK,6BAAiB,CAAC,IAAI;oBACzB,OAAO,CAAC,wBAAwB,EAAE,mBAAmB,CAAC,CAAA;gBACxD,KAAK,6BAAiB,CAAC,eAAe;oBACpC,OAAO,CAAC,wBAAwB,EAAE,SAAS,CAAC,CAAA;gBAC9C;oBACE,OAAO,CAAC,IAAI,CAAC,CAAA;aAChB;QACH,CAAC;QACD,MAAM,IAAI,GAAG;YACX,GAAG,qBAAqB,EAAE;YAC1B,KAAK;YACL,WAAW,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;YAC1D,IAAI;YACJ,IAAI;YACJ,gBAAgB,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;SAChE,CAAA;QACD,MAAM,OAAO,CAAC,IAAI,EAAE,iBAAiB,CAAC,CAAA;IACxC,CAAC;CAAA;AA7BD,gCA6BC;AAED,SAAsB,SAAS,CAC7B,aAAqB,EACrB,iBAA2B,EAC3B,iBAAoC;;QAEpC,0EAA0E;QAC1E,MAAM,gBAAgB,GAAG,cAAc,CAAA;QACvC,MAAM,aAAa,GAAG,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAAA;QAC/D,kBAAa,CACX,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,gBAAgB,CAAC,EAC1C,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,CAC7B,CAAA;QACD,MAAM,gBAAgB,GAAG,mBAAmB,EAAE,CAAA;QAE9C,+GAA+G;QAC/G,iHAAiH;QACjH,oEAAoE;QACpE,0GAA0G;QAC1G,SAAS,qBAAqB;YAC5B,QAAQ,iBAAiB,EAAE;gBACzB,KAAK,6BAAiB,CAAC,IAAI;oBACzB,OAAO,CAAC,wBAAwB,EAAE,oBAAoB,CAAC,CAAA;gBACzD,KAAK,6BAAiB,CAAC,eAAe;oBACpC,OAAO,CAAC,wBAAwB,EAAE,UAAU,CAAC,CAAA;gBAC/C;oBACE,OAAO,C
AAC,IAAI,CAAC,CAAA;aAChB;QACH,CAAC;QACD,MAAM,IAAI,GAAG;YACX,SAAS;YACT,GAAG,qBAAqB,EAAE;YAC1B,KAAK;YACL,aAAa,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;YAC5D,IAAI;YACJ,IAAI;YACJ,gBAAgB,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;YAC/D,cAAc;YACd,gBAAgB;SACjB,CAAA;QACD,MAAM,OAAO,CAAC,IAAI,EAAE,iBAAiB,EAAE,aAAa,CAAC,CAAA;IACvD,CAAC;CAAA;AAxCD,8BAwCC;AAED,SAAsB,OAAO,CAC3B,WAAmB,EACnB,iBAAoC;;QAEpC,mBAAmB;QACnB,wDAAwD;QACxD,2DAA2D;QAC3D,oEAAoE;QACpE,SAAS,qBAAqB;YAC5B,QAAQ,iBAAiB,EAAE;gBACzB,KAAK,6BAAiB,CAAC,IAAI;oBACzB,OAAO,CAAC,wBAAwB,EAAE,mBAAmB,CAAC,CAAA;gBACxD,KAAK,6BAAiB,CAAC,eAAe;oBACpC,OAAO,CAAC,wBAAwB,EAAE,SAAS,CAAC,CAAA;gBAC9C;oBACE,OAAO,CAAC,IAAI,CAAC,CAAA;aAChB;QACH,CAAC;QACD,MAAM,IAAI,GAAG;YACX,GAAG,qBAAqB,EAAE;YAC1B,KAAK;YACL,WAAW,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;YAC1D,IAAI;SACL,CAAA;QACD,MAAM,OAAO,CAAC,IAAI,EAAE,iBAAiB,CAAC,CAAA;IACxC,CAAC;CAAA;AAzBD,0BAyBC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/options.d.ts b/node_modules/@actions/cache/lib/options.d.ts new file mode 100644 index 00000000..0cb0d6e1 --- /dev/null +++ b/node_modules/@actions/cache/lib/options.d.ts @@ -0,0 +1,56 @@ +/** + * Options to control cache upload + */ +export interface UploadOptions { + /** + * Number of parallel cache upload + * + * @default 4 + */ + uploadConcurrency?: number; + /** + * Maximum chunk size in bytes for cache upload + * + * @default 32MB + */ + uploadChunkSize?: number; +} +/** + * Options to control cache download + */ +export interface DownloadOptions { + /** + * Indicates whether to use the Azure Blob SDK to download caches + * that are stored on Azure Blob Storage to improve reliability and + * performance + * + * @default true + */ + useAzureSdk?: boolean; + /** + * Number of parallel downloads (this option only applies when using + * the Azure SDK) + * + * @default 8 + */ + downloadConcurrency?: number; + /** + * Maximum time for each download request, in milliseconds (this + * option only applies when using the Azure SDK) + * + * @default 30000 + */ + timeoutInMs?: number; +} +/** + * Returns a copy of the upload options with defaults filled in. + * + * @param copy the original upload options + */ +export declare function getUploadOptions(copy?: UploadOptions): UploadOptions; +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +export declare function getDownloadOptions(copy?: DownloadOptions): DownloadOptions; diff --git a/node_modules/@actions/cache/lib/options.js b/node_modules/@actions/cache/lib/options.js new file mode 100644 index 00000000..1a7877a6 --- /dev/null +++ b/node_modules/@actions/cache/lib/options.js @@ -0,0 +1,62 @@ +"use strict"; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +/** + * Returns a copy of the upload options with defaults filled in. 
+ * + * @param copy the original upload options + */ +function getUploadOptions(copy) { + const result = { + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.uploadConcurrency === 'number') { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === 'number') { + result.uploadChunkSize = copy.uploadChunkSize; + } + } + core.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; +} +exports.getUploadOptions = getUploadOptions; +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +function getDownloadOptions(copy) { + const result = { + useAzureSdk: true, + downloadConcurrency: 8, + timeoutInMs: 30000 + }; + if (copy) { + if (typeof copy.useAzureSdk === 'boolean') { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.downloadConcurrency === 'number') { + result.downloadConcurrency = copy.downloadConcurrency; + } + if (typeof copy.timeoutInMs === 'number') { + result.timeoutInMs = copy.timeoutInMs; + } + } + core.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core.debug(`Download concurrency: ${result.downloadConcurrency}`); + core.debug(`Request timeout (ms): ${result.timeoutInMs}`); + return result; +} +exports.getDownloadOptions = getDownloadOptions; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/options.js.map b/node_modules/@actions/cache/lib/options.js.map new file mode 100644 index 00000000..2cda122e --- /dev/null +++ b/node_modules/@actions/cache/lib/options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"options.js","sourceRoot":"","sources":["../src/options.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAqC;AAkDrC;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,IAAoB;IACnD,MAAM,MAAM,GAAkB;QAC5B,iBAAiB,EAAE,CAAC;QACpB,eAAe,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI;KAClC,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,iBAAiB,KAAK,QAAQ,EAAE;YAC9C,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAA;SAClD;QAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE;YAC5C,MAAM,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAA;SAC9C;KACF;IAED,IAAI,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAA;IAC7D,IAAI,CAAC,KAAK,CAAC,sBAAsB,MAAM,CAAC,eAAe,EAAE,CAAC,CAAA;IAE1D,OAAO,MAAM,CAAA;AACf,CAAC;AApBD,4CAoBC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,IAAsB;IACvD,MAAM,MAAM,GAAoB;QAC9B,WAAW,EAAE,IAAI;QACjB,mBAAmB,EAAE,CAAC;QACtB,WAAW,EAAE,KAAK;KACnB,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;YAChD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;SACtD;QAED,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;YACxC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;KACF;IAED,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAA;IACjE,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAEzD,OAAO,MAAM,CAAA;AACf,CAAC;AA1BD,gDA0BC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/LICENSE b/node_modules/@actions/cache/node_modules/@actions/http-client/LICENSE new file mode 100644 index 00000000..5823a51c --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/LICENSE @@ -0,0 +1,21 @@ +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. 
+ +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/README.md b/node_modules/@actions/cache/node_modules/@actions/http-client/README.md new file mode 100644 index 00000000..7e06adeb --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/README.md @@ -0,0 +1,73 @@ +# `@actions/http-client` + +A lightweight HTTP client optimized for building actions. + +## Features + + - HTTP client with TypeScript generics and async/await/Promises + - Typings included! + - [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner + - Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+. + - Basic, Bearer and PAT Support out of the box. Extensible handlers for others. + - Redirects supported + +Features and releases [here](./RELEASES.md) + +## Install + +``` +npm install @actions/http-client --save +``` + +## Samples + +See the [tests](./__tests__) for detailed examples. + +## Errors + +### HTTP + +The HTTP client does not throw unless truly exceptional. + +* A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body. +* Redirects (3xx) will be followed by default. + +See the [tests](./__tests__) for detailed examples. + +## Debugging + +To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment varible: + +```shell +export NODE_DEBUG=http +``` + +## Node support + +The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+. + +## Support and Versioning + +We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat). + +## Contributing + +We welcome PRs. Please create an issue and if applicable, a design before proceeding with code. 
+
+once:
+
+```
+npm install
+```
+
+To build:
+
+```
+npm run build
+```
+
+To run all tests:
+
+```
+npm test
+```
diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.d.ts b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.d.ts
new file mode 100644
index 00000000..8cc9fc3d
--- /dev/null
+++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.d.ts
@@ -0,0 +1,26 @@
+/// <reference types="node" />
+import * as http from 'http';
+import * as ifm from './interfaces';
+import { HttpClientResponse } from './index';
+export declare class BasicCredentialHandler implements ifm.RequestHandler {
+ username: string;
+ password: string;
+ constructor(username: string, password: string);
+ prepareRequest(options: http.RequestOptions): void;
+ canHandleAuthentication(): boolean;
+ handleAuthentication(): Promise<HttpClientResponse>;
+}
+export declare class BearerCredentialHandler implements ifm.RequestHandler {
+ token: string;
+ constructor(token: string);
+ prepareRequest(options: http.RequestOptions): void;
+ canHandleAuthentication(): boolean;
+ handleAuthentication(): Promise<HttpClientResponse>;
+}
+export declare class PersonalAccessTokenCredentialHandler implements ifm.RequestHandler {
+ token: string;
+ constructor(token: string);
+ prepareRequest(options: http.RequestOptions): void;
+ canHandleAuthentication(): boolean;
+ handleAuthentication(): Promise<HttpClientResponse>;
+}
diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.js b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.js
new file mode 100644
index 00000000..2c150a3d
--- /dev/null
+++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.js
@@ -0,0 +1,81 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ?
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.js.map b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.js.map new file mode 100644 index 00000000..7d3a18af --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/auth.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAI/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA3BD,oFA2BC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.d.ts b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.d.ts new file mode 100644 index 00000000..fe733d14 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.d.ts @@ -0,0 +1,123 @@ +/// +import * as http from 'http'; +import * as ifm from './interfaces'; +export declare enum HttpCodes { + OK = 200, + MultipleChoices = 300, + MovedPermanently = 301, + ResourceMoved = 302, + SeeOther = 303, + NotModified = 304, + UseProxy = 305, + SwitchProxy = 306, + TemporaryRedirect = 307, + PermanentRedirect = 308, + BadRequest = 400, + Unauthorized = 401, + PaymentRequired = 402, + Forbidden = 403, + NotFound = 404, + MethodNotAllowed = 405, + NotAcceptable = 406, + ProxyAuthenticationRequired = 407, + RequestTimeout = 408, + Conflict = 409, + Gone = 410, + TooManyRequests = 429, + InternalServerError = 500, + NotImplemented = 501, + BadGateway = 502, + ServiceUnavailable = 503, + GatewayTimeout = 504 +} +export declare enum Headers { + Accept = "accept", + ContentType = "content-type" +} +export declare enum MediaTypes { + ApplicationJson = "application/json" +} +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com
+ */
+export declare function getProxyUrl(serverUrl: string): string;
+export declare class HttpClientError extends Error {
+ constructor(message: string, statusCode: number);
+ statusCode: number;
+ result?: any;
+}
+export declare class HttpClientResponse {
+ constructor(message: http.IncomingMessage);
+ message: http.IncomingMessage;
+ readBody(): Promise<string>;
+}
+export declare function isHttps(requestUrl: string): boolean;
+export declare class HttpClient {
+ userAgent: string | undefined;
+ handlers: ifm.RequestHandler[];
+ requestOptions: ifm.RequestOptions | undefined;
+ private _ignoreSslError;
+ private _socketTimeout;
+ private _allowRedirects;
+ private _allowRedirectDowngrade;
+ private _maxRedirects;
+ private _allowRetries;
+ private _maxRetries;
+ private _agent;
+ private _proxyAgent;
+ private _keepAlive;
+ private _disposed;
+ constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions);
+ options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
+ get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
+ del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
+ post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
+ patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
+ put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
+ head(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
+ sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
+ /**
+ * Gets a typed object from an endpoint
+ * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
+ */
+ getJson<T>(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
+ postJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
+ putJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
+ patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
+ /**
+ * Makes a raw http request.
+ * All other methods such as get, post, patch, and request ultimately call this.
+ * Prefer get, del, post and patch
+ */
+ request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream | null, headers?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
+ /**
+ * Needs to be called if keepAlive is set to true in request options.
+ */
+ dispose(): void;
+ /**
+ * Raw request.
+ * @param info
+ * @param data
+ */
+ requestRaw(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>;
+ /**
+ * Raw request with callback.
+ * @param info
+ * @param data
+ * @param onResult
+ */
+ requestRawWithCallback(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null, onResult: (err?: Error, res?: HttpClientResponse) => void): void;
+ /**
+ * Gets an http agent. This function is useful when you need an http agent that handles
+ * routing through a proxy server - depending upon the url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent.
For example, https://api.github.com + */ + getAgent(serverUrl: string): http.Agent; + private _prepareRequest; + private _mergeHeaders; + private _getExistingOrDefaultHeader; + private _getAgent; + private _performExponentialBackoff; + private _processResponse; +} diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.js b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.js new file mode 100644 index 00000000..a1b7d032 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.js @@ -0,0 +1,605 @@ +"use strict"; +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(require("http")); +const https = __importStar(require("https")); +const pm = __importStar(require("./proxy")); +const tunnel = __importStar(require("tunnel")); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders 
|| {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
+ if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.js.map b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.js.map new file mode 100644 index 00000000..ca8ea415 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA,uDAAuD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEvD,2CAA4B;AAC5B,6CAA8B;AAG9B,4CAA6B;AAC7B,+CAAgC;AAEhC,IAAY,SA4BX;AA5BD,WAAY,SAAS;IACnB,uCAAQ,CAAA;IACR,iEAAqB,CAAA;IACrB,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,qEAAuB,CAAA;IACvB,qEAAuB,CAAA;IACvB,uDAAgB,CAAA;IAChB,2DAAkB,CAAA;IAClB,iEAAqB,CAAA;IACrB,qDAAe,CAAA;IACf,mDAAc,CAAA;IACd,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,yFAAiC,CAAA;IACjC,+DAAoB,CAAA;IACpB,mDAAc,CAAA;IACd,2CAAU,CAAA;IACV,iEAAqB,CAAA;IACrB,yEAAyB,CAAA;IACzB,+DAAoB,CAAA;IACpB,uDAAgB,CAAA;IAChB,uEAAwB,CAAA;IACxB,+DAAoB,CAAA;AACtB,CAAC,EA5BW,SAAS,GAAT,iBAAS,KAAT,iBAAS,QA4BpB;AAED,IAAY,OAGX;AAHD,WAAY,OAAO;IACjB,4BAAiB,CAAA;IACjB,uCAA4B,CAAA;AAC9B,CAAC,EAHW,OAAO,GAAP,eAAO,KAAP,eAAO,QAGlB;AAED,IAAY,UAEX;AAFD,WAAY,UAAU;IACpB,kDAAoC,CAAA;AACtC,CAAC,EAFW,UAAU,GAAV,kBAAU,KAAV,kBAAU,QAErB;AAED;;;GAGG;AACH,SAAgB,WAAW,CAAC,SAAiB;IAC3C,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAA;IACnD,OAAO,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;AACtC,CAAC;AAHD,kCAGC;AAED,MAAM,iBAAiB,GAAa;IAClC,SAAS,CAAC,gBAAgB;IAC1B,SAAS,CAAC,aAAa;IACvB,SAAS,CAAC,QAAQ;IAClB,SAAS,CAAC,iBAAiB;IAC3B,SAAS,CAAC,iBAAiB;CAC5B,CAAA;AACD,MAAM,sBAAsB,GAAa;IACvC,SAAS,CAAC,UAAU;IACpB,SAAS,CAAC,kBAAkB;IAC5B,SAAS,CAAC,cAAc;CACzB,CAAA;AACD,MAAM,kBAAkB,GAAa,CAAC,SAAS,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAA;AACzE,MAAM,yBAAyB,GAAG,EAAE,CAAA;AACpC,MAAM,2BAA2B,GAAG,CAAC,CAAA;AAErC,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe,EAAE,UAAkB;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAIF;AAVD,0CAUC;AAED,MAAa,kBAAkB;IAC7B,YAAY,OAA6B;QACvC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAGK,QAAQ;;YACZ,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBAE5B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAA;gBACzC,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC1B,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAA;gBAC5B,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAnBD,gDAmBC;AAED,SAAgB,OAAO,CAAC,UAAkB;IACxC,MAAM,SAAS,GAAQ,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;IAC1C,OAAO,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;AACxC,CAAC;AAHD,0BAGC;AAED,MAAa,UAAU;IAiBrB,YACE,SAAkB,EAC
lB,QAA+B,EAC/B,cAAmC;QAf7B,oBAAe,GAAG,KAAK,CAAA;QAEvB,oBAAe,GAAG,IAAI,CAAA;QACtB,4BAAuB,GAAG,KAAK,CAAA;QAC/B,kBAAa,GAAG,EAAE,CAAA;QAClB,kBAAa,GAAG,KAAK,CAAA;QACrB,gBAAW,GAAG,CAAC,CAAA;QAGf,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QAOvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;QAC9B,IAAI,CAAC,cAAc,GAAG,cAAc,CAAA;QACpC,IAAI,cAAc,EAAE;YAClB,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC,aAAa,CAAA;YAElD,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,cAAc,CAAC,sBAAsB,IAAI,IAAI,EAAE;gBACjD,IAAI,CAAC,uBAAuB,GAAG,cAAc,CAAC,sBAAsB,CAAA;aACrE;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,YAAY,EAAE,CAAC,CAAC,CAAA;aAC9D;YAED,IAAI,cAAc,CAAC,SAAS,IAAI,IAAI,EAAE;gBACpC,IAAI,CAAC,UAAU,GAAG,cAAc,CAAC,SAAS,CAAA;aAC3C;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,cAAc,CAAC,YAAY,CAAA;aACjD;YAED,IAAI,cAAc,CAAC,UAAU,IAAI,IAAI,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,cAAc,CAAC,UAAU,CAAA;aAC7C;SACF;IACH,CAAC;IAEK,OAAO,CACX,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC3E,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC1E,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,KAAK,CACT,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACzE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,UAAU,CACd,IAAY,EACZ,UAAkB,EAClB,MAA6B,EAC7B,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,iBAAiB,CAAC,CAAA;QAClE,CAAC;KAAA;IAED;;;OAGG;IACG,OAAO,CACX,UAAkB,EAClB,oBAA8C,EAAE;;YAEhD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,QAAQ,CACZ,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,IAAI,CAC7C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,OAAO,CACX,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAA
uB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,SAAS,CACb,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,KAAK,CAC9C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAED;;;;OAIG;IACG,OAAO,CACX,IAAY,EACZ,UAAkB,EAClB,IAA2C,EAC3C,OAAkC;;YAElC,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;aACrD;YAED,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;YACrC,IAAI,IAAI,GAAoB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,CAAC,CAAA;YAE1E,oEAAoE;YACpE,MAAM,QAAQ,GACZ,IAAI,CAAC,aAAa,IAAI,kBAAkB,CAAC,QAAQ,CAAC,IAAI,CAAC;gBACrD,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC;gBACtB,CAAC,CAAC,CAAC,CAAA;YACP,IAAI,QAAQ,GAAG,CAAC,CAAA;YAEhB,IAAI,QAAwC,CAAA;YAC5C,GAAG;gBACD,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;gBAE5C,4CAA4C;gBAC5C,IACE,QAAQ;oBACR,QAAQ,CAAC,OAAO;oBAChB,QAAQ,CAAC,OAAO,CAAC,UAAU,KAAK,SAAS,CAAC,YAAY,EACtD;oBACA,IAAI,qBAAqD,CAAA;oBAEzD,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;wBACnC,IAAI,OAAO,CAAC,uBAAuB,CAAC,QAAQ,CAAC,EAAE;4BAC7C,qBAAqB,GAAG,OAAO,CAAA;4BAC/B,MAAK;yBACN;qBACF;oBAED,IAAI,qBAAqB,EAAE;wBACzB,OAAO,qBAAqB,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;qBACpE;yBAAM;wBACL,+EAA+E;wBAC/E,yCAAyC;wBACzC,OAAO,QAAQ,CAAA;qBAChB;iBACF;gBAED,IAAI,kBAAkB,GAAW,IAAI,CAAC,aAAa,CAAA;gBACnD,OACE,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC3B,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;oBACvD,IAAI,CAAC,eAAe;oBACpB,kBAAkB,GAAG,CAAC,EACtB;oBACA,MAAM,WAAW,GACf,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;oBACtC,IAAI,CAAC,WAAW,EAAE;wBAChB,kDAAkD;wBAClD,MAAK;qBACN;oBACD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAAC,WAAW,CAAC,CAAA;oBAC9C,IACE,SAAS,CAAC,QAAQ,KAAK,QAAQ;wBAC/B,SAAS,CAAC,QAAQ,KAAK,iBAAiB,CAAC,QAAQ;wBACjD,CAAC,IAAI,CAAC,uBAAuB,EAC7B;wBACA,MAAM,IAAI,KAAK,CACb,8KAA8K,CAC/K,CAAA;qBACF;oBAED,qEAAqE;oBACrE,mCAAmC;oBACnC,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBAEzB,mEAAmE;oBACnE,IAAI,iBAAiB,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,EAAE;wBACrD,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;4BAC5B,oCAAoC;4BACpC,IAAI,MAAM,CAAC,WAAW,EAAE,KAAK,eAAe,EAAE;gCAC5C,OAAO,OAAO,CAAC,MAAM,CAAC,CAAA;6BACvB;yBACF;qBACF;oBAED,kDAAkD;oBAClD,IAAI,GAAG,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,iBAAiB,EAAE,OAAO,CAAC,CAAA;oBAC7D,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;oBAC5C,kBAAkB,EAAE,CAAA;iBACrB;gBAED,IACE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC5B,CAAC,sBAAsB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAC7D;oBACA,8DAA8D;oBAC9D,OAAO,QAAQ,CAAA;iBAChB;gBAED,QAAQ,IAAI,CAAC,CAAA;gBAEb,IAAI,QAAQ,GAAG,QAAQ,EAAE;oBACvB,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACzB,MAAM,IAAI,CAAC,0BAA0B,CAAC,QAAQ,CAAC,CAAA;iBAChD;aACF,QAAQ,QAAQ,GAAG,QAAQ,EAAC;YAE7B,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;IAED;;OAEG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;SACtB;QAED,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACvB,CAAC;IAED;;;;OAIG;IACG,UAAU,CACd,IAAqB,EACrB,IAA2C;;YAE3C,OAAO,IAAI,OAAO,CAAqB,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACzD,SAAS,iBAAiB,CAAC,GAAW,EAAE,GAAwB;oBAC9D,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAA;qBACZ;yBAAM,IAAI,CAAC,GAAG,EAAE;wBACf,qDAAqD;wBACrD,MAAM,CAAC,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC,CAAA;qBACnC;yBAAM;wBACL,OAAO,CAAC,GAAG,CA
AC,CAAA;qBACb;gBACH,CAAC;gBAED,IAAI,CAAC,sBAAsB,CAAC,IAAI,EAAE,IAAI,EAAE,iBAAiB,CAAC,CAAA;YAC5D,CAAC,CAAC,CAAA;QACJ,CAAC;KAAA;IAED;;;;;OAKG;IACH,sBAAsB,CACpB,IAAqB,EACrB,IAA2C,EAC3C,QAAyD;QAEzD,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACzB,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,EAAE,CAAA;aAC1B;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACzE;QAED,IAAI,cAAc,GAAG,KAAK,CAAA;QAC1B,SAAS,YAAY,CAAC,GAAW,EAAE,GAAwB;YACzD,IAAI,CAAC,cAAc,EAAE;gBACnB,cAAc,GAAG,IAAI,CAAA;gBACrB,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;aACnB;QACH,CAAC;QAED,MAAM,GAAG,GAAuB,IAAI,CAAC,UAAU,CAAC,OAAO,CACrD,IAAI,CAAC,OAAO,EACZ,CAAC,GAAyB,EAAE,EAAE;YAC5B,MAAM,GAAG,GAAuB,IAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA;YAC3D,YAAY,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;QAC9B,CAAC,CACF,CAAA;QAED,IAAI,MAAkB,CAAA;QACtB,GAAG,CAAC,EAAE,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE;YACtB,MAAM,GAAG,IAAI,CAAA;QACf,CAAC,CAAC,CAAA;QAEF,wEAAwE;QACxE,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,cAAc,IAAI,CAAC,GAAG,KAAK,EAAE,GAAG,EAAE;YACpD,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,GAAG,EAAE,CAAA;aACb;YACD,YAAY,CAAC,IAAI,KAAK,CAAC,oBAAoB,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;QAClE,CAAC,CAAC,CAAA;QAEF,GAAG,CAAC,EAAE,CAAC,OAAO,EAAE,UAAS,GAAG;YAC1B,8BAA8B;YAC9B,0BAA0B;YAC1B,YAAY,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC,CAAC,CAAA;QAEF,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACxB;QAED,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE;gBACf,GAAG,CAAC,GAAG,EAAE,CAAA;YACX,CAAC,CAAC,CAAA;YAEF,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACf;aAAM;YACL,GAAG,CAAC,GAAG,EAAE,CAAA;SACV;IACH,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,SAAiB;QACxB,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;IAClC,CAAC;IAEO,eAAe,CACrB,MAAc,EACd,UAAe,EACf,OAAkC;QAElC,MAAM,IAAI,GAAqC,EAAE,CAAA;QAEjD,IAAI,CAAC,SAAS,GAAG,UAAU,CAAA;QAC3B,MAAM,QAAQ,GAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAC9D,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAA;QACzC,MAAM,WAAW,GAAW,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;QAE/C,IAAI,CAAC,OAAO,GAAwB,EAAE,CAAA;QACtC,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAA;QAC3C,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI;YACrC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC/B,CAAC,CAAC,WAAW,CAAA;QACf,IAAI,CAAC,OAAO,CAAC,IAAI;YACf,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,IAAI,EAAE,CAAC,CAAA;QACjE,IAAI,CAAC,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;QAC5B,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE;YAC1B,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;SACpD;QAED,IAAI,CAAC,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAEnD,+CAA+C;QAC/C,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACrC;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,aAAa,CACnB,OAAkC;QAElC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,OAAO,MAAM,CAAC,MAAM,CAClB,EAAE,EACF,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,EAC1C,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAC7B,CAAA;SACF;QAED,OAAO,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAAA;IACrC,CAAC;IAEO,2BAA2B,CACjC,iBAA2C,EAC3C,MAAc,EACd,QAAgB;QAEhB,IAAI,YAAgC,CAAA;QACpC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAA;SAClE;QACD,OAAO,iBAAiB,CAAC,MAAM,CAAC,IAAI,YAAY,IAAI,QAAQ,CAAA;IAC9D,CAAC;IAEO,SAAS,CAAC,SAAc;QAC9B,IAAI,K
AAK,CAAA;QACT,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAE9C,IAAI,IAAI,CAAC,UAAU,IAAI,QAAQ,EAAE;YAC/B,KAAK,GAAG,IAAI,CAAC,WAAW,CAAA;SACzB;QAED,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,QAAQ,EAAE;YAChC,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;SACpB;QAED,+CAA+C;QAC/C,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAA;SACb;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,IAAI,UAAU,GAAG,GAAG,CAAA;QACpB,IAAI,IAAI,CAAC,cAAc,EAAE;YACvB,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,UAAU,IAAI,IAAI,CAAC,WAAW,CAAC,UAAU,CAAA;SAC3E;QAED,sGAAsG;QACtG,IAAI,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;YACjC,MAAM,YAAY,GAAG;gBACnB,UAAU;gBACV,SAAS,EAAE,IAAI,CAAC,UAAU;gBAC1B,KAAK,kCACA,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;oBAC9C,SAAS,EAAE,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;iBACvD,CAAC,KACF,IAAI,EAAE,QAAQ,CAAC,QAAQ,EACvB,IAAI,EAAE,QAAQ,CAAC,IAAI,GACpB;aACF,CAAA;YAED,IAAI,WAAqB,CAAA;YACzB,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,KAAK,QAAQ,CAAA;YAChD,IAAI,QAAQ,EAAE;gBACZ,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAA;aACvE;iBAAM;gBACL,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAA;aACrE;YAED,KAAK,GAAG,WAAW,CAAC,YAAY,CAAC,CAAA;YACjC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAA;SACzB;QAED,wFAAwF;QACxF,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,KAAK,EAAE;YAC7B,MAAM,OAAO,GAAG,EAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,UAAU,EAAC,CAAA;YACxD,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YACrE,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;SACpB;QAED,gFAAgF;QAChF,IAAI,CAAC,KAAK,EAAE;YACV,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;SACxD;QAED,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,KAAK,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE;gBACjD,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,KAAK,CAAA;IACd,CAAC;IAEa,0BAA0B,CAAC,WAAmB;;YAC1D,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAA;YAC9D,MAAM,EAAE,GAAW,2BAA2B,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC,CAAA;YACzE,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;QAChE,CAAC;KAAA;IAEa,gBAAgB,CAC5B,GAAuB,EACvB,OAA4B;;YAE5B,OAAO,IAAI,OAAO,CAAuB,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;gBACjE,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,UAAU,IAAI,CAAC,CAAA;gBAE9C,MAAM,QAAQ,GAAyB;oBACrC,UAAU;oBACV,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;iBACZ,CAAA;gBAED,uCAAuC;gBACvC,IAAI,UAAU,KAAK,SAAS,CAAC,QAAQ,EAAE;oBACrC,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;gBAED,+BAA+B;gBAE/B,SAAS,oBAAoB,CAAC,GAAQ,EAAE,KAAU;oBAChD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;wBAC7B,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAA;wBACzB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,EAAE;4BACvB,OAAO,CAAC,CAAA;yBACT;qBACF;oBAED,OAAO,KAAK,CAAA;gBACd,CAAC;gBAED,IAAI,GAAQ,CAAA;gBACZ,IAAI,QAA4B,CAAA;gBAEhC,IAAI;oBACF,QAAQ,GAAG,MAAM,GAAG,CAAC,QAAQ,EAAE,CAAA;oBAC/B,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBACnC,IAAI,OAAO,IAAI,OAAO,CAAC,gBAAgB,EAAE;4BACvC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,oBAAoB,CAAC,CAAA;yBACjD;6BAAM;4BACL,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;yBAC3B;wBAED,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAA;qBACtB;oBAED,QAAQ,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,CAAA;iBACvC;gBAAC,OAAO,GAAG,EAAE;oBACZ,iEAAiE;iBAClE;gBAED,yDAAyD;gBACzD,IAAI,UAAU,GAAG,GAAG,EAAE;oBACpB,IAAI,GAAW,CAAA;oBAEf,0DAA0D;oBAC1D,IAAI,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE;wBACtB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAA;qBAClB;yBAAM,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBAC1C,yEAAyE;wBACzE,GAAG,GAAG,QAAQ,CAAA;qBACf;yBAAM;wBACL,GAAG,GAAG,oB
AAoB,UAAU,GAAG,CAAA;qBACxC;oBAED,MAAM,GAAG,GAAG,IAAI,eAAe,CAAC,GAAG,EAAE,UAAU,CAAC,CAAA;oBAChD,GAAG,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAA;oBAE5B,MAAM,CAAC,GAAG,CAAC,CAAA;iBACZ;qBAAM;oBACL,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;YACH,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAlpBD,gCAkpBC;AAED,MAAM,aAAa,GAAG,CAAC,GAA2B,EAAO,EAAE,CACzD,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.d.ts b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.d.ts new file mode 100644 index 00000000..54fd4a89 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.d.ts @@ -0,0 +1,44 @@ +/// +import * as http from 'http'; +import * as https from 'https'; +import { HttpClientResponse } from './index'; +export interface HttpClient { + options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: http.OutgoingHttpHeaders): Promise; + requestRaw(info: RequestInfo, data: string | NodeJS.ReadableStream): Promise; + requestRawWithCallback(info: RequestInfo, data: string | NodeJS.ReadableStream, onResult: (err?: Error, res?: HttpClientResponse) => void): void; +} +export interface RequestHandler { + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(response: HttpClientResponse): boolean; + handleAuthentication(httpClient: HttpClient, requestInfo: RequestInfo, data: string | NodeJS.ReadableStream | null): Promise; +} +export interface RequestInfo { + options: http.RequestOptions; + parsedUrl: URL; + httpModule: typeof http | typeof https; +} +export interface RequestOptions { + headers?: http.OutgoingHttpHeaders; + socketTimeout?: number; + ignoreSslError?: boolean; + allowRedirects?: boolean; + allowRedirectDowngrade?: boolean; + maxRedirects?: number; + maxSockets?: number; + keepAlive?: boolean; + deserializeDates?: boolean; + allowRetries?: boolean; + maxRetries?: number; +} +export interface TypedResponse { + statusCode: number; + result: T | null; + headers: http.IncomingHttpHeaders; +} diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.js b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.js new file mode 100644 index 00000000..db919115 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.js.map 
b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.js.map new file mode 100644 index 00000000..8fb5f7d1 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.d.ts b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.d.ts new file mode 100644 index 00000000..45998654 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.d.ts @@ -0,0 +1,2 @@ +export declare function getProxyUrl(reqUrl: URL): URL | undefined; +export declare function checkBypass(reqUrl: URL): boolean; diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.js b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.js new file mode 100644 index 00000000..528ffe40 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +//# sourceMappingURL=proxy.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.js.map b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.js.map new file mode 100644 index 00000000..4440de9b --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/lib/proxy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;KACzB;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AApBD,kCAoBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,gBAAgB,CAAC,EAAE;YACnD,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AArCD,kCAqCC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/@actions/http-client/package.json b/node_modules/@actions/cache/node_modules/@actions/http-client/package.json new file mode 100644 index 00000000..c1de2213 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/@actions/http-client/package.json @@ -0,0 +1,48 @@ +{ + "name": "@actions/http-client", + "version": "2.0.1", + "description": "Actions Http Client", + "keywords": [ + "github", + "actions", + "http" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/http-client", + "license": "MIT", + "main": "lib/index.js", + "types": "lib/index.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/http-client" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "build": "tsc", + "format": "prettier --write **/*.ts", + "format-check": "prettier --check **/*.ts", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "devDependencies": { + "@types/tunnel": "0.0.3", + "proxy": "^1.0.1" + }, + "dependencies": { + "tunnel": "^0.0.6" + } +} diff --git a/node_modules/@actions/cache/package.json b/node_modules/@actions/cache/package.json new file mode 100644 index 00000000..0b7bff06 --- /dev/null +++ b/node_modules/@actions/cache/package.json @@ -0,0 +1,55 @@ +{ + "name": "@actions/cache", + "version": "2.0.4", + "preview": true, + "description": "Actions cache lib", + "keywords": [ + "github", + "actions", + "cache" + ], + "homepage": 
"https://github.com/actions/toolkit/tree/main/packages/cache", + "license": "MIT", + "main": "lib/cache.js", + "types": "lib/cache.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/cache" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.2.6", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.0.1", + "@actions/io": "^1.0.1", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.8.0", + "semver": "^6.1.0", + "uuid": "^3.3.3" + }, + "devDependencies": { + "@types/semver": "^6.0.0", + "@types/uuid": "^3.4.5", + "typescript": "^3.8.3" + } +} diff --git a/node_modules/@actions/core/lib/core.d.ts b/node_modules/@actions/core/lib/core.d.ts index 356872ec..69b106b8 100644 --- a/node_modules/@actions/core/lib/core.d.ts +++ b/node_modules/@actions/core/lib/core.d.ts @@ -184,3 +184,11 @@ export declare function saveState(name: string, value: any): void; */ export declare function getState(name: string): string; export declare function getIDToken(aud?: string): Promise; +/** + * Summary exports + */ +export { summary } from './summary'; +/** + * @deprecated use core.summary + */ +export { markdownSummary } from './summary'; diff --git a/node_modules/@actions/core/lib/core.js b/node_modules/@actions/core/lib/core.js index e0ceced1..f5dd7a49 100644 --- a/node_modules/@actions/core/lib/core.js +++ b/node_modules/@actions/core/lib/core.js @@ -309,4 +309,14 @@ function getIDToken(aud) { }); } exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = require("./summary"); +Object.defineProperty(exports, "summary", { enumerable: true, get: function () { return summary_1.summary; } }); +/** + * @deprecated use core.summary + */ +var summary_2 = require("./summary"); +Object.defineProperty(exports, "markdownSummary", { enumerable: true, get: function () { return summary_2.markdownSummary; } }); //# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/core.js.map b/node_modules/@actions/core/lib/core.js.map index 087a91d8..e41628d8 100644 --- a/node_modules/@actions/core/lib/core.js.map +++ b/node_modules/@actions/core/lib/core.js.map @@ -1 +1 @@ 
-{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAC7C,iDAA+D;AAC/D,mCAA2D;AAE3D,uCAAwB;AACxB,2CAA4B;AAE5B,6CAAuC;AAavC;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAuCD,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,8DAA8D;AAC9D,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAQ;IACnD,MAAM,YAAY,GAAG,sBAAc,CAAC,GAAG,CAAC,CAAA;IACxC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,YAAY,CAAA;IAEhC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAA;IAChD,IAAI,QAAQ,EAAE;QACZ,MAAM,SAAS,GAAG,qCAAqC,CAAA;QACvD,MAAM,YAAY,GAAG,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,YAAY,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;QACzF,2BAAgB,CAAC,KAAK,EAAE,YAAY,CAAC,CAAA;KACtC;SAAM;QACL,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,YAAY,CAAC,CAAA;KAC9C;AACH,CAAC;AAZD,wCAYC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;IACjD,IAAI,QAAQ,EAAE;QACZ,2BAAgB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAA;KACpC;SAAM;QACL,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;KACxC;IACD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AARD,0BAQC;AAED;;;;;;;;GAQG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,IAAI,OAAO,IAAI,OAAO,CAAC,cAAc,KAAK,KAAK,EAAE;QAC/C,OAAO,GAAG,CAAA;KACX;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AAZD,4BAYC;AAED;;;;;;;GAOG;AACH,SAAgB,iBAAiB,CAC/B,IAAY,EACZ,OAAsB;IAEtB,MAAM,MAAM,GAAa,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;SAC7C,KAAK,CAAC,IAAI,CAAC;SACX,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IAExB,OAAO,MAAM,CAAA;AACf,CAAC;AATD,8CASC;AAED;;;;;;;;;GASG;AACH,SAAgB,eAAe,CAAC,IAAY,EAAE,OAAsB;IAClE,MAAM,SAAS,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAA;IAC1C,MAAM,UAAU,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,IAAI,CAAA;IACxC,IAAI,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,KAAK,CAAA;IAC1C,MAAM,IAAI,SAAS,CACjB,6DAA6D,IAAI,IAAI;QACnE,4EAA4E,CAC/E,CAAA;AACH,CAAC;AAVD,0CAUC;AAED;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAA;IAC5B,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAHD,8BAGC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;IAC7C,eAAK,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAA;AACvC,CAAC;AAFD,wCAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAuB;IAC/C,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IAEnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAJD,8BAIC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;GAEG;AACH,SAAgB,OAAO;IACrB,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,GAAG,CAAA;AAC5C,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;;GAIG;AACH,SAAgB,KAAK,CACnB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,OAAO,EACP,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,sBASC;AAED;;;;GAIG;AACH,SAAgB,OAAO,CACrB,OAAuB,EACv
B,aAAmC,EAAE;IAErC,sBAAY,CACV,SAAS,EACT,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,0BASC;AAED;;;;GAIG;AACH,SAAgB,MAAM,CACpB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,QAAQ,EACR,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,wBASC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC;AAED,SAAsB,UAAU,CAAC,GAAY;;QAC3C,OAAO,MAAM,uBAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;IACzC,CAAC;CAAA;AAFD,gCAEC"} \ No newline at end of file +{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAC7C,iDAA+D;AAC/D,mCAA2D;AAE3D,uCAAwB;AACxB,2CAA4B;AAE5B,6CAAuC;AAavC;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAuCD,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,8DAA8D;AAC9D,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAQ;IACnD,MAAM,YAAY,GAAG,sBAAc,CAAC,GAAG,CAAC,CAAA;IACxC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,YAAY,CAAA;IAEhC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAA;IAChD,IAAI,QAAQ,EAAE;QACZ,MAAM,SAAS,GAAG,qCAAqC,CAAA;QACvD,MAAM,YAAY,GAAG,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,YAAY,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;QACzF,2BAAgB,CAAC,KAAK,EAAE,YAAY,CAAC,CAAA;KACtC;SAAM;QACL,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,YAAY,CAAC,CAAA;KAC9C;AACH,CAAC;AAZD,wCAYC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;IACjD,IAAI,QAAQ,EAAE;QACZ,2BAAgB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAA;KACpC;SAAM;QACL,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;KACxC;IACD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AARD,0BAQC;AAED;;;;;;;;GAQG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,IAAI,OAAO,IAAI,OAAO,CAAC,cAAc,KAAK,KAAK,EAAE;QAC/C,OAAO,GAAG,CAAA;KACX;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AAZD,4BAYC;AAED;;;;;;;GAOG;AACH,SAAgB,iBAAiB,CAC/B,IAAY,EACZ,OAAsB;IAEtB,MAAM,MAAM,GAAa,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;SAC7C,KAAK,CAAC,IAAI,CAAC;SACX,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IAExB,OAAO,MAAM,CAAA;AACf,CAAC;AATD,8CASC;AAED;;;;;;;;;GASG;AACH,SAAgB,eAAe,CAAC,IAAY,EAAE,OAAsB;IAClE,MAAM,SAAS,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,M
AAM,CAAC,CAAA;IAC1C,MAAM,UAAU,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,IAAI,CAAA;IACxC,IAAI,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,KAAK,CAAA;IAC1C,MAAM,IAAI,SAAS,CACjB,6DAA6D,IAAI,IAAI;QACnE,4EAA4E,CAC/E,CAAA;AACH,CAAC;AAVD,0CAUC;AAED;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAA;IAC5B,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAHD,8BAGC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;IAC7C,eAAK,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAA;AACvC,CAAC;AAFD,wCAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAuB;IAC/C,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IAEnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAJD,8BAIC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;GAEG;AACH,SAAgB,OAAO;IACrB,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,GAAG,CAAA;AAC5C,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;;GAIG;AACH,SAAgB,KAAK,CACnB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,OAAO,EACP,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,sBASC;AAED;;;;GAIG;AACH,SAAgB,OAAO,CACrB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,SAAS,EACT,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,0BASC;AAED;;;;GAIG;AACH,SAAgB,MAAM,CACpB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,QAAQ,EACR,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,wBASC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC;AAED,SAAsB,UAAU,CAAC,GAAY;;QAC3C,OAAO,MAAM,uBAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;IACzC,CAAC;CAAA;AAFD,gCAEC;AAED;;GAEG;AACH,qCAAiC;AAAzB,kGAAA,OAAO,OAAA;AAEf;;GAEG;AACH,qCAAyC;AAAjC,0GAAA,eAAe,OAAA"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/oidc-utils.js b/node_modules/@actions/core/lib/oidc-utils.js index 9ee10faf..f7012770 100644 --- a/node_modules/@actions/core/lib/oidc-utils.js +++ b/node_modules/@actions/core/lib/oidc-utils.js @@ -11,7 +11,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge Object.defineProperty(exports, "__esModule", { value: true }); exports.OidcClient = void 0; const http_client_1 = require("@actions/http-client"); -const auth_1 = require("@actions/http-client/auth"); +const auth_1 = require("@actions/http-client/lib/auth"); const core_1 = require("./core"); class OidcClient { static createHttpClient(allowRetry = true, 
maxRetry = 10) { diff --git a/node_modules/@actions/core/lib/oidc-utils.js.map b/node_modules/@actions/core/lib/oidc-utils.js.map index 0ddbca92..284fa1d3 100644 --- a/node_modules/@actions/core/lib/oidc-utils.js.map +++ b/node_modules/@actions/core/lib/oidc-utils.js.map @@ -1 +1 @@ -{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,oDAAiE;AACjE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAoB;YACtC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,MAAM,CAAC,OAAO,EAAE,CACtC,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"} \ No newline at end of file 
+{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,wDAAqE;AACrE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAmB;YACrC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,MAAM,CAAC,OAAO,EAAE,CACtC,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/summary.d.ts b/node_modules/@actions/core/lib/summary.d.ts new file mode 100644 index 00000000..bb792555 --- /dev/null +++ b/node_modules/@actions/core/lib/summary.d.ts @@ -0,0 +1,202 @@ +export declare const SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY"; +export declare const SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary"; +export declare type SummaryTableRow = (SummaryTableCell | string)[]; +export interface SummaryTableCell { + /** + * Cell content + */ + data: string; + /** + * Render cell as header + * (optional) default: false + */ + header?: boolean; + /** + * Number of columns the cell extends + * (optional) default: '1' + */ + colspan?: string; + /** + * Number of rows the cell extends + * (optional) default: '1' + */ + rowspan?: string; +} +export interface SummaryImageOptions { + /** + * The width of the image in pixels. Must be an integer without a unit. + * (optional) + */ + width?: string; + /** + * The height of the image in pixels. Must be an integer without a unit. + * (optional) + */ + height?: string; +} +export interface SummaryWriteOptions { + /** + * Replace all existing content in summary file with buffer contents + * (optional) default: false + */ + overwrite?: boolean; +} +declare class Summary { + private _buffer; + private _filePath?; + constructor(); + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. 
+ * + * @returns step summary file path + */ + private filePath; + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + private wrap; + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options?: SummaryWriteOptions): Promise; + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear(): Promise; + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify(): string; + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer(): boolean; + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer(): Summary; + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text: string, addEOL?: boolean): Summary; + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL(): Summary; + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code: string, lang?: string): Summary; + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items: string[], ordered?: boolean): Summary; + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows: SummaryTableRow[]): Summary; + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label: string, content: string): Summary; + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src: string, alt: string, options?: SummaryImageOptions): Summary; + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text: string, level?: number | string): Summary; + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator(): Summary; + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak(): Summary; + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text: string, cite?: string): Summary; + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text: string, href: string): Summary; +} +/** + * @deprecated use `core.summary` + */ +export declare const markdownSummary: Summary; +export declare const summary: Summary; +export {}; diff --git a/node_modules/@actions/core/lib/summary.js b/node_modules/@actions/core/lib/summary.js new file mode 100644 index 00000000..04a335b8 --- /dev/null +++ b/node_modules/@actions/core/lib/summary.js @@ -0,0 +1,283 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = require("os"); +const fs_1 = require("fs"); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/summary.js.map b/node_modules/@actions/core/lib/summary.js.map new file mode 100644 index 00000000..d598f264 --- /dev/null +++ b/node_modules/@actions/core/lib/summary.js.map @@ -0,0 +1 @@ +{"version":3,"file":"summary.js","sourceRoot":"","sources":["../src/summary.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2BAAsB;AACtB,2BAAsC;AACtC,MAAM,EAAC,MAAM,EAAE,UAAU,EAAE,SAAS,EAAC,GAAG,aAAQ,CAAA;AAEnC,QAAA,eAAe,GAAG,qBAAqB,CAAA;AACvC,QAAA,gBAAgB,GAC3B,2GAA2G,CAAA;AA+C7G,MAAM,OAAO;IAIX;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;IACnB,CAAC;IAED;;;;;OAKG;IACW,QAAQ;;YACpB,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,OAAO,IAAI,CAAC,SAAS,CAAA;aACtB;YAED,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAe,CAAC,CAAA;YAChD,IAAI,CAAC,WAAW,EAAE;gBAChB,MAAM,IAAI,KAAK,CACb,4CAA4C,uBAAe,6DAA6D,CACzH,CAAA;aACF;YAED,IAAI;gBACF,MAAM,MAAM,CAAC,WAAW,EAAE,cAAS,CAAC,IAAI,GAAG,cAAS,CAAC,IAAI,CAAC,CAAA;aAC3D;YAAC,WAAM;gBACN,MAAM,IAAI,KAAK,CACb,mCAAmC,WAAW,0DAA0D,CACzG,CAAA;aACF;YAED,IAAI,CAAC,SAAS,GAAG,WAAW,CAAA;YAC5B,OAAO,IAAI,CAAC,SAAS,CAAA;QACvB,CAAC;KAAA;IAED;;;;;;;;OAQG;IACK,IAAI,CACV,GAAW,EACX,OAAsB,EACtB,QAAuC,EAAE;QAEzC,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC;aACpC,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,IAAI,GAAG,KAAK,KAAK,GAAG,CAAC;aAC3C,IAAI,CAAC,EAAE,CAAC,CAAA;QAEX,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,IAAI,GAAG,GAAG,SAAS,GAAG,CAAA;SAC9B;QAED,OAAO,IAAI,GAAG,GAAG,SAAS,IAAI,OAAO,KAAK,GAAG,GAAG,CAAA;IAClD,CAAC;IAED;;;;;;OAMG;IACG,KAAK,CAAC,OAA6B;;YACvC,MAAM,SAAS,GAAG,CAAC,EAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,CAAA,CAAA;YACtC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE,CAAA;YACtC,MAAM,SAAS,GAAG,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAA;YACpD,MAAM,SAAS,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAAA;YAC3D,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;QAC3B,CAAC;KAAA;IAED;;;;OAIG;IACG,KAAK;;YACT,OAAO,IAAI,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,EAAC,SAAS,EAAE,IAAI,EAAC,CAAC,CAAA;QACpD,CAAC;KAAA;IAED;;;;OAIG;IACH,SAAS;QACP,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED;;;;OAIG;IACH,aAAa;QACX,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,CAAA;IAClC,CAAC;IAED;;;;OAIG;IACH,WAAW;QACT,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;;;OAOG;IACH,MAAM,CAAC,IAAY,EAAE,MAAM,GAAG,KAAK;QACjC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAA;QACpB,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,IAAI,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,MAAM,CAAC,QAAG,CAAC,CAAA;IACzB,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,IAAY,EAAE,IAAa;QACtC,MAAM,KAAK,qBA
CN,CAAC,IAAI,IAAI,EAAC,IAAI,EAAC,CAAC,CACpB,CAAA;QACD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;QAChE,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,OAAO,CAAC,KAAe,EAAE,OAAO,GAAG,KAAK;QACtC,MAAM,GAAG,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;QACjC,MAAM,SAAS,GAAG,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACnE,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,SAAS,CAAC,CAAA;QACzC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;OAMG;IACH,QAAQ,CAAC,IAAuB;QAC9B,MAAM,SAAS,GAAG,IAAI;aACnB,GAAG,CAAC,GAAG,CAAC,EAAE;YACT,MAAM,KAAK,GAAG,GAAG;iBACd,GAAG,CAAC,IAAI,CAAC,EAAE;gBACV,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;oBAC5B,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;iBAC7B;gBAED,MAAM,EAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAC,GAAG,IAAI,CAAA;gBAC7C,MAAM,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;gBAChC,MAAM,KAAK,mCACN,CAAC,OAAO,IAAI,EAAC,OAAO,EAAC,CAAC,GACtB,CAAC,OAAO,IAAI,EAAC,OAAO,EAAC,CAAC,CAC1B,CAAA;gBAED,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;YACpC,CAAC,CAAC;iBACD,IAAI,CAAC,EAAE,CAAC,CAAA;YAEX,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;QAC/B,CAAC,CAAC;aACD,IAAI,CAAC,EAAE,CAAC,CAAA;QAEX,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,CAAA;QAC7C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,KAAa,EAAE,OAAe;QACvC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,GAAG,OAAO,CAAC,CAAA;QAC3E,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;;OAQG;IACH,QAAQ,CAAC,GAAW,EAAE,GAAW,EAAE,OAA6B;QAC9D,MAAM,EAAC,KAAK,EAAE,MAAM,EAAC,GAAG,OAAO,IAAI,EAAE,CAAA;QACrC,MAAM,KAAK,mCACN,CAAC,KAAK,IAAI,EAAC,KAAK,EAAC,CAAC,GAClB,CAAC,MAAM,IAAI,EAAC,MAAM,EAAC,CAAC,CACxB,CAAA;QAED,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,kBAAG,GAAG,EAAE,GAAG,IAAK,KAAK,EAAE,CAAA;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,IAAY,EAAE,KAAuB;QAC9C,MAAM,GAAG,GAAG,IAAI,KAAK,EAAE,CAAA;QACvB,MAAM,UAAU,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC;YACnE,CAAC,CAAC,GAAG;YACL,CAAC,CAAC,IAAI,CAAA;QACR,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAA;QAC3C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,YAAY;QACV,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QACrC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,QAAQ;QACN,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QACrC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,QAAQ,CAAC,IAAY,EAAE,IAAa;QAClC,MAAM,KAAK,qBACN,CAAC,IAAI,IAAI,EAAC,IAAI,EAAC,CAAC,CACpB,CAAA;QACD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;QACpD,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,OAAO,CAAC,IAAY,EAAE,IAAY;QAChC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,EAAC,IAAI,EAAC,CAAC,CAAA;QAC5C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;CACF;AAED,MAAM,QAAQ,GAAG,IAAI,OAAO,EAAE,CAAA;AAE9B;;GAEG;AACU,QAAA,eAAe,GAAG,QAAQ,CAAA;AAC1B,QAAA,OAAO,GAAG,QAAQ,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/LICENSE 
b/node_modules/@actions/core/node_modules/@actions/http-client/LICENSE new file mode 100644 index 00000000..5823a51c --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/LICENSE @@ -0,0 +1,21 @@ +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/README.md b/node_modules/@actions/core/node_modules/@actions/http-client/README.md new file mode 100644 index 00000000..7e06adeb --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/README.md @@ -0,0 +1,73 @@ +# `@actions/http-client` + +A lightweight HTTP client optimized for building actions. + +## Features + + - HTTP client with TypeScript generics and async/await/Promises + - Typings included! + - [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner + - Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+. + - Basic, Bearer and PAT Support out of the box. Extensible handlers for others. + - Redirects supported + +Features and releases [here](./RELEASES.md) + +## Install + +``` +npm install @actions/http-client --save +``` + +## Samples + +See the [tests](./__tests__) for detailed examples. + +## Errors + +### HTTP + +The HTTP client does not throw unless truly exceptional. + +* A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body. +* Redirects (3xx) will be followed by default. + +See the [tests](./__tests__) for detailed examples. + +## Debugging + +To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment varible: + +```shell +export NODE_DEBUG=http +``` + +## Node support + +The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+. + +## Support and Versioning + +We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat). + +## Contributing + +We welcome PRs. Please create an issue and if applicable, a design before proceeding with code. 
+ +once: + +``` +npm install +``` + +To build: + +``` +npm run build +``` + +To run all tests: + +``` +npm test +``` diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.d.ts b/node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.d.ts new file mode 100644 index 00000000..8cc9fc3d --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.d.ts @@ -0,0 +1,26 @@ +/// +import * as http from 'http'; +import * as ifm from './interfaces'; +import { HttpClientResponse } from './index'; +export declare class BasicCredentialHandler implements ifm.RequestHandler { + username: string; + password: string; + constructor(username: string, password: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise; +} +export declare class BearerCredentialHandler implements ifm.RequestHandler { + token: string; + constructor(token: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise; +} +export declare class PersonalAccessTokenCredentialHandler implements ifm.RequestHandler { + token: string; + constructor(token: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise; +} diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.js b/node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.js new file mode 100644 index 00000000..2c150a3d --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.js @@ -0,0 +1,81 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.js.map b/node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.js.map new file mode 100644 index 00000000..7d3a18af --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/auth.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAI/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA3BD,oFA2BC"} \ No newline at end of file diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/index.d.ts b/node_modules/@actions/core/node_modules/@actions/http-client/lib/index.d.ts new file mode 100644 index 00000000..fe733d14 --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/index.d.ts @@ -0,0 +1,123 @@ +/// +import * as http from 'http'; +import * as ifm from './interfaces'; +export declare enum HttpCodes { + OK = 200, + MultipleChoices = 300, + MovedPermanently = 301, + ResourceMoved = 302, + SeeOther = 303, + NotModified = 304, + UseProxy = 305, + SwitchProxy = 306, + TemporaryRedirect = 307, + PermanentRedirect = 308, + BadRequest = 400, + Unauthorized = 401, + PaymentRequired = 402, + Forbidden = 403, + NotFound = 404, + MethodNotAllowed = 405, + NotAcceptable = 406, + ProxyAuthenticationRequired = 407, + RequestTimeout = 408, + Conflict = 409, + Gone = 410, + TooManyRequests = 429, + InternalServerError = 500, + NotImplemented = 501, + BadGateway = 502, + ServiceUnavailable = 503, + GatewayTimeout = 504 +} +export declare enum Headers { + Accept = "accept", + ContentType = "content-type" +} +export declare enum MediaTypes { + ApplicationJson = "application/json" +} +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com + */ +export declare function getProxyUrl(serverUrl: string): string; +export declare class HttpClientError extends Error { + constructor(message: string, statusCode: number); + statusCode: number; + result?: any; +} +export declare class HttpClientResponse { + constructor(message: http.IncomingMessage); + message: http.IncomingMessage; + readBody(): Promise; +} +export declare function isHttps(requestUrl: string): boolean; +export declare class HttpClient { + userAgent: string | undefined; + handlers: ifm.RequestHandler[]; + requestOptions: ifm.RequestOptions | undefined; + private _ignoreSslError; + private _socketTimeout; + private _allowRedirects; + private _allowRedirectDowngrade; + private _maxRedirects; + private _allowRetries; + private _maxRetries; + private _agent; + private _proxyAgent; + private _keepAlive; + private _disposed; + constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions); + options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + head(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise>; + postJson(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise>; + putJson(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise>; + patchJson(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise>; + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream | null, headers?: http.OutgoingHttpHeaders): Promise; + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose(): void; + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null): Promise; + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null, onResult: (err?: Error, res?: HttpClientResponse) => void): void; + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com + */ + getAgent(serverUrl: string): http.Agent; + private _prepareRequest; + private _mergeHeaders; + private _getExistingOrDefaultHeader; + private _getAgent; + private _performExponentialBackoff; + private _processResponse; +} diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/index.js b/node_modules/@actions/core/node_modules/@actions/http-client/lib/index.js new file mode 100644 index 00000000..a1b7d032 --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/index.js @@ -0,0 +1,605 @@ +"use strict"; +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(require("http")); +const https = __importStar(require("https")); +const pm = __importStar(require("./proxy")); +const tunnel = __importStar(require("tunnel")); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders 
|| {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
+ if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/index.js.map b/node_modules/@actions/core/node_modules/@actions/http-client/lib/index.js.map new file mode 100644 index 00000000..ca8ea415 --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA,uDAAuD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEvD,2CAA4B;AAC5B,6CAA8B;AAG9B,4CAA6B;AAC7B,+CAAgC;AAEhC,IAAY,SA4BX;AA5BD,WAAY,SAAS;IACnB,uCAAQ,CAAA;IACR,iEAAqB,CAAA;IACrB,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,qEAAuB,CAAA;IACvB,qEAAuB,CAAA;IACvB,uDAAgB,CAAA;IAChB,2DAAkB,CAAA;IAClB,iEAAqB,CAAA;IACrB,qDAAe,CAAA;IACf,mDAAc,CAAA;IACd,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,yFAAiC,CAAA;IACjC,+DAAoB,CAAA;IACpB,mDAAc,CAAA;IACd,2CAAU,CAAA;IACV,iEAAqB,CAAA;IACrB,yEAAyB,CAAA;IACzB,+DAAoB,CAAA;IACpB,uDAAgB,CAAA;IAChB,uEAAwB,CAAA;IACxB,+DAAoB,CAAA;AACtB,CAAC,EA5BW,SAAS,GAAT,iBAAS,KAAT,iBAAS,QA4BpB;AAED,IAAY,OAGX;AAHD,WAAY,OAAO;IACjB,4BAAiB,CAAA;IACjB,uCAA4B,CAAA;AAC9B,CAAC,EAHW,OAAO,GAAP,eAAO,KAAP,eAAO,QAGlB;AAED,IAAY,UAEX;AAFD,WAAY,UAAU;IACpB,kDAAoC,CAAA;AACtC,CAAC,EAFW,UAAU,GAAV,kBAAU,KAAV,kBAAU,QAErB;AAED;;;GAGG;AACH,SAAgB,WAAW,CAAC,SAAiB;IAC3C,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAA;IACnD,OAAO,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;AACtC,CAAC;AAHD,kCAGC;AAED,MAAM,iBAAiB,GAAa;IAClC,SAAS,CAAC,gBAAgB;IAC1B,SAAS,CAAC,aAAa;IACvB,SAAS,CAAC,QAAQ;IAClB,SAAS,CAAC,iBAAiB;IAC3B,SAAS,CAAC,iBAAiB;CAC5B,CAAA;AACD,MAAM,sBAAsB,GAAa;IACvC,SAAS,CAAC,UAAU;IACpB,SAAS,CAAC,kBAAkB;IAC5B,SAAS,CAAC,cAAc;CACzB,CAAA;AACD,MAAM,kBAAkB,GAAa,CAAC,SAAS,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAA;AACzE,MAAM,yBAAyB,GAAG,EAAE,CAAA;AACpC,MAAM,2BAA2B,GAAG,CAAC,CAAA;AAErC,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe,EAAE,UAAkB;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAIF;AAVD,0CAUC;AAED,MAAa,kBAAkB;IAC7B,YAAY,OAA6B;QACvC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAGK,QAAQ;;YACZ,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBAE5B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAA;gBACzC,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC1B,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAA;gBAC5B,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAnBD,gDAmBC;AAED,SAAgB,OAAO,CAAC,UAAkB;IACxC,MAAM,SAAS,GAAQ,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;IAC1C,OAAO,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;AACxC,CAAC;AAHD,0BAGC;AAED,MAAa,UAAU;IAiBrB,YACE,SAAkB,EAClB,
QAA+B,EAC/B,cAAmC;QAf7B,oBAAe,GAAG,KAAK,CAAA;QAEvB,oBAAe,GAAG,IAAI,CAAA;QACtB,4BAAuB,GAAG,KAAK,CAAA;QAC/B,kBAAa,GAAG,EAAE,CAAA;QAClB,kBAAa,GAAG,KAAK,CAAA;QACrB,gBAAW,GAAG,CAAC,CAAA;QAGf,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QAOvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;QAC9B,IAAI,CAAC,cAAc,GAAG,cAAc,CAAA;QACpC,IAAI,cAAc,EAAE;YAClB,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC,aAAa,CAAA;YAElD,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,cAAc,CAAC,sBAAsB,IAAI,IAAI,EAAE;gBACjD,IAAI,CAAC,uBAAuB,GAAG,cAAc,CAAC,sBAAsB,CAAA;aACrE;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,YAAY,EAAE,CAAC,CAAC,CAAA;aAC9D;YAED,IAAI,cAAc,CAAC,SAAS,IAAI,IAAI,EAAE;gBACpC,IAAI,CAAC,UAAU,GAAG,cAAc,CAAC,SAAS,CAAA;aAC3C;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,cAAc,CAAC,YAAY,CAAA;aACjD;YAED,IAAI,cAAc,CAAC,UAAU,IAAI,IAAI,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,cAAc,CAAC,UAAU,CAAA;aAC7C;SACF;IACH,CAAC;IAEK,OAAO,CACX,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC3E,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC1E,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,KAAK,CACT,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACzE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,UAAU,CACd,IAAY,EACZ,UAAkB,EAClB,MAA6B,EAC7B,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,iBAAiB,CAAC,CAAA;QAClE,CAAC;KAAA;IAED;;;OAGG;IACG,OAAO,CACX,UAAkB,EAClB,oBAA8C,EAAE;;YAEhD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,QAAQ,CACZ,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,IAAI,CAC7C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,OAAO,CACX,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,
MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,SAAS,CACb,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,KAAK,CAC9C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAED;;;;OAIG;IACG,OAAO,CACX,IAAY,EACZ,UAAkB,EAClB,IAA2C,EAC3C,OAAkC;;YAElC,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;aACrD;YAED,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;YACrC,IAAI,IAAI,GAAoB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,CAAC,CAAA;YAE1E,oEAAoE;YACpE,MAAM,QAAQ,GACZ,IAAI,CAAC,aAAa,IAAI,kBAAkB,CAAC,QAAQ,CAAC,IAAI,CAAC;gBACrD,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC;gBACtB,CAAC,CAAC,CAAC,CAAA;YACP,IAAI,QAAQ,GAAG,CAAC,CAAA;YAEhB,IAAI,QAAwC,CAAA;YAC5C,GAAG;gBACD,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;gBAE5C,4CAA4C;gBAC5C,IACE,QAAQ;oBACR,QAAQ,CAAC,OAAO;oBAChB,QAAQ,CAAC,OAAO,CAAC,UAAU,KAAK,SAAS,CAAC,YAAY,EACtD;oBACA,IAAI,qBAAqD,CAAA;oBAEzD,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;wBACnC,IAAI,OAAO,CAAC,uBAAuB,CAAC,QAAQ,CAAC,EAAE;4BAC7C,qBAAqB,GAAG,OAAO,CAAA;4BAC/B,MAAK;yBACN;qBACF;oBAED,IAAI,qBAAqB,EAAE;wBACzB,OAAO,qBAAqB,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;qBACpE;yBAAM;wBACL,+EAA+E;wBAC/E,yCAAyC;wBACzC,OAAO,QAAQ,CAAA;qBAChB;iBACF;gBAED,IAAI,kBAAkB,GAAW,IAAI,CAAC,aAAa,CAAA;gBACnD,OACE,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC3B,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;oBACvD,IAAI,CAAC,eAAe;oBACpB,kBAAkB,GAAG,CAAC,EACtB;oBACA,MAAM,WAAW,GACf,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;oBACtC,IAAI,CAAC,WAAW,EAAE;wBAChB,kDAAkD;wBAClD,MAAK;qBACN;oBACD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAAC,WAAW,CAAC,CAAA;oBAC9C,IACE,SAAS,CAAC,QAAQ,KAAK,QAAQ;wBAC/B,SAAS,CAAC,QAAQ,KAAK,iBAAiB,CAAC,QAAQ;wBACjD,CAAC,IAAI,CAAC,uBAAuB,EAC7B;wBACA,MAAM,IAAI,KAAK,CACb,8KAA8K,CAC/K,CAAA;qBACF;oBAED,qEAAqE;oBACrE,mCAAmC;oBACnC,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBAEzB,mEAAmE;oBACnE,IAAI,iBAAiB,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,EAAE;wBACrD,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;4BAC5B,oCAAoC;4BACpC,IAAI,MAAM,CAAC,WAAW,EAAE,KAAK,eAAe,EAAE;gCAC5C,OAAO,OAAO,CAAC,MAAM,CAAC,CAAA;6BACvB;yBACF;qBACF;oBAED,kDAAkD;oBAClD,IAAI,GAAG,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,iBAAiB,EAAE,OAAO,CAAC,CAAA;oBAC7D,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;oBAC5C,kBAAkB,EAAE,CAAA;iBACrB;gBAED,IACE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC5B,CAAC,sBAAsB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAC7D;oBACA,8DAA8D;oBAC9D,OAAO,QAAQ,CAAA;iBAChB;gBAED,QAAQ,IAAI,CAAC,CAAA;gBAEb,IAAI,QAAQ,GAAG,QAAQ,EAAE;oBACvB,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACzB,MAAM,IAAI,CAAC,0BAA0B,CAAC,QAAQ,CAAC,CAAA;iBAChD;aACF,QAAQ,QAAQ,GAAG,QAAQ,EAAC;YAE7B,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;IAED;;OAEG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;SACtB;QAED,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACvB,CAAC;IAED;;;;OAIG;IACG,UAAU,CACd,IAAqB,EACrB,IAA2C;;YAE3C,OAAO,IAAI,OAAO,CAAqB,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACzD,SAAS,iBAAiB,CAAC,GAAW,EAAE,GAAwB;oBAC9D,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAA;qBACZ;yBAAM,IAAI,CAAC,GAAG,EAAE;wBACf,qDAAqD;wBACrD,MAAM,CAAC,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC,CAAA;qBACnC;yBAAM;wBACL,OAAO,CAAC,GAAG,CAAC,
CAAA;qBACb;gBACH,CAAC;gBAED,IAAI,CAAC,sBAAsB,CAAC,IAAI,EAAE,IAAI,EAAE,iBAAiB,CAAC,CAAA;YAC5D,CAAC,CAAC,CAAA;QACJ,CAAC;KAAA;IAED;;;;;OAKG;IACH,sBAAsB,CACpB,IAAqB,EACrB,IAA2C,EAC3C,QAAyD;QAEzD,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACzB,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,EAAE,CAAA;aAC1B;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACzE;QAED,IAAI,cAAc,GAAG,KAAK,CAAA;QAC1B,SAAS,YAAY,CAAC,GAAW,EAAE,GAAwB;YACzD,IAAI,CAAC,cAAc,EAAE;gBACnB,cAAc,GAAG,IAAI,CAAA;gBACrB,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;aACnB;QACH,CAAC;QAED,MAAM,GAAG,GAAuB,IAAI,CAAC,UAAU,CAAC,OAAO,CACrD,IAAI,CAAC,OAAO,EACZ,CAAC,GAAyB,EAAE,EAAE;YAC5B,MAAM,GAAG,GAAuB,IAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA;YAC3D,YAAY,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;QAC9B,CAAC,CACF,CAAA;QAED,IAAI,MAAkB,CAAA;QACtB,GAAG,CAAC,EAAE,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE;YACtB,MAAM,GAAG,IAAI,CAAA;QACf,CAAC,CAAC,CAAA;QAEF,wEAAwE;QACxE,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,cAAc,IAAI,CAAC,GAAG,KAAK,EAAE,GAAG,EAAE;YACpD,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,GAAG,EAAE,CAAA;aACb;YACD,YAAY,CAAC,IAAI,KAAK,CAAC,oBAAoB,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;QAClE,CAAC,CAAC,CAAA;QAEF,GAAG,CAAC,EAAE,CAAC,OAAO,EAAE,UAAS,GAAG;YAC1B,8BAA8B;YAC9B,0BAA0B;YAC1B,YAAY,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC,CAAC,CAAA;QAEF,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACxB;QAED,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE;gBACf,GAAG,CAAC,GAAG,EAAE,CAAA;YACX,CAAC,CAAC,CAAA;YAEF,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACf;aAAM;YACL,GAAG,CAAC,GAAG,EAAE,CAAA;SACV;IACH,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,SAAiB;QACxB,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;IAClC,CAAC;IAEO,eAAe,CACrB,MAAc,EACd,UAAe,EACf,OAAkC;QAElC,MAAM,IAAI,GAAqC,EAAE,CAAA;QAEjD,IAAI,CAAC,SAAS,GAAG,UAAU,CAAA;QAC3B,MAAM,QAAQ,GAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAC9D,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAA;QACzC,MAAM,WAAW,GAAW,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;QAE/C,IAAI,CAAC,OAAO,GAAwB,EAAE,CAAA;QACtC,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAA;QAC3C,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI;YACrC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC/B,CAAC,CAAC,WAAW,CAAA;QACf,IAAI,CAAC,OAAO,CAAC,IAAI;YACf,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,IAAI,EAAE,CAAC,CAAA;QACjE,IAAI,CAAC,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;QAC5B,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE;YAC1B,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;SACpD;QAED,IAAI,CAAC,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAEnD,+CAA+C;QAC/C,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACrC;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,aAAa,CACnB,OAAkC;QAElC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,OAAO,MAAM,CAAC,MAAM,CAClB,EAAE,EACF,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,EAC1C,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAC7B,CAAA;SACF;QAED,OAAO,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAAA;IACrC,CAAC;IAEO,2BAA2B,CACjC,iBAA2C,EAC3C,MAAc,EACd,QAAgB;QAEhB,IAAI,YAAgC,CAAA;QACpC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAA;SAClE;QACD,OAAO,iBAAiB,CAAC,MAAM,CAAC,IAAI,YAAY,IAAI,QAAQ,CAAA;IAC9D,CAAC;IAEO,SAAS,CAAC,SAAc;QAC9B,IAAI,KAAK
,CAAA;QACT,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAE9C,IAAI,IAAI,CAAC,UAAU,IAAI,QAAQ,EAAE;YAC/B,KAAK,GAAG,IAAI,CAAC,WAAW,CAAA;SACzB;QAED,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,QAAQ,EAAE;YAChC,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;SACpB;QAED,+CAA+C;QAC/C,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAA;SACb;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,IAAI,UAAU,GAAG,GAAG,CAAA;QACpB,IAAI,IAAI,CAAC,cAAc,EAAE;YACvB,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,UAAU,IAAI,IAAI,CAAC,WAAW,CAAC,UAAU,CAAA;SAC3E;QAED,sGAAsG;QACtG,IAAI,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;YACjC,MAAM,YAAY,GAAG;gBACnB,UAAU;gBACV,SAAS,EAAE,IAAI,CAAC,UAAU;gBAC1B,KAAK,kCACA,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;oBAC9C,SAAS,EAAE,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;iBACvD,CAAC,KACF,IAAI,EAAE,QAAQ,CAAC,QAAQ,EACvB,IAAI,EAAE,QAAQ,CAAC,IAAI,GACpB;aACF,CAAA;YAED,IAAI,WAAqB,CAAA;YACzB,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,KAAK,QAAQ,CAAA;YAChD,IAAI,QAAQ,EAAE;gBACZ,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAA;aACvE;iBAAM;gBACL,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAA;aACrE;YAED,KAAK,GAAG,WAAW,CAAC,YAAY,CAAC,CAAA;YACjC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAA;SACzB;QAED,wFAAwF;QACxF,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,KAAK,EAAE;YAC7B,MAAM,OAAO,GAAG,EAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,UAAU,EAAC,CAAA;YACxD,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YACrE,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;SACpB;QAED,gFAAgF;QAChF,IAAI,CAAC,KAAK,EAAE;YACV,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;SACxD;QAED,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,KAAK,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE;gBACjD,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,KAAK,CAAA;IACd,CAAC;IAEa,0BAA0B,CAAC,WAAmB;;YAC1D,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAA;YAC9D,MAAM,EAAE,GAAW,2BAA2B,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC,CAAA;YACzE,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;QAChE,CAAC;KAAA;IAEa,gBAAgB,CAC5B,GAAuB,EACvB,OAA4B;;YAE5B,OAAO,IAAI,OAAO,CAAuB,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;gBACjE,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,UAAU,IAAI,CAAC,CAAA;gBAE9C,MAAM,QAAQ,GAAyB;oBACrC,UAAU;oBACV,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;iBACZ,CAAA;gBAED,uCAAuC;gBACvC,IAAI,UAAU,KAAK,SAAS,CAAC,QAAQ,EAAE;oBACrC,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;gBAED,+BAA+B;gBAE/B,SAAS,oBAAoB,CAAC,GAAQ,EAAE,KAAU;oBAChD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;wBAC7B,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAA;wBACzB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,EAAE;4BACvB,OAAO,CAAC,CAAA;yBACT;qBACF;oBAED,OAAO,KAAK,CAAA;gBACd,CAAC;gBAED,IAAI,GAAQ,CAAA;gBACZ,IAAI,QAA4B,CAAA;gBAEhC,IAAI;oBACF,QAAQ,GAAG,MAAM,GAAG,CAAC,QAAQ,EAAE,CAAA;oBAC/B,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBACnC,IAAI,OAAO,IAAI,OAAO,CAAC,gBAAgB,EAAE;4BACvC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,oBAAoB,CAAC,CAAA;yBACjD;6BAAM;4BACL,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;yBAC3B;wBAED,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAA;qBACtB;oBAED,QAAQ,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,CAAA;iBACvC;gBAAC,OAAO,GAAG,EAAE;oBACZ,iEAAiE;iBAClE;gBAED,yDAAyD;gBACzD,IAAI,UAAU,GAAG,GAAG,EAAE;oBACpB,IAAI,GAAW,CAAA;oBAEf,0DAA0D;oBAC1D,IAAI,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE;wBACtB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAA;qBAClB;yBAAM,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBAC1C,yEAAyE;wBACzE,GAAG,GAAG,QAAQ,CAAA;qBACf;yBAAM;wBACL,GAAG,GAAG,oBAAo
B,UAAU,GAAG,CAAA;qBACxC;oBAED,MAAM,GAAG,GAAG,IAAI,eAAe,CAAC,GAAG,EAAE,UAAU,CAAC,CAAA;oBAChD,GAAG,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAA;oBAE5B,MAAM,CAAC,GAAG,CAAC,CAAA;iBACZ;qBAAM;oBACL,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;YACH,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAlpBD,gCAkpBC;AAED,MAAM,aAAa,GAAG,CAAC,GAA2B,EAAO,EAAE,CACzD,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.d.ts b/node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.d.ts new file mode 100644 index 00000000..54fd4a89 --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.d.ts @@ -0,0 +1,44 @@ +/// +import * as http from 'http'; +import * as https from 'https'; +import { HttpClientResponse } from './index'; +export interface HttpClient { + options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: http.OutgoingHttpHeaders): Promise; + requestRaw(info: RequestInfo, data: string | NodeJS.ReadableStream): Promise; + requestRawWithCallback(info: RequestInfo, data: string | NodeJS.ReadableStream, onResult: (err?: Error, res?: HttpClientResponse) => void): void; +} +export interface RequestHandler { + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(response: HttpClientResponse): boolean; + handleAuthentication(httpClient: HttpClient, requestInfo: RequestInfo, data: string | NodeJS.ReadableStream | null): Promise; +} +export interface RequestInfo { + options: http.RequestOptions; + parsedUrl: URL; + httpModule: typeof http | typeof https; +} +export interface RequestOptions { + headers?: http.OutgoingHttpHeaders; + socketTimeout?: number; + ignoreSslError?: boolean; + allowRedirects?: boolean; + allowRedirectDowngrade?: boolean; + maxRedirects?: number; + maxSockets?: number; + keepAlive?: boolean; + deserializeDates?: boolean; + allowRetries?: boolean; + maxRetries?: number; +} +export interface TypedResponse { + statusCode: number; + result: T | null; + headers: http.IncomingHttpHeaders; +} diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.js b/node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.js new file mode 100644 index 00000000..db919115 --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.js.map 
b/node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.js.map new file mode 100644 index 00000000..8fb5f7d1 --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.d.ts b/node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.d.ts new file mode 100644 index 00000000..45998654 --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.d.ts @@ -0,0 +1,2 @@ +export declare function getProxyUrl(reqUrl: URL): URL | undefined; +export declare function checkBypass(reqUrl: URL): boolean; diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.js b/node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.js new file mode 100644 index 00000000..528ffe40 --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +//# sourceMappingURL=proxy.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.js.map b/node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.js.map new file mode 100644 index 00000000..4440de9b --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/lib/proxy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;KACzB;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AApBD,kCAoBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,gBAAgB,CAAC,EAAE;YACnD,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AArCD,kCAqCC"} \ No newline at end of file diff --git a/node_modules/@actions/core/node_modules/@actions/http-client/package.json b/node_modules/@actions/core/node_modules/@actions/http-client/package.json new file mode 100644 index 00000000..c1de2213 --- /dev/null +++ b/node_modules/@actions/core/node_modules/@actions/http-client/package.json @@ -0,0 +1,48 @@ +{ + "name": "@actions/http-client", + "version": "2.0.1", + "description": "Actions Http Client", + "keywords": [ + "github", + "actions", + "http" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/http-client", + "license": "MIT", + "main": "lib/index.js", + "types": "lib/index.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/http-client" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "build": "tsc", + "format": "prettier --write **/*.ts", + "format-check": "prettier --check **/*.ts", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "devDependencies": { + "@types/tunnel": "0.0.3", + "proxy": "^1.0.1" + }, + "dependencies": { + "tunnel": "^0.0.6" + } +} diff --git a/node_modules/@actions/core/package.json b/node_modules/@actions/core/package.json index 8d7a3997..7a0129c0 100644 --- a/node_modules/@actions/core/package.json +++ b/node_modules/@actions/core/package.json @@ -1,6 +1,6 @@ { "name": "@actions/core", - "version": "1.6.0", + "version": "1.8.2", "description": "Actions core lib", "keywords": [ "github", @@ -36,7 +36,7 @@ "url": "https://github.com/actions/toolkit/issues" }, "dependencies": { - 
"@actions/http-client": "^1.0.11" + "@actions/http-client": "^2.0.1" }, "devDependencies": { "@types/node": "^12.0.2" diff --git a/node_modules/@actions/exec/package.json b/node_modules/@actions/exec/package.json index ff0cebfe..bc4d77a2 100644 --- a/node_modules/@actions/exec/package.json +++ b/node_modules/@actions/exec/package.json @@ -1,6 +1,6 @@ { "name": "@actions/exec", - "version": "1.1.0", + "version": "1.1.1", "description": "Actions exec lib", "keywords": [ "github", diff --git a/node_modules/@actions/glob/LICENSE.md b/node_modules/@actions/glob/LICENSE.md new file mode 100644 index 00000000..dbae2edb --- /dev/null +++ b/node_modules/@actions/glob/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/glob/README.md b/node_modules/@actions/glob/README.md new file mode 100644 index 00000000..9575beaa --- /dev/null +++ b/node_modules/@actions/glob/README.md @@ -0,0 +1,113 @@ +# `@actions/glob` + +## Usage + +### Basic + +You can use this package to search for files matching glob patterns. + +Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory. + +```js +const glob = require('@actions/glob'); + +const patterns = ['**/tar.gz', '**/tar.bz'] +const globber = await glob.create(patterns.join('\n')) +const files = await globber.glob() +``` + +### Opt out of following symbolic links + +```js +const glob = require('@actions/glob'); + +const globber = await glob.create('**', {followSymbolicLinks: false}) +const files = await globber.glob() +``` + +### Iterator + +When dealing with a large amount of results, consider iterating the results as they are returned: + +```js +const glob = require('@actions/glob'); + +const globber = await glob.create('**') +for await (const file of globber.globGenerator()) { + console.log(file) +} +``` + +## Recommended action inputs + +Glob follows symbolic links by default. Following is often appropriate unless deleting files. + +Users may want to opt-out from following symbolic links for other reasons. For example, +excessive amounts of symbolic links can create the appearance of very, very many files +and slow the search. + +When an action allows a user to specify input patterns, it is generally recommended to +allow users to opt-out from following symbolic links. 
+ +Snippet from `action.yml`: + +```yaml +inputs: + files: + description: 'Files to print' + required: true + follow-symbolic-links: + description: 'Indicates whether to follow symbolic links' + default: true +``` + +And corresponding toolkit consumption: + +```js +const core = require('@actions/core') +const glob = require('@actions/glob') + +const globOptions = { + followSymbolicLinks: core.getInput('follow-symbolic-links').toUpper() !== 'FALSE' +} +const globber = glob.create(core.getInput('files'), globOptions) +for await (const file of globber.globGenerator()) { + console.log(file) +} +``` + +## Patterns + +### Glob behavior + +Patterns `*`, `?`, `[...]`, `**` (globstar) are supported. + +With the following behaviors: +- File names that begin with `.` may be included in the results +- Case insensitive on Windows +- Directory separator `/` and `\` both supported on Windows + +### Tilde expansion + +Supports basic tilde expansion, for current user HOME replacement only. + +Example: +- `~` may expand to /Users/johndoe +- `~/foo` may expand to /Users/johndoe/foo + +### Comments + +Patterns that begin with `#` are treated as comments. + +### Exclude patterns + +Leading `!` changes the meaning of an include pattern to exclude. + +Multiple leading `!` flips the meaning. + +### Escaping + +Wrapping special characters in `[]` can be used to escape literal glob characters +in a file name. For example the literal file name `hello[a-z]` can be escaped as `hello[[]a-z]`. + +On Linux/macOS `\` is also treated as an escape character. diff --git a/node_modules/@actions/glob/lib/glob.d.ts b/node_modules/@actions/glob/lib/glob.d.ts new file mode 100644 index 00000000..56424fc3 --- /dev/null +++ b/node_modules/@actions/glob/lib/glob.d.ts @@ -0,0 +1,10 @@ +import { Globber } from './internal-globber'; +import { GlobOptions } from './internal-glob-options'; +export { Globber, GlobOptions }; +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +export declare function create(patterns: string, options?: GlobOptions): Promise; diff --git a/node_modules/@actions/glob/lib/glob.js b/node_modules/@actions/glob/lib/glob.js new file mode 100644 index 00000000..58cb6833 --- /dev/null +++ b/node_modules/@actions/glob/lib/glob.js @@ -0,0 +1,26 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.create = void 0; +const internal_globber_1 = require("./internal-globber"); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/glob.js.map b/node_modules/@actions/glob/lib/glob.js.map new file mode 100644 index 00000000..b27e7e4a --- /dev/null +++ b/node_modules/@actions/glob/lib/glob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"glob.js","sourceRoot":"","sources":["../src/glob.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,yDAA0D;AAK1D;;;;;GAKG;AACH,SAAsB,MAAM,CAC1B,QAAgB,EAChB,OAAqB;;QAErB,OAAO,MAAM,iCAAc,CAAC,MAAM,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAA;IACvD,CAAC;CAAA;AALD,wBAKC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts b/node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts new file mode 100644 index 00000000..8a789606 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts @@ -0,0 +1,5 @@ +import { GlobOptions } from './internal-glob-options'; +/** + * Returns a copy with defaults filled in. + */ +export declare function getOptions(copy?: GlobOptions): GlobOptions; diff --git a/node_modules/@actions/glob/lib/internal-glob-options-helper.js b/node_modules/@actions/glob/lib/internal-glob-options-helper.js new file mode 100644 index 00000000..974dfd03 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options-helper.js @@ -0,0 +1,50 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getOptions = void 0; +const core = __importStar(require("@actions/core")); +/** + * Returns a copy with defaults filled in. 
+ */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; +} +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options-helper.js.map b/node_modules/@actions/glob/lib/internal-glob-options-helper.js.map new file mode 100644 index 00000000..93376ddc --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options-helper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-glob-options-helper.js","sourceRoot":"","sources":["../src/internal-glob-options-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAGrC;;GAEG;AACH,SAAgB,UAAU,CAAC,IAAkB;IAC3C,MAAM,MAAM,GAAgB;QAC1B,mBAAmB,EAAE,IAAI;QACzB,mBAAmB,EAAE,IAAI;QACzB,uBAAuB,EAAE,IAAI;KAC9B,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,SAAS,EAAE;YACjD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;YACrD,IAAI,CAAC,KAAK,CAAC,wBAAwB,MAAM,CAAC,mBAAmB,GAAG,CAAC,CAAA;SAClE;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,SAAS,EAAE;YACjD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;YACrD,IAAI,CAAC,KAAK,CAAC,wBAAwB,MAAM,CAAC,mBAAmB,GAAG,CAAC,CAAA;SAClE;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;YAC7D,IAAI,CAAC,KAAK,CAAC,4BAA4B,MAAM,CAAC,uBAAuB,GAAG,CAAC,CAAA;SAC1E;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAzBD,gCAyBC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options.d.ts b/node_modules/@actions/glob/lib/internal-glob-options.d.ts new file mode 100644 index 00000000..c1cac0f1 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options.d.ts @@ -0,0 +1,29 @@ +/** + * Options to control globbing behavior + */ +export interface GlobOptions { + /** + * Indicates whether to follow symbolic links. Generally should set to false + * when deleting files. + * + * @default true + */ + followSymbolicLinks?: boolean; + /** + * Indicates whether directories that match a glob pattern, should implicitly + * cause all descendant paths to be matched. + * + * For example, given the directory `my-dir`, the following glob patterns + * would produce the same results: `my-dir/**`, `my-dir/`, `my-dir` + * + * @default true + */ + implicitDescendants?: boolean; + /** + * Indicates whether broken symbolic should be ignored and omitted from the + * result set. Otherwise an error will be thrown. 
+ * + * @default true + */ + omitBrokenSymbolicLinks?: boolean; +} diff --git a/node_modules/@actions/glob/lib/internal-glob-options.js b/node_modules/@actions/glob/lib/internal-glob-options.js new file mode 100644 index 00000000..8960c70e --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=internal-glob-options.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options.js.map b/node_modules/@actions/glob/lib/internal-glob-options.js.map new file mode 100644 index 00000000..dec79532 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-glob-options.js","sourceRoot":"","sources":["../src/internal-glob-options.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-globber.d.ts b/node_modules/@actions/glob/lib/internal-globber.d.ts new file mode 100644 index 00000000..6d0036bf --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-globber.d.ts @@ -0,0 +1,42 @@ +import { GlobOptions } from './internal-glob-options'; +export { GlobOptions }; +/** + * Used to match files and directories + */ +export interface Globber { + /** + * Returns the search path preceding the first glob segment, from each pattern. + * Duplicates and descendants of other paths are filtered out. + * + * Example 1: The patterns `/foo/*` and `/bar/*` returns `/foo` and `/bar`. + * + * Example 2: The patterns `/foo/*` and `/foo/bar/*` returns `/foo`. + */ + getSearchPaths(): string[]; + /** + * Returns files and directories matching the glob patterns. + * + * Order of the results is not guaranteed. + */ + glob(): Promise; + /** + * Returns files and directories matching the glob patterns. + * + * Order of the results is not guaranteed. + */ + globGenerator(): AsyncGenerator; +} +export declare class DefaultGlobber implements Globber { + private readonly options; + private readonly patterns; + private readonly searchPaths; + private constructor(); + getSearchPaths(): string[]; + glob(): Promise; + globGenerator(): AsyncGenerator; + /** + * Constructs a DefaultGlobber + */ + static create(patterns: string, options?: GlobOptions): Promise; + private static stat; +} diff --git a/node_modules/@actions/glob/lib/internal-globber.js b/node_modules/@actions/glob/lib/internal-globber.js new file mode 100644 index 00000000..a6695b5c --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-globber.js @@ -0,0 +1,235 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultGlobber = void 0; +const core = __importStar(require("@actions/core")); +const fs = __importStar(require("fs")); +const globOptionsHelper = __importStar(require("./internal-glob-options-helper")); +const path = __importStar(require("path")); +const patternHelper = __importStar(require("./internal-pattern-helper")); +const internal_match_kind_1 = require("./internal-match-kind"); +const internal_pattern_1 = require("./internal-pattern"); +const internal_search_state_1 = require("./internal-search-state"); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); + } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); + } + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory) { + yield yield __await(item.path); + } + // Descend? 
+ else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); + } + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); + } +} +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-globber.js.map b/node_modules/@actions/glob/lib/internal-globber.js.map new file mode 100644 index 00000000..cd1f05d9 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-globber.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-globber.js","sourceRoot":"","sources":["../src/internal-globber.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,uCAAwB;AACxB,kFAAmE;AACnE,2CAA4B;AAC5B,yEAA0D;AAE1D,+DAA+C;AAC/C,yDAA0C;AAC1C,mEAAmD;AAEnD,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAiC/C,MAAa,cAAc;IAKzB,YAAoB,OAAqB;QAHxB,aAAQ,GAAc,EAAE,CAAA;QACxB,gBAAW,GAAa,EAAE,CAAA;QAGzC,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtD,CAAC;IAED,cAAc;QACZ,gBAAgB;QAChB,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,CAAA;IACjC,CAAC;IAEK,IAAI;;;YACR,MAAM,MAAM,GAAa,EAAE,CAAA;;gBAC3B,KAA6B,IAAA,KAAA,cAAA,IAAI,CAAC,aAAa,EAAE,CAAA,IAAA;oBAAtC,MAAM,QAAQ,WAAA,CAAA;oBACvB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;iBACtB;;;;;;;;;YACD,OAAO,MAAM,CAAA;;KACd;IAEM,aAAa;;YAClB,2BAA2B;YAC3B,MAAM,OAAO,GAAG,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;YAC1D,wBAAwB;YACxB,MAAM,QAAQ,GAAc,EAAE,CAAA;YAC9B,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;gBACtB,IACE,OAAO,CAAC,mBAAmB;oBAC3B,CAAC,OAAO,CAAC,iBAAiB;wBACxB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAC,EACzD;oBACA,QAAQ,CAAC,IAAI,CACX,IAAI,0BAAO,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CACjE,CAAA;iBACF;aACF;YAED,wBAAwB;YAExB,MAAM,KAAK,GAAkB,EAAE,CAAA;YAC/B,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,cAAc,CAAC,QAAQ,CAAC,EAAE;gBAC/D,IAAI,CAAC,KAAK,CAAC,gBAAgB,UAAU,GAAG,CAAC,CAAA;gBAEzC,UAAU;gBACV,IAAI;oBACF,0DAA0D;oBAC1D,mDAAmD;oBACnD,cAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,CAAA,CAAA;iBACpC;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;wBACzB,SAAQ;qBACT;oBACD,MAAM,GAAG,CAAA;iBACV;gBAED,KAAK,CAAC,OAAO,CAAC,IAAI,mCAAW,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAA;aAC9C;YAED,SAAS;YACT,MAAM,cAAc,GAAa,EAAE,CAAA,CAAC,wBAAwB;YAC5D,OAAO,KAAK,CAAC,MAAM,EAAE;gBACnB,MAAM;gBACN,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAiB,CAAA;gBAEvC,SAAS;gBACT,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,CAAA;gBACtD,MAAM,YAAY,GAChB,CAAC,CAAC,KAAK,IAAI,aAAa,CAAC,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,CAAA;gBAC5D,IAAI,CAAC,KAAK,IAAI,CAAC,YAAY,EAAE;oBAC3B,SAAQ;iBACT;gBAED,OAAO;gBACP,MAAM,KAAK,GAAyB,cAAM,cAAc,CAAC,IAAI,CAC3D,IAAI,EACJ,OAAO,EACP,cAAc,CACf;gBAED,iEAAiE;iBAFhE,CAAA;gBAED,iEAAiE;gBACjE,IAAI,CAAC,KAAK,EAAE;oBACV,SAAQ;iBACT;gBAED,YAAY;gBACZ,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;oBACvB,UAAU;oBACV,IAAI,KAAK,GAAG,+BAAS,CAAC,SAAS,EAAE;wBAC/B,oBAAM,IAAI,CAAC,IAAI,CAAA,CAAA;qBAChB;oBACD,WAAW;yBACN,IAAI,CAAC,YAAY,EAAE;wBACtB,SAAQ;qBACT;oBAED,kCAAkC;oBAClC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;oBACjC,MAAM,UAAU,GAAG,CAAC,cAAM,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA,CAAC,CAAC,GAAG,CAC3D,CAAC,CAAC,EAAE,CAAC,IAAI,mCAAW,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,UAAU,CAAC,CAC1D,CAAA;oBACD,KAAK,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,OAAO,EAAE,CAAC,CAAA;iBACpC;gBACD,OAAO;qBACF,IAAI,KAAK,GAAG,+BAAS,CAAC,IAAI,EAAE;oBAC/B,oBAAM,IAAI,CAAC,IAAI,CAAA,CAAA;iBAChB;aACF;QACH,CAAC;KAAA;IAED;;OAEG;IACH,MAAM,CAAO,MAAM,CACjB,QAAgB,EAChB,OAAqB;;YAErB,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC,OAAO,CAAC,CAAA;YAE1C,IAAI,UAAU,EAAE;gBACd,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;gBAC1C,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;aACzC;YAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;YACrD,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;gBACxB,mBAAmB;gBACnB,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACjC,SAAQ;iBACT;gBACD,UAAU;qBACL;oBACH,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,0BAAO,CAAC,IA
AI,CAAC,CAAC,CAAA;iBACxC;aACF;YAED,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,cAAc,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAA;YAEzE,OAAO,MAAM,CAAA;QACf,CAAC;KAAA;IAEO,MAAM,CAAO,IAAI,CACvB,IAAiB,EACjB,OAAoB,EACpB,cAAwB;;YAExB,QAAQ;YACR,uEAAuE;YACvE,8CAA8C;YAC9C,IAAI,KAAe,CAAA;YACnB,IAAI,OAAO,CAAC,mBAAmB,EAAE;gBAC/B,IAAI;oBACF,kCAAkC;oBAClC,KAAK,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;iBAC1C;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;wBACzB,IAAI,OAAO,CAAC,uBAAuB,EAAE;4BACnC,IAAI,CAAC,KAAK,CAAC,mBAAmB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAA;4BAC3C,OAAO,SAAS,CAAA;yBACjB;wBAED,MAAM,IAAI,KAAK,CACb,sCAAsC,IAAI,CAAC,IAAI,8CAA8C,CAC9F,CAAA;qBACF;oBAED,MAAM,GAAG,CAAA;iBACV;aACF;iBAAM;gBACL,uCAAuC;gBACvC,KAAK,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;aAC3C;YAED,+DAA+D;YAC/D,IAAI,KAAK,CAAC,WAAW,EAAE,IAAI,OAAO,CAAC,mBAAmB,EAAE;gBACtD,mBAAmB;gBACnB,MAAM,QAAQ,GAAW,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;gBAE9D,oDAAoD;gBACpD,OAAO,cAAc,CAAC,MAAM,IAAI,IAAI,CAAC,KAAK,EAAE;oBAC1C,cAAc,CAAC,GAAG,EAAE,CAAA;iBACrB;gBAED,mBAAmB;gBACnB,IAAI,cAAc,CAAC,IAAI,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,EAAE;oBACtD,IAAI,CAAC,KAAK,CACR,oCAAoC,IAAI,CAAC,IAAI,mBAAmB,QAAQ,GAAG,CAC5E,CAAA;oBACD,OAAO,SAAS,CAAA;iBACjB;gBAED,6BAA6B;gBAC7B,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;aAC9B;YAED,OAAO,KAAK,CAAA;QACd,CAAC;KAAA;CACF;AAvMD,wCAuMC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-match-kind.d.ts b/node_modules/@actions/glob/lib/internal-match-kind.d.ts new file mode 100644 index 00000000..a7e60aaf --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-match-kind.d.ts @@ -0,0 +1,13 @@ +/** + * Indicates whether a pattern matches a path + */ +export declare enum MatchKind { + /** Not matched */ + None = 0, + /** Matched if the path is a directory */ + Directory = 1, + /** Matched if the path is a regular file */ + File = 2, + /** Matched */ + All = 3 +} diff --git a/node_modules/@actions/glob/lib/internal-match-kind.js b/node_modules/@actions/glob/lib/internal-match-kind.js new file mode 100644 index 00000000..37146ae7 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-match-kind.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MatchKind = void 0; +/** + * Indicates whether a pattern matches a path + */ +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-match-kind.js.map b/node_modules/@actions/glob/lib/internal-match-kind.js.map new file mode 100644 index 00000000..6fa3c9f7 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-match-kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-match-kind.js","sourceRoot":"","sources":["../src/internal-match-kind.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,IAAY,SAYX;AAZD,WAAY,SAAS;IACnB,kBAAkB;IAClB,yCAAQ,CAAA;IAER,yCAAyC;IACzC,mDAAa,CAAA;IAEb,4CAA4C;IAC5C,yCAAQ,CAAA;IAER,cAAc;IACd,uCAAsB,CAAA;AACxB,CAAC,EAZW,SAAS,GAAT,iBAAS,KAAT,iBAAS,QAYpB"} \ No newline at end of file diff --git 
a/node_modules/@actions/glob/lib/internal-path-helper.d.ts b/node_modules/@actions/glob/lib/internal-path-helper.d.ts new file mode 100644 index 00000000..0375c36c --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path-helper.d.ts @@ -0,0 +1,42 @@ +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. + * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +export declare function dirname(p: string): string; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +export declare function ensureAbsoluteRoot(root: string, itemPath: string): string; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +export declare function hasAbsoluteRoot(itemPath: string): boolean; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +export declare function hasRoot(itemPath: string): boolean; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +export declare function normalizeSeparators(p: string): string; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +export declare function safeTrimTrailingSeparator(p: string): string; diff --git a/node_modules/@actions/glob/lib/internal-path-helper.js b/node_modules/@actions/glob/lib/internal-path-helper.js new file mode 100644 index 00000000..5057add3 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path-helper.js @@ -0,0 +1,198 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; +const path = __importStar(require("path")); +const assert_1 = __importDefault(require("assert")); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. 
+ * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +function dirname(p) { + // Normalize slashes and trim unnecessary trailing slash + p = safeTrimTrailingSeparator(p); + // Windows UNC root, e.g. \\hello or \\hello\world + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; + } + // Get dirname + let result = path.dirname(p); + // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); + } + return result; +} +exports.dirname = dirname; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +function ensureAbsoluteRoot(root, itemPath) { + assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + // Already rooted + if (hasAbsoluteRoot(itemPath)) { + return itemPath; + } + // Windows + if (IS_WINDOWS) { + // Check for itemPath like C: or C:foo + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + // Drive letter matches cwd? Expand to cwd + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + // Drive only, e.g. C: + if (itemPath.length === 2) { + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } + // Drive + path, e.g. C:foo + else { + if (!cwd.endsWith('\\')) { + cwd += '\\'; + } + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } + } + // Different drive + else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; + } + } + // Check for itemPath like \ or \foo + else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; + } + } + assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + // Otherwise ensure root ends with a separator + if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { + // Intentionally empty + } + else { + // Append separator + root += path.sep; + } + return root + itemPath; +} +exports.ensureAbsoluteRoot = ensureAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +function hasAbsoluteRoot(itemPath) { + assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \\hello\share or C:\hello + return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasAbsoluteRoot = hasAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. 
On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +function hasRoot(itemPath) { + assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \ or \hello or \\hello + // E.g. C: or C:\hello + return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasRoot = hasRoot; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +function normalizeSeparators(p) { + p = p || ''; + // Windows + if (IS_WINDOWS) { + // Convert slashes on Windows + p = p.replace(/\//g, '\\'); + // Remove redundant slashes + const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + } + // Remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +exports.normalizeSeparators = normalizeSeparators; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +function safeTrimTrailingSeparator(p) { + // Short-circuit if empty + if (!p) { + return ''; + } + // Normalize separators + p = normalizeSeparators(p); + // No trailing slash + if (!p.endsWith(path.sep)) { + return p; + } + // Check '/' on Linux/macOS and '\' on Windows + if (p === path.sep) { + return p; + } + // On Windows check if drive root. E.g. C:\ + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; + } + // Otherwise trim trailing slash + return p.substr(0, p.length - 1); +} +exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; +//# sourceMappingURL=internal-path-helper.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-path-helper.js.map b/node_modules/@actions/glob/lib/internal-path-helper.js.map new file mode 100644 index 00000000..1dc7ef9a --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path-helper.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-path-helper.js","sourceRoot":"","sources":["../src/internal-path-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAC5B,oDAA2B;AAE3B,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;;;;;;;;;;;;;;;GAgBG;AACH,SAAgB,OAAO,CAAC,CAAS;IAC/B,wDAAwD;IACxD,CAAC,GAAG,yBAAyB,CAAC,CAAC,CAAC,CAAA;IAEhC,kDAAkD;IAClD,IAAI,UAAU,IAAI,yBAAyB,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;QACnD,OAAO,CAAC,CAAA;KACT;IAED,cAAc;IACd,IAAI,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;IAE5B,gEAAgE;IAChE,IAAI,UAAU,IAAI,wBAAwB,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;QACvD,MAAM,GAAG,yBAAyB,CAAC,MAAM,CAAC,CAAA;KAC3C;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAlBD,0BAkBC;AAED;;;GAGG;AACH,SAAgB,kBAAkB,CAAC,IAAY,EAAE,QAAgB;IAC/D,gBAAM,CAAC,IAAI,EAAE,uDAAuD,CAAC,CAAA;IACrE,gBAAM,CAAC,QAAQ,EAAE,2DAA2D,CAAC,CAAA;IAE7E,iBAAiB;IACjB,IAAI,eAAe,CAAC,QAAQ,CAAC,EAAE;QAC7B,OAAO,QAAQ,CAAA;KAChB;IAED,UAAU;IACV,IAAI,UAAU,EAAE;QACd,sCAAsC;QACtC,IAAI,QAAQ,CAAC,KAAK,CAAC,yBAAyB,CAAC,EAAE;YAC7C,IAAI,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;YACvB,gBAAM,CACJ,GAAG,CAAC,KAAK,CAAC,YAAY,CAAC,EACvB,4EAA4E,GAAG,GAAG,CACnF,CAAA;YAED,0CAA0C;YAC1C,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,EAAE;gBACtD,sBAAsB;gBACtB,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;oBACzB,wDAAwD;oBACxD,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;iBAC3C;gBACD,2BAA2B;qBACtB;oBACH,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;wBACvB,GAAG,IAAI,IAAI,CAAA;qBACZ;oBACD,wDAAwD;oBACxD,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;iBAChE;aACF;YACD,kBAAkB;iBACb;gBACH,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;aAChD;SACF;QACD,oCAAoC;aAC/B,IAAI,mBAAmB,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,eAAe,CAAC,EAAE;YAC7D,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;YACzB,gBAAM,CACJ,GAAG,CAAC,KAAK,CAAC,YAAY,CAAC,EACvB,4EAA4E,GAAG,GAAG,CACnF,CAAA;YAED,OAAO,GAAG,GAAG,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;SAC3C;KACF;IAED,gBAAM,CACJ,eAAe,CAAC,IAAI,CAAC,EACrB,gEAAgE,CACjE,CAAA;IAED,8CAA8C;IAC9C,IAAI,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,EAAE;QAC7D,sBAAsB;KACvB;SAAM;QACL,mBAAmB;QACnB,IAAI,IAAI,IAAI,CAAC,GAAG,CAAA;KACjB;IAED,OAAO,IAAI,GAAG,QAAQ,CAAA;AACxB,CAAC;AAlED,gDAkEC;AAED;;;GAGG;AACH,SAAgB,eAAe,CAAC,QAAgB;IAC9C,gBAAM,CAAC,QAAQ,EAAE,wDAAwD,CAAC,CAAA;IAE1E,uBAAuB;IACvB,QAAQ,GAAG,mBAAmB,CAAC,QAAQ,CAAC,CAAA;IAExC,UAAU;IACV,IAAI,UAAU,EAAE;QACd,iCAAiC;QACjC,OAAO,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;KAClE;IAED,cAAc;IACd,OAAO,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAdD,0CAcC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,QAAgB;IACtC,gBAAM,CAAC,QAAQ,EAAE,iDAAiD,CAAC,CAAA;IAEnE,uBAAuB;IACvB,QAAQ,GAAG,mBAAmB,CAAC,QAAQ,CAAC,CAAA;IAExC,UAAU;IACV,IAAI,UAAU,EAAE;QACd,8BAA8B;QAC9B,sBAAsB;QACtB,OAAO,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;KAC9D;IAED,cAAc;IACd,OAAO,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAfD,0BAeC;AAED;;GAEG;AACH,SAAgB,mBAAmB,CAAC,CAAS;IAC3C,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;IAEX,UAAU;IACV,IAAI,UAAU,EAAE;QACd,6BAA6B;QAC7B,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;QAE1B,2BAA2B;QAC3B,MAAM,KAAK,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA,CAAC,eAAe;QACnD,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAA,CAAC,8BAA8B;KACtF;IAED,2BAA2B;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;AACjC,CAAC;AAfD,kDAeC;AAED;;;GAGG;AACH,SAAgB,yBAAyB,CAAC,CAAS;IACjD,yBAAyB;IA
CzB,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,EAAE,CAAA;KACV;IAED,uBAAuB;IACvB,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAA;IAE1B,oBAAoB;IACpB,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACzB,OAAO,CAAC,CAAA;KACT;IAED,8CAA8C;IAC9C,IAAI,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;QAClB,OAAO,CAAC,CAAA;KACT;IAED,2CAA2C;IAC3C,IAAI,UAAU,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;QACvC,OAAO,CAAC,CAAA;KACT;IAED,gCAAgC;IAChC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;AAClC,CAAC;AA1BD,8DA0BC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-path.d.ts b/node_modules/@actions/glob/lib/internal-path.d.ts new file mode 100644 index 00000000..5e0a2641 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path.d.ts @@ -0,0 +1,15 @@ +/** + * Helper class for parsing paths into segments + */ +export declare class Path { + segments: string[]; + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath: string | string[]); + /** + * Converts the path to it's string representation + */ + toString(): string; +} diff --git a/node_modules/@actions/glob/lib/internal-path.js b/node_modules/@actions/glob/lib/internal-path.js new file mode 100644 index 00000000..88a8715a --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path.js @@ -0,0 +1,113 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Path = void 0; +const path = __importStar(require("path")); +const pathHelper = __importStar(require("./internal-path-helper")); +const assert_1 = __importDefault(require("assert")); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } +} +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-path.js.map b/node_modules/@actions/glob/lib/internal-path.js.map new file mode 100644 index 00000000..8e8eeb57 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-path.js","sourceRoot":"","sources":["../src/internal-path.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAC5B,mEAAoD;AACpD,oDAA2B;AAE3B,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;GAEG;AACH,MAAa,IAAI;IAGf;;;OAGG;IACH,YAAY,QAA2B;QANvC,aAAQ,GAAa,EAAE,CAAA;QAOrB,SAAS;QACT,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;YAChC,gBAAM,CAAC,QAAQ,EAAE,wCAAwC,CAAC,CAAA;YAE1D,wDAAwD;YACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;YAEzD,aAAa;YACb,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;gBACjC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;aACzC;YACD,SAAS;iBACJ;gBACH,0CAA0C;gBAC1C,IAAI,SAAS,GAAG,QAAQ,CAAA;gBACxB,IAAI,GAAG,GAAG,UAAU,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;gBACvC,OAAO,GAAG,KAAK,SAAS,EAAE;oBACxB,kBAAkB;oBAClB,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAA;oBACzC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;oBAE/B,4BAA4B;oBAC5B,SAAS,GAAG,GAAG,CAAA;oBACf,GAAG,GAAG,UAAU,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;iBACpC;gBAED,wBAAwB;gBACxB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;aACjC;SACF;QACD,QAAQ;aACH;YACH,oBAAoB;YACpB,gBAAM,CACJ,QAAQ,CAAC,MAAM,GAAG,CAAC,EACnB,iDAAiD,CAClD,CAAA;YAED,eAAe;YACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACxC,IAAI,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAA;gBAEzB,oBAAoB;gBACpB,gBAAM,CACJ,OAAO,EACP,0DAA0D,CAC3D,CAAA;gBAED,oBAAoB;gBACpB,OAAO,GAAG,UAAU,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;gBAErD,eAAe;gBACf,IAAI,CAAC,KAAK,CAAC,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;oBAC1C,OAAO,GAAG,UAAU,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAA;oBACvD,gBAAM,CACJ,OAAO,KAAK,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,EACvC,8EAA8E,CAC/E,CAAA;oBACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAC5B;gBACD,qBAAqB;qBAChB;oBACH,yBAAyB;oBACzB,gBAAM,CACJ,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAC3B,0DAA0D,CAC3D,CAAA;oBACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAC5B;aACF;SACF;IACH,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,gBAAgB;QAChB,IAAI,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;QAE7B,aAAa;QACb,IAAI,SAAS,GACX,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAA;QACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,IAAI,SAAS,EAAE;gBACb,SAAS,GAAG,KAAK,CAAA;aAClB;iBAAM;gBACL,MAAM,IAAI,IAAI,CAAC,GAAG,CAAA;aACnB;YAED,MAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;SAC3B;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAvGD,oBAuGC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern-helper.d.ts b/node_modules/@actions/glob/lib/internal-pattern-helper.d.ts new file mode 100644 index 00000000..c5dcc072 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern-helper.d.ts @@ -0,0 +1,15 @@ +import { MatchKind } from './internal-match-kind'; +import { Pattern } from './internal-pattern'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. 
+ */ +export declare function getSearchPaths(patterns: Pattern[]): string[]; +/** + * Matches the patterns against the path + */ +export declare function match(patterns: Pattern[], itemPath: string): MatchKind; +/** + * Checks whether to descend further into the directory + */ +export declare function partialMatch(patterns: Pattern[], itemPath: string): boolean; diff --git a/node_modules/@actions/glob/lib/internal-pattern-helper.js b/node_modules/@actions/glob/lib/internal-pattern-helper.js new file mode 100644 index 00000000..a7a0440d --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern-helper.js @@ -0,0 +1,94 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.partialMatch = exports.match = exports.getSearchPaths = void 0; +const pathHelper = __importStar(require("./internal-path-helper")); +const internal_match_kind_1 = require("./internal-match-kind"); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? 
pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; +} +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; +} +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); +} +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern-helper.js.map b/node_modules/@actions/glob/lib/internal-pattern-helper.js.map new file mode 100644 index 00000000..ae372853 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern-helper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-pattern-helper.js","sourceRoot":"","sources":["../src/internal-pattern-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,mEAAoD;AACpD,+DAA+C;AAG/C,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;;GAGG;AACH,SAAgB,cAAc,CAAC,QAAmB;IAChD,yBAAyB;IACzB,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAA;IAE1C,mCAAmC;IACnC,MAAM,aAAa,GAA4B,EAAE,CAAA;IACjD,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,MAAM,GAAG,GAAG,UAAU;YACpB,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE;YAClC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAA;QACtB,aAAa,CAAC,GAAG,CAAC,GAAG,WAAW,CAAA;KACjC;IAED,MAAM,MAAM,GAAa,EAAE,CAAA;IAE3B,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,4BAA4B;QAC5B,MAAM,GAAG,GAAG,UAAU;YACpB,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE;YAClC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAA;QACtB,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,UAAU,EAAE;YACrC,SAAQ;SACT;QAED,oCAAoC;QACpC,IAAI,aAAa,GAAG,KAAK,CAAA;QACzB,IAAI,OAAO,GAAG,GAAG,CAAA;QACjB,IAAI,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;QACxC,OAAO,MAAM,KAAK,OAAO,EAAE;YACzB,IAAI,aAAa,CAAC,MAAM,CAAC,EAAE;gBACzB,aAAa,GAAG,IAAI,CAAA;gBACpB,MAAK;aACN;YAED,OAAO,GAAG,MAAM,CAAA;YAChB,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;SACrC;QAED,2CAA2C;QAC3C,IAAI,CAAC,aAAa,EAAE;YAClB,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;YAC/B,aAAa,CAAC,GAAG,CAAC,GAAG,UAAU,CAAA;SAChC;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AA9CD,wCA8CC;AAED;;GAEG;AACH,SAAgB,KAAK,CAAC,QAAmB,EAAE,QAAgB;IACzD,IAAI,MAAM,GAAc,+BAAS,CAAC,IAAI,CAAA;IAEtC,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;SACnC;aAAM;YACL,MAAM,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;SAClC;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAZD,sBAYC;AAED;;GAEG;AACH,SAAgB,YAAY,CAAC,QAAmB,EAAE,QAAgB;IAChE,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,CAAC,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,CAAA;AAClE,CAAC;AAFD,oCAEC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern.d.ts 
b/node_modules/@actions/glob/lib/internal-pattern.d.ts new file mode 100644 index 00000000..2c2224c8 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern.d.ts @@ -0,0 +1,64 @@ +import { MatchKind } from './internal-match-kind'; +export declare class Pattern { + /** + * Indicates whether matches should be excluded from the result set + */ + readonly negate: boolean; + /** + * The directory to search. The literal path prior to the first glob segment. + */ + readonly searchPath: string; + /** + * The path/pattern segments. Note, only the first segment (the root directory) + * may contain a directory separator character. Use the trailingSeparator field + * to determine whether the pattern ended with a trailing slash. + */ + readonly segments: string[]; + /** + * Indicates the pattern should only match directories, not regular files. + */ + readonly trailingSeparator: boolean; + /** + * The Minimatch object used for matching + */ + private readonly minimatch; + /** + * Used to workaround a limitation with Minimatch when determining a partial + * match and the path is a root directory. For example, when the pattern is + * `/foo/**` or `C:\foo\**` and the path is `/` or `C:\`. + */ + private readonly rootRegExp; + /** + * Indicates that the pattern is implicitly added as opposed to user specified. + */ + private readonly isImplicitPattern; + constructor(pattern: string); + constructor(pattern: string, isImplicitPattern: boolean, segments: undefined, homedir: string); + constructor(negate: boolean, isImplicitPattern: boolean, segments: string[], homedir?: string); + /** + * Matches the pattern against the specified path + */ + match(itemPath: string): MatchKind; + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath: string): boolean; + /** + * Escapes glob patterns within a path + */ + static globEscape(s: string): string; + /** + * Normalizes slashes and ensures absolute root + */ + private static fixupPattern; + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + private static getLiteral; + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + private static regExpEscape; +} diff --git a/node_modules/@actions/glob/lib/internal-pattern.js b/node_modules/@actions/glob/lib/internal-pattern.js new file mode 100644 index 00000000..a17ffe97 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern.js @@ -0,0 +1,255 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pattern = void 0; +const os = __importStar(require("os")); +const path = __importStar(require("path")); +const pathHelper = __importStar(require("./internal-path-helper")); +const assert_1 = __importDefault(require("assert")); +const minimatch_1 = require("minimatch"); +const internal_match_kind_1 = require("./internal-match-kind"); +const internal_path_1 = require("./internal-path"); +const IS_WINDOWS = process.platform === 'win32'; +class Pattern { + constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { + /** + * Indicates whether matches should be excluded from the result set + */ + this.negate = false; + // Pattern overload + let pattern; + if (typeof patternOrNegate === 'string') { + pattern = patternOrNegate.trim(); + } + // Segments overload + else { + // Convert to pattern + segments = segments || []; + assert_1.default(segments.length, `Parameter 'segments' must not empty`); + const root = Pattern.getLiteral(segments[0]); + assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } + } + // Negate + while (pattern.startsWith('!')) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + // Normalize slashes and ensures absolute root + pattern = Pattern.fixupPattern(pattern, homedir); + // Segments + this.segments = new internal_path_1.Path(pattern).segments; + // Trailing slash indicates the pattern should only match directories, not regular files + this.trailingSeparator = pathHelper + .normalizeSeparators(pattern) + .endsWith(path.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + // Search path (literal path prior to the first glob segment) + let foundGlob = false; + const searchSegments = this.segments + .map(x => Pattern.getLiteral(x)) + .filter(x => !foundGlob && !(foundGlob = x === '')); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + // Root RegExp (required when determining partial match) + this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); + this.isImplicitPattern = isImplicitPattern; + // Create minimatch + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); + } + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + // Last segment is globstar? + if (this.segments[this.segments.length - 1] === '**') { + // Normalize slashes + itemPath = pathHelper.normalizeSeparators(itemPath); + // Append a trailing slash. Otherwise Minimatch will not match the directory immediately + // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns + // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. + if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { + // Note, this is safe because the constructor ensures the pattern has an absolute root. + // For example, formats like C: and C:foo on Windows are resolved to an absolute root. 
+ itemPath = `${itemPath}${path.sep}`; + } + } + else { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + // Match + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + } + return internal_match_kind_1.MatchKind.None; + } + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // matchOne does not handle root path correctly + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + } + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS + .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' + .replace(/\*/g, '[*]'); // escape '*' + } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern, homedir) { + // Empty + assert_1.default(pattern, 'pattern cannot be empty'); + // Must not contain `.` segment, unless first segment + // Must not contain `..` segment + const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); + assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r + assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + // Normalize slashes + pattern = pathHelper.normalizeSeparators(pattern); + // Replace leading `.` segment + if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { + pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + } + // Replace leading `~` segment + else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { + homedir = homedir || os.homedir(); + assert_1.default(homedir, 'Unable to determine HOME directory'); + assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = Pattern.globEscape(homedir) + pattern.substr(1); + } + // Replace relative drive root, e.g. pattern is C: or C:foo + else if (IS_WINDOWS && + (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(2); + } + // Replace relative root, e.g. 
pattern is \ or \foo + else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); + if (!root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(1); + } + // Otherwise ensure absolute root + else { + pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + } + return pathHelper.normalizeSeparators(pattern); + } + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + static getLiteral(segment) { + let literal = ''; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + // Escape + if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } + // Wildcard + else if (c === '*' || c === '?') { + return ''; + } + // Character set + else if (c === '[' && i + 1 < segment.length) { + let set = ''; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + // Escape + if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { + set += segment[++i2]; + continue; + } + // Closed + else if (c2 === ']') { + closed = i2; + break; + } + // Otherwise + else { + set += c2; + } + } + // Closed? + if (closed >= 0) { + // Cannot convert + if (set.length > 1) { + return ''; + } + // Convert to literal + if (set) { + literal += set; + i = closed; + continue; + } + } + // Otherwise fall thru + } + // Append + literal += c; + } + return literal; + } + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=internal-pattern.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern.js.map b/node_modules/@actions/glob/lib/internal-pattern.js.map new file mode 100644 index 00000000..08e172d7 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-pattern.js","sourceRoot":"","sources":["../src/internal-pattern.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAC5B,mEAAoD;AACpD,oDAA2B;AAC3B,yCAA8E;AAC9E,+DAA+C;AAC/C,mDAAoC;AAEpC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C,MAAa,OAAO;IAqDlB,YACE,eAAiC,EACjC,iBAAiB,GAAG,KAAK,EACzB,QAAmB,EACnB,OAAgB;QAxDlB;;WAEG;QACM,WAAM,GAAY,KAAK,CAAA;QAuD9B,mBAAmB;QACnB,IAAI,OAAe,CAAA;QACnB,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;YACvC,OAAO,GAAG,eAAe,CAAC,IAAI,EAAE,CAAA;SACjC;QACD,oBAAoB;aACf;YACH,qBAAqB;YACrB,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;YACzB,gBAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,qCAAqC,CAAC,CAAA;YAC9D,MAAM,IAAI,GAAG,OAAO,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;YAC5C,gBAAM,CACJ,IAAI,IAAI,UAAU,CAAC,eAAe,CAAC,IAAI,CAAC,EACxC,wDAAwD,CACzD,CAAA;YACD,OAAO,GAAG,IAAI,oBAAI,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC,IAAI,EAAE,CAAA;YAC9C,IAAI,eAAe,EAAE;gBACnB,OAAO,GAAG,IAAI,OAAO,EAAE,CAAA;aACxB;SACF;QAED,SAAS;QACT,OAAO,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YAC9B,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,MAAM,CAAA;YAC1B,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAA;SACnC;QAED,8CAA8C;QAC9C,OAAO,GAAG,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;QAEhD,WAAW;QACX,IAAI,CAAC,QAAQ,GAAG,IAAI,oBAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAA;QAE1C,wFAAwF;QACxF,IAAI,CAAC,iBAAiB,GAAG,UAAU;aAChC,mBAAmB,CAAC,OAAO,CAAC;aAC5B,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;QACrB,OAAO,GAAG,UAAU,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAA;QAEvD,6DAA6D;QAC7D,IAAI,SAAS,GAAG,KAAK,CAAA;QACrB,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ;aACjC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;aAC/B,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,SAAS,GAAG,CAAC,KAAK,EAAE,CAAC,CAAC,CAAA;QACrD,IAAI,CAAC,UAAU,GAAG,IAAI,oBAAI,CAAC,cAAc,CAAC,CAAC,QAAQ,EAAE,CAAA;QAErD,wDAAwD;QACxD,IAAI,CAAC,UAAU,GAAG,IAAI,MAAM,CAC1B,OAAO,CAAC,YAAY,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EACvC,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CACtB,CAAA;QAED,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAA;QAE1C,mBAAmB;QACnB,MAAM,gBAAgB,GAAsB;YAC1C,GAAG,EAAE,IAAI;YACT,OAAO,EAAE,IAAI;YACb,MAAM,EAAE,UAAU;YAClB,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI;YACX,QAAQ,EAAE,IAAI;SACf,CAAA;QACD,OAAO,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAA;QAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,qBAAS,CAAC,OAAO,EAAE,gBAAgB,CAAC,CAAA;IAC3D,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,QAAgB;QACpB,4BAA4B;QAC5B,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;YACpD,oBAAoB;YACpB,QAAQ,GAAG,UAAU,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAA;YAEnD,wFAAwF;YACxF,sFAAsF;YACtF,+FAA+F;YAC/F,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,iBAAiB,KAAK,KAAK,EAAE;gBACpE,uFAAuF;gBACvF,sFAAsF;gBACtF,QAAQ,GAAG,GAAG,QAAQ,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;aACpC;SACF;aAAM;YACL,wDAAwD;YACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;SAC1D;QAED,QAAQ;QACR,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;YAClC,OAAO,IAAI,CAAC,iBAAiB,CAAC,CAAC,CAAC,+BAAS,CAAC,SAAS,CAAC,CAAC,CAAC,+BAAS,CAAC,GAAG,CAAA;SACpE;QAED,OAAO,+BAAS,CAAC,IAAI,CAAA;IACvB,CAAC;IAED;;OAEG;IACH,YAAY,CAAC,QAAgB;QAC3B,wDAAwD;QACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;QAEzD,+CAA+C;QAC/C,IAAI,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,QAAQ,EAAE;YAC7C,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;SACtC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,CAC5B,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAC1C,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,EACrB,IAAI,CACL,CAAA;IACH,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,UAAU,CAAC,CAAS;QACzB,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,
KAAK,EAAE,MAAM,CAAC,CAAC,CAAC,4BAA4B;aAC5E,OAAO,CAAC,kBAAkB,EAAE,KAAK,CAAC,CAAC,sDAAsD;aACzF,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,aAAa;aACnC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA,CAAC,aAAa;IACxC,CAAC;IAED;;OAEG;IACK,MAAM,CAAC,YAAY,CAAC,OAAe,EAAE,OAAgB;QAC3D,QAAQ;QACR,gBAAM,CAAC,OAAO,EAAE,yBAAyB,CAAC,CAAA;QAE1C,qDAAqD;QACrD,gCAAgC;QAChC,MAAM,eAAe,GAAG,IAAI,oBAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CACzD,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CACtB,CAAA;QACD,gBAAM,CACJ,eAAe,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,EACrE,oBAAoB,OAAO,kDAAkD,CAC9E,CAAA;QAED,kEAAkE;QAClE,gBAAM,CACJ,CAAC,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,eAAe,CAAC,CAAC,CAAC,EAClD,oBAAoB,OAAO,yCAAyC,CACrE,CAAA;QAED,oBAAoB;QACpB,OAAO,GAAG,UAAU,CAAC,mBAAmB,CAAC,OAAO,CAAC,CAAA;QAEjD,8BAA8B;QAC9B,IAAI,OAAO,KAAK,GAAG,IAAI,OAAO,CAAC,UAAU,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC,EAAE;YACzD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SAChE;QACD,8BAA8B;aACzB,IAAI,OAAO,KAAK,GAAG,IAAI,OAAO,CAAC,UAAU,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC,EAAE;YAC9D,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC,OAAO,EAAE,CAAA;YACjC,gBAAM,CAAC,OAAO,EAAE,oCAAoC,CAAC,CAAA;YACrD,gBAAM,CACJ,UAAU,CAAC,eAAe,CAAC,OAAO,CAAC,EACnC,wDAAwD,OAAO,GAAG,CACnE,CAAA;YACD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SAC1D;QACD,2DAA2D;aACtD,IACH,UAAU;YACV,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,OAAO,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,EAC9D;YACA,IAAI,IAAI,GAAG,UAAU,CAAC,kBAAkB,CACtC,gBAAgB,EAChB,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CACrB,CAAA;YACD,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;gBAC9C,IAAI,IAAI,IAAI,CAAA;aACb;YACD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACvD;QACD,mDAAmD;aAC9C,IAAI,UAAU,IAAI,CAAC,OAAO,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,EAAE;YACtE,IAAI,IAAI,GAAG,UAAU,CAAC,kBAAkB,CAAC,gBAAgB,EAAE,IAAI,CAAC,CAAA;YAChE,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;gBACxB,IAAI,IAAI,IAAI,CAAA;aACb;YACD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACvD;QACD,iCAAiC;aAC5B;YACH,OAAO,GAAG,UAAU,CAAC,kBAAkB,CACrC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EACjC,OAAO,CACR,CAAA;SACF;QAED,OAAO,UAAU,CAAC,mBAAmB,CAAC,OAAO,CAAC,CAAA;IAChD,CAAC;IAED;;;OAGG;IACK,MAAM,CAAC,UAAU,CAAC,OAAe;QACvC,IAAI,OAAO,GAAG,EAAE,CAAA;QAChB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACvC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAA;YACpB,SAAS;YACT,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE;gBACvD,OAAO,IAAI,OAAO,CAAC,EAAE,CAAC,CAAC,CAAA;gBACvB,SAAQ;aACT;YACD,WAAW;iBACN,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,EAAE;gBAC/B,OAAO,EAAE,CAAA;aACV;YACD,gBAAgB;iBACX,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE;gBAC5C,IAAI,GAAG,GAAG,EAAE,CAAA;gBACZ,IAAI,MAAM,GAAG,CAAC,CAAC,CAAA;gBACf,KAAK,IAAI,EAAE,GAAG,CAAC,GAAG,CAAC,EAAE,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,EAAE,EAAE;oBAC9C,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAAA;oBACtB,SAAS;oBACT,IAAI,EAAE,KAAK,IAAI,IAAI,CAAC,UAAU,IAAI,EAAE,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE;wBACzD,GAAG,IAAI,OAAO,CAAC,EAAE,EAAE,CAAC,CAAA;wBACpB,SAAQ;qBACT;oBACD,SAAS;yBACJ,IAAI,EAAE,KAAK,GAAG,EAAE;wBACnB,MAAM,GAAG,EAAE,CAAA;wBACX,MAAK;qBACN;oBACD,YAAY;yBACP;wBACH,GAAG,IAAI,EAAE,CAAA;qBACV;iBACF;gBAED,UAAU;gBACV,IAAI,MAAM,IAAI,CAAC,EAAE;oBACf,iBAAiB;oBACjB,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE;wBAClB,OAAO,EAAE,CAAA;qBACV;oBAED,qBAAqB;oBACrB,IAAI,GAAG,EAAE;wBAC
P,OAAO,IAAI,GAAG,CAAA;wBACd,CAAC,GAAG,MAAM,CAAA;wBACV,SAAQ;qBACT;iBACF;gBAED,sBAAsB;aACvB;YAED,SAAS;YACT,OAAO,IAAI,CAAC,CAAA;SACb;QAED,OAAO,OAAO,CAAA;IAChB,CAAC;IAED;;;OAGG;IACK,MAAM,CAAC,YAAY,CAAC,CAAS;QACnC,OAAO,CAAC,CAAC,OAAO,CAAC,iBAAiB,EAAE,MAAM,CAAC,CAAA;IAC7C,CAAC;CACF;AAzUD,0BAyUC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-search-state.d.ts b/node_modules/@actions/glob/lib/internal-search-state.d.ts new file mode 100644 index 00000000..aac344eb --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-search-state.d.ts @@ -0,0 +1,5 @@ +export declare class SearchState { + readonly path: string; + readonly level: number; + constructor(path: string, level: number); +} diff --git a/node_modules/@actions/glob/lib/internal-search-state.js b/node_modules/@actions/glob/lib/internal-search-state.js new file mode 100644 index 00000000..5d266ce1 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-search-state.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SearchState = void 0; +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; + } +} +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-search-state.js.map b/node_modules/@actions/glob/lib/internal-search-state.js.map new file mode 100644 index 00000000..44210124 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-search-state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-search-state.js","sourceRoot":"","sources":["../src/internal-search-state.ts"],"names":[],"mappings":";;;AAAA,MAAa,WAAW;IAItB,YAAY,IAAY,EAAE,KAAa;QACrC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;CACF;AARD,kCAQC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/package.json b/node_modules/@actions/glob/package.json new file mode 100644 index 00000000..4dfd76aa --- /dev/null +++ b/node_modules/@actions/glob/package.json @@ -0,0 +1,43 @@ +{ + "name": "@actions/glob", + "version": "0.1.2", + "preview": true, + "description": "Actions glob lib", + "keywords": [ + "github", + "actions", + "glob" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/glob", + "license": "MIT", + "main": "lib/glob.js", + "types": "lib/glob.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/glob" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } +} diff --git a/node_modules/@actions/io/package.json b/node_modules/@actions/io/package.json index 114e8e59..5ebc63a4 100644 --- a/node_modules/@actions/io/package.json +++ b/node_modules/@actions/io/package.json @@ -1,6 +1,6 @@ { "name": "@actions/io", - "version": "1.1.1", + "version": "1.1.2", "description": "Actions io lib", "keywords": [ "github", diff --git a/node_modules/@azure/abort-controller/CHANGELOG.md b/node_modules/@azure/abort-controller/CHANGELOG.md new file mode 100644 index 00000000..b7645230 
--- /dev/null +++ b/node_modules/@azure/abort-controller/CHANGELOG.md @@ -0,0 +1,34 @@ +# Release History + +## 1.1.0 (2022-05-05) + +- Changed TS compilation target to ES2017 in order to produce smaller bundles and use more native platform features +- With the dropping of support for Node.js versions that are no longer in LTS, the dependency on `@types/node` has been updated to version 12. Read our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +## 1.0.4 (2021-03-04) + +Fixes issue [13985](https://github.com/Azure/azure-sdk-for-js/issues/13985) where abort event listeners that removed themselves when invoked could prevent other event listeners from being invoked. + +## 1.0.3 (2021-02-23) + +Support Typescript version < 3.6 by down-leveling the type definition files. ([PR 12793](https://github.com/Azure/azure-sdk-for-js/pull/12793)) + +## 1.0.2 (2020-01-07) + +Updates the `tslib` dependency to version 2.x. + +## 1.0.1 (2019-12-04) + +Fixes the [bug 6271](https://github.com/Azure/azure-sdk-for-js/issues/6271) that can occur with angular prod builds due to triple-slash directives. +([PR 6344](https://github.com/Azure/azure-sdk-for-js/pull/6344)) + +## 1.0.0 (2019-10-29) + +This release marks the general availability of the `@azure/abort-controller` package. + +Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel. +([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860)) + +## 1.0.0-preview.2 (2019-09-09) + +Listeners attached to an `AbortSignal` now receive an event with the type `abort`. (PR #4756) diff --git a/node_modules/@azure/abort-controller/LICENSE b/node_modules/@azure/abort-controller/LICENSE new file mode 100644 index 00000000..ea8fb151 --- /dev/null +++ b/node_modules/@azure/abort-controller/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/abort-controller/README.md b/node_modules/@azure/abort-controller/README.md new file mode 100644 index 00000000..ce578d95 --- /dev/null +++ b/node_modules/@azure/abort-controller/README.md @@ -0,0 +1,110 @@ +# Azure Abort Controller client library for JavaScript + +The `@azure/abort-controller` package provides `AbortController` and `AbortSignal` classes. 
These classes are compatible +with the [AbortController](https://developer.mozilla.org/docs/Web/API/AbortController) built into modern browsers +and the `AbortSignal` used by [fetch](https://developer.mozilla.org/docs/Web/API/Fetch_API). +Use the `AbortController` class to create an instance of the `AbortSignal` class that can be used to cancel an operation +in an Azure SDK that accept a parameter of type `AbortSignalLike`. + +Key links: + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller) +- [Package (npm)](https://www.npmjs.com/package/@azure/abort-controller) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/overview/azure/abort-controller-readme) + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/abort-controller +``` + +## Key Concepts + +Use the `AbortController` to create an `AbortSignal` which can then be passed to Azure SDK operations to cancel +pending work. The `AbortSignal` can be accessed via the `signal` property on an instantiated `AbortController`. +An `AbortSignal` can also be returned directly from a static method, e.g. `AbortController.timeout(100)`. +that is cancelled after 100 milliseconds. + +Calling `abort()` on the instantiated `AbortController` invokes the registered `abort` +event listeners on the associated `AbortSignal`. +Any subsequent calls to `abort()` on the same controller will have no effect. + +The `AbortSignal.none` static property returns an `AbortSignal` that can not be aborted. + +Multiple instances of an `AbortSignal` can be linked so that calling `abort()` on the parent signal, +aborts all linked signals. +This linkage is one-way, meaning that a parent signal can affect a linked signal, but not the other way around. +To link `AbortSignals` together, pass in the parent signals to the `AbortController` constructor. + +## Examples + +The below examples assume that `doAsyncWork` is a function that takes a bag of properties, one of which is +of the abort signal. + +### Example 1 - basic usage + +```js +import { AbortController } from "@azure/abort-controller"; + +const controller = new AbortController(); +doAsyncWork({ abortSignal: controller.signal }); + +// at some point later +controller.abort(); +``` + +### Example 2 - Aborting with timeout + +```js +import { AbortController } from "@azure/abort-controller"; + +const signal = AbortController.timeout(1000); +doAsyncWork({ abortSignal: signal }); +``` + +### Example 3 - Aborting sub-tasks + +```js +import { AbortController } from "@azure/abort-controller"; + +const allTasksController = new AbortController(); + +const subTask1 = new AbortController(allTasksController.signal); +const subtask2 = new AbortController(allTasksController.signal); + +allTasksController.abort(); // aborts allTasksSignal, subTask1, subTask2 +subTask1.abort(); // aborts only subTask1 +``` + +### Example 4 - Aborting with parent signal or timeout + +```js +import { AbortController } from "@azure/abort-controller"; + +const allTasksController = new AbortController(); + +// create a subtask controller that can be aborted manually, +// or when either the parent task aborts or the timeout is reached. +const subTask = new AbortController(allTasksController.signal, AbortController.timeout(100)); + +allTasksController.abort(); // aborts allTasksSignal, subTask +subTask.abort(); // aborts only subTask +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. 
Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fabort-controller%2FREADME.png) diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js new file mode 100644 index 00000000..0d260f30 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortSignal, abortSignal } from "./AbortSignal"; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. + * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +export class AbortController { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + constructor(parentSignals) { + this._signal = new AbortSignal(); + if (!parentSignals) { + return; + } + // coerce parentSignals into an array + if (!Array.isArray(parentSignals)) { + // eslint-disable-next-line prefer-rest-params + parentSignals = arguments; + } + for (const parentSignal of parentSignals) { + // if the parent signal has already had abort() called, + // then call abort on this signal as well. + if (parentSignal.aborted) { + this.abort(); + } + else { + // when the parent signal aborts, this signal should as well. 
+ parentSignal.addEventListener("abort", () => { + this.abort(); + }); + } + } + } + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort() { + abortSignal(this._signal); + } + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); + // Prevent the active Timer from keeping the Node.js event loop active. + if (typeof timer.unref === "function") { + timer.unref(); + } + return signal; + } +} +//# sourceMappingURL=AbortController.js.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map new file mode 100644 index 00000000..8a6f7d0d --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortController.js","sourceRoot":"","sources":["../../src/AbortController.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAmB,WAAW,EAAE,MAAM,eAAe,CAAC;AAE1E;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,MAAM,OAAO,eAAe;IAW1B,6EAA6E;IAC7E,YAAY,aAAmB;QAC7B,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAEjC,IAAI,CAAC,aAAa,EAAE;YAClB,OAAO;SACR;QACD,qCAAqC;QACrC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;YACjC,8CAA8C;YAC9C,aAAa,GAAG,SAAS,CAAC;SAC3B;QACD,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;YACxC,uDAAuD;YACvD,0CAA0C;YAC1C,IAAI,YAAY,CAAC,OAAO,EAAE;gBACxB,IAAI,CAAC,KAAK,EAAE,CAAC;aACd;iBAAM;gBACL,6DAA6D;gBAC7D,YAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;oBAC1C,IAAI,CAAC,KAAK,EAAE,CAAC;gBACf,CAAC,CAAC,CAAC;aACJ;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;OAGG;IACH,KAAK;QACH,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC5B,CAAC;IAED;;;OAGG;IACI,MAAM,CAAC,OAAO,CAAC,EAAU;QAC9B,MAAM,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;QACjC,MAAM,KAAK,GAAG,UAAU,CAAC,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,CAAC;QAClD,uEAAuE;QACvE,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;YACrC,KAAK,CAAC,KAAK,EAAE,CAAC;SACf;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignal, AbortSignalLike, abortSignal } from \"./AbortSignal\";\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n\n/**\n * An AbortController provides an AbortSignal and the associated controls to signal\n * that an asynchronous operation should be aborted.\n *\n * 
@example\n * Abort an operation when another event fires\n * ```ts\n * const controller = new AbortController();\n * const signal = controller.signal;\n * doAsyncWork(signal);\n * button.addEventListener('click', () => controller.abort());\n * ```\n *\n * @example\n * Share aborter cross multiple operations in 30s\n * ```ts\n * // Upload the same data to 2 different data centers at the same time,\n * // abort another when any of them is finished\n * const controller = AbortController.withTimeout(30 * 1000);\n * doAsyncWork(controller.signal).then(controller.abort);\n * doAsyncWork(controller.signal).then(controller.abort);\n *```\n *\n * @example\n * Cascaded aborting\n * ```ts\n * // All operations can't take more than 30 seconds\n * const aborter = Aborter.timeout(30 * 1000);\n *\n * // Following 2 operations can't take more than 25 seconds\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * ```\n */\nexport class AbortController {\n private _signal: AbortSignal;\n\n /**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(parentSignals?: AbortSignalLike[]);\n /**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(...parentSignals: AbortSignalLike[]);\n // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types\n constructor(parentSignals?: any) {\n this._signal = new AbortSignal();\n\n if (!parentSignals) {\n return;\n }\n // coerce parentSignals into an array\n if (!Array.isArray(parentSignals)) {\n // eslint-disable-next-line prefer-rest-params\n parentSignals = arguments;\n }\n for (const parentSignal of parentSignals) {\n // if the parent signal has already had abort() called,\n // then call abort on this signal as well.\n if (parentSignal.aborted) {\n this.abort();\n } else {\n // when the parent signal aborts, this signal should as well.\n parentSignal.addEventListener(\"abort\", () => {\n this.abort();\n });\n }\n }\n }\n\n /**\n * The AbortSignal associated with this controller that will signal aborted\n * when the abort method is called on this controller.\n *\n * @readonly\n */\n public get signal(): AbortSignal {\n return this._signal;\n }\n\n /**\n * Signal that any operations passed this controller's associated abort signal\n * to cancel any remaining work and throw an `AbortError`.\n */\n abort(): void {\n abortSignal(this._signal);\n }\n\n /**\n * Creates a new AbortSignal instance that will abort after the provided ms.\n * @param ms - Elapsed time in milliseconds to trigger an abort.\n */\n public static timeout(ms: number): AbortSignal {\n const signal = new AbortSignal();\n const timer = setTimeout(abortSignal, ms, signal);\n // Prevent the active Timer from keeping the Node.js event loop active.\n if (typeof timer.unref === \"function\") {\n timer.unref();\n }\n return signal;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js new file mode 100644 index 00000000..e97336ad --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +export class AbortSignal { + constructor() { + /** + * onabort event listener. + */ + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); + } + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + listeners.push(listener); + } + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); + } + } + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + } +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +export function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + const listeners = listenersMap.get(signal); + if (listeners) { + // Create a copy of listeners so mutations to the array + // (e.g. via removeListener calls) don't affect the listeners + // we invoke. 
+ listeners.slice().forEach((listener) => { + listener.call(signal, { type: "abort" }); + }); + } + abortedMap.set(signal, true); +} +//# sourceMappingURL=AbortSignal.js.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map new file mode 100644 index 00000000..1ca33bdf --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignal.js","sourceRoot":"","sources":["../../src/AbortSignal.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,6CAA6C;AAI7C,MAAM,YAAY,GAAG,IAAI,OAAO,EAAqC,CAAC;AACtE,MAAM,UAAU,GAAG,IAAI,OAAO,EAAwB,CAAC;AA6BvD;;;;;;;;;;;;GAYG;AACH,MAAM,OAAO,WAAW;IACtB;QA2BA;;WAEG;QACI,YAAO,GAAiC,IAAI,CAAC;QA7BlD,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAC3B,UAAU,CAAC,GAAG,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC;IAED;;;;OAIG;IACH,IAAW,OAAO;QAChB,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;YACzB,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;SAC1E;QAED,OAAO,UAAU,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;IAC/B,CAAC;IAED;;;;OAIG;IACI,MAAM,KAAK,IAAI;QACpB,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;IAOD;;;;;OAKG;IACI,gBAAgB;IACrB,yCAAyC;IACzC,KAAc,EACd,QAAiD;QAEjD,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;YAC3B,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;SAC1E;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;QAC1C,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;OAKG;IACI,mBAAmB;IACxB,yCAAyC;IACzC,KAAc,EACd,QAAiD;QAEjD,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;YAC3B,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;SAC1E;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;QAE1C,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC1C,IAAI,KAAK,GAAG,CAAC,CAAC,EAAE;YACd,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;SAC5B;IACH,CAAC;IAED;;OAEG;IACH,aAAa,CAAC,MAAa;QACzB,MAAM,IAAI,KAAK,CACb,kHAAkH,CACnH,CAAC;IACJ,CAAC;CACF;AAED;;;;;;;;GAQG;AACH,wEAAwE;AACxE,MAAM,UAAU,WAAW,CAAC,MAAmB;IAC7C,IAAI,MAAM,CAAC,OAAO,EAAE;QAClB,OAAO;KACR;IAED,IAAI,MAAM,CAAC,OAAO,EAAE;QAClB,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;KAC7B;IAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,MAAM,CAAE,CAAC;IAC5C,IAAI,SAAS,EAAE;QACb,uDAAuD;QACvD,6DAA6D;QAC7D,aAAa;QACb,SAAS,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,EAAE,EAAE;YACrC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;KACJ;IAED,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AAC/B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// eslint-disable-next-line @typescript-eslint/triple-slash-reference\n/// \n\ntype AbortEventListener = (this: AbortSignalLike, ev?: any) => any;\n\nconst listenersMap = new WeakMap();\nconst abortedMap = new WeakMap();\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n}\n\n/**\n * An aborter instance implements AbortSignal interface, can abort HTTP 
requests.\n *\n * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.\n * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation\n * cannot or will not ever be cancelled.\n *\n * @example\n * Abort without timeout\n * ```ts\n * await doAsyncWork(AbortSignal.none);\n * ```\n */\nexport class AbortSignal implements AbortSignalLike {\n constructor() {\n listenersMap.set(this, []);\n abortedMap.set(this, false);\n }\n\n /**\n * Status of whether aborted or not.\n *\n * @readonly\n */\n public get aborted(): boolean {\n if (!abortedMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n return abortedMap.get(this)!;\n }\n\n /**\n * Creates a new AbortSignal instance that will never be aborted.\n *\n * @readonly\n */\n public static get none(): AbortSignal {\n return new AbortSignal();\n }\n\n /**\n * onabort event listener.\n */\n public onabort: ((ev?: Event) => any) | null = null;\n\n /**\n * Added new \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be added\n */\n public addEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n listeners.push(listener);\n }\n\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be removed\n */\n public removeEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n\n const index = listeners.indexOf(listener);\n if (index > -1) {\n listeners.splice(index, 1);\n }\n }\n\n /**\n * Dispatches a synthetic event to the AbortSignal.\n */\n dispatchEvent(_event: Event): boolean {\n throw new Error(\n \"This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.\"\n );\n }\n}\n\n/**\n * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.\n * Will try to trigger abort event for all linked AbortSignal nodes.\n *\n * - If there is a timeout, the timer will be cancelled.\n * - If aborted is true, nothing will happen.\n *\n * @internal\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters\nexport function abortSignal(signal: AbortSignal): void {\n if (signal.aborted) {\n return;\n }\n\n if (signal.onabort) {\n signal.onabort.call(signal);\n }\n\n const listeners = listenersMap.get(signal)!;\n if (listeners) {\n // Create a copy of listeners so mutations to the array\n // (e.g. 
via removeListener calls) don't affect the listeners\n // we invoke.\n listeners.slice().forEach((listener) => {\n listener.call(signal, { type: \"abort\" });\n });\n }\n\n abortedMap.set(signal, true);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/index.js b/node_modules/@azure/abort-controller/dist-esm/src/index.js new file mode 100644 index 00000000..ddbf505b --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/index.js @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// Changes to Aborter +// * Rename Aborter to AbortSignal +// * Remove withValue and getValue - async context should be solved differently/wholistically, not tied to cancellation +// * Remove withTimeout, it's moved to the controller +// * AbortSignal constructor no longer takes a parent. Cancellation graphs are created from the controller. +// Potential changes to align with DOM Spec +// * dispatchEvent on Signal +export { AbortController, AbortError } from "./AbortController"; +export { AbortSignal } from "./AbortSignal"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/index.js.map b/node_modules/@azure/abort-controller/dist-esm/src/index.js.map new file mode 100644 index 00000000..def556e6 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,qBAAqB;AACrB,kCAAkC;AAClC,uHAAuH;AACvH,qDAAqD;AACrD,2GAA2G;AAE3G,2CAA2C;AAC3C,4BAA4B;AAE5B,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAChE,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// Changes to Aborter\n// * Rename Aborter to AbortSignal\n// * Remove withValue and getValue - async context should be solved differently/wholistically, not tied to cancellation\n// * Remove withTimeout, it's moved to the controller\n// * AbortSignal constructor no longer takes a parent. Cancellation graphs are created from the controller.\n\n// Potential changes to align with DOM Spec\n// * dispatchEvent on Signal\n\nexport { AbortController, AbortError } from \"./AbortController\";\nexport { AbortSignal, AbortSignalLike } from \"./AbortSignal\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist/index.js b/node_modules/@azure/abort-controller/dist/index.js new file mode 100644 index 00000000..650dd5f3 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist/index.js @@ -0,0 +1,239 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +class AbortSignal { + constructor() { + /** + * onabort event listener. 
+ */ + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); + } + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + listeners.push(listener); + } + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); + } + } + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + } +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + const listeners = listenersMap.get(signal); + if (listeners) { + // Create a copy of listeners so mutations to the array + // (e.g. via removeListener calls) don't affect the listeners + // we invoke. + listeners.slice().forEach((listener) => { + listener.call(signal, { type: "abort" }); + }); + } + abortedMap.set(signal, true); +} + +// Copyright (c) Microsoft Corporation. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. 
+ * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +class AbortController { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + constructor(parentSignals) { + this._signal = new AbortSignal(); + if (!parentSignals) { + return; + } + // coerce parentSignals into an array + if (!Array.isArray(parentSignals)) { + // eslint-disable-next-line prefer-rest-params + parentSignals = arguments; + } + for (const parentSignal of parentSignals) { + // if the parent signal has already had abort() called, + // then call abort on this signal as well. + if (parentSignal.aborted) { + this.abort(); + } + else { + // when the parent signal aborts, this signal should as well. + parentSignal.addEventListener("abort", () => { + this.abort(); + }); + } + } + } + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort() { + abortSignal(this._signal); + } + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); + // Prevent the active Timer from keeping the Node.js event loop active. 
+ if (typeof timer.unref === "function") { + timer.unref(); + } + return signal; + } +} + +exports.AbortController = AbortController; +exports.AbortError = AbortError; +exports.AbortSignal = AbortSignal; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/abort-controller/dist/index.js.map b/node_modules/@azure/abort-controller/dist/index.js.map new file mode 100644 index 00000000..148b005b --- /dev/null +++ b/node_modules/@azure/abort-controller/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/AbortSignal.ts","../src/AbortController.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// eslint-disable-next-line @typescript-eslint/triple-slash-reference\n/// \n\ntype AbortEventListener = (this: AbortSignalLike, ev?: any) => any;\n\nconst listenersMap = new WeakMap();\nconst abortedMap = new WeakMap();\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n}\n\n/**\n * An aborter instance implements AbortSignal interface, can abort HTTP requests.\n *\n * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.\n * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation\n * cannot or will not ever be cancelled.\n *\n * @example\n * Abort without timeout\n * ```ts\n * await doAsyncWork(AbortSignal.none);\n * ```\n */\nexport class AbortSignal implements AbortSignalLike {\n constructor() {\n listenersMap.set(this, []);\n abortedMap.set(this, false);\n }\n\n /**\n * Status of whether aborted or not.\n *\n * @readonly\n */\n public get aborted(): boolean {\n if (!abortedMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n return abortedMap.get(this)!;\n }\n\n /**\n * Creates a new AbortSignal instance that will never be aborted.\n *\n * @readonly\n */\n public static get none(): AbortSignal {\n return new AbortSignal();\n }\n\n /**\n * onabort event listener.\n */\n public onabort: ((ev?: Event) => any) | null = null;\n\n /**\n * Added new \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be added\n */\n public addEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n listeners.push(listener);\n }\n\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be removed\n */\n public removeEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n 
): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n\n const index = listeners.indexOf(listener);\n if (index > -1) {\n listeners.splice(index, 1);\n }\n }\n\n /**\n * Dispatches a synthetic event to the AbortSignal.\n */\n dispatchEvent(_event: Event): boolean {\n throw new Error(\n \"This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.\"\n );\n }\n}\n\n/**\n * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.\n * Will try to trigger abort event for all linked AbortSignal nodes.\n *\n * - If there is a timeout, the timer will be cancelled.\n * - If aborted is true, nothing will happen.\n *\n * @internal\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters\nexport function abortSignal(signal: AbortSignal): void {\n if (signal.aborted) {\n return;\n }\n\n if (signal.onabort) {\n signal.onabort.call(signal);\n }\n\n const listeners = listenersMap.get(signal)!;\n if (listeners) {\n // Create a copy of listeners so mutations to the array\n // (e.g. via removeListener calls) don't affect the listeners\n // we invoke.\n listeners.slice().forEach((listener) => {\n listener.call(signal, { type: \"abort\" });\n });\n }\n\n abortedMap.set(signal, true);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignal, AbortSignalLike, abortSignal } from \"./AbortSignal\";\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n\n/**\n * An AbortController provides an AbortSignal and the associated controls to signal\n * that an asynchronous operation should be aborted.\n *\n * @example\n * Abort an operation when another event fires\n * ```ts\n * const controller = new AbortController();\n * const signal = controller.signal;\n * doAsyncWork(signal);\n * button.addEventListener('click', () => controller.abort());\n * ```\n *\n * @example\n * Share aborter cross multiple operations in 30s\n * ```ts\n * // Upload the same data to 2 different data centers at the same time,\n * // abort another when any of them is finished\n * const controller = AbortController.withTimeout(30 * 1000);\n * doAsyncWork(controller.signal).then(controller.abort);\n * doAsyncWork(controller.signal).then(controller.abort);\n *```\n *\n * @example\n * Cascaded aborting\n * ```ts\n * // All operations can't take more than 30 seconds\n * const aborter = Aborter.timeout(30 * 1000);\n *\n * // Following 2 operations can't take more than 25 seconds\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * ```\n */\nexport class AbortController {\n private _signal: AbortSignal;\n\n /**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(parentSignals?: AbortSignalLike[]);\n /**\n * 
@param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(...parentSignals: AbortSignalLike[]);\n // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types\n constructor(parentSignals?: any) {\n this._signal = new AbortSignal();\n\n if (!parentSignals) {\n return;\n }\n // coerce parentSignals into an array\n if (!Array.isArray(parentSignals)) {\n // eslint-disable-next-line prefer-rest-params\n parentSignals = arguments;\n }\n for (const parentSignal of parentSignals) {\n // if the parent signal has already had abort() called,\n // then call abort on this signal as well.\n if (parentSignal.aborted) {\n this.abort();\n } else {\n // when the parent signal aborts, this signal should as well.\n parentSignal.addEventListener(\"abort\", () => {\n this.abort();\n });\n }\n }\n }\n\n /**\n * The AbortSignal associated with this controller that will signal aborted\n * when the abort method is called on this controller.\n *\n * @readonly\n */\n public get signal(): AbortSignal {\n return this._signal;\n }\n\n /**\n * Signal that any operations passed this controller's associated abort signal\n * to cancel any remaining work and throw an `AbortError`.\n */\n abort(): void {\n abortSignal(this._signal);\n }\n\n /**\n * Creates a new AbortSignal instance that will abort after the provided ms.\n * @param ms - Elapsed time in milliseconds to trigger an abort.\n */\n public static timeout(ms: number): AbortSignal {\n const signal = new AbortSignal();\n const timer = setTimeout(abortSignal, ms, signal);\n // Prevent the active Timer from keeping the Node.js event loop active.\n if (typeof timer.unref === \"function\") {\n timer.unref();\n }\n return signal;\n }\n}\n"],"names":[],"mappings":";;;;AAAA;AACA;AAGA;AAIA,MAAM,YAAY,GAAG,IAAI,OAAO,EAAqC,CAAC;AACtE,MAAM,UAAU,GAAG,IAAI,OAAO,EAAwB,CAAC;AA6BvD;;;;;;;;;;;;AAYG;MACU,WAAW,CAAA;AACtB,IAAA,WAAA,GAAA;AA2BA;;AAEG;QACI,IAAO,CAAA,OAAA,GAAiC,IAAI,CAAC;AA7BlD,QAAA,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;AAC3B,QAAA,UAAU,CAAC,GAAG,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;KAC7B;AAED;;;;AAIG;AACH,IAAA,IAAW,OAAO,GAAA;AAChB,QAAA,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AACzB,YAAA,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;AAC1E,SAAA;AAED,QAAA,OAAO,UAAU,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;KAC9B;AAED;;;;AAIG;AACI,IAAA,WAAW,IAAI,GAAA;QACpB,OAAO,IAAI,WAAW,EAAE,CAAC;KAC1B;AAOD;;;;;AAKG;IACI,gBAAgB;;AAErB,IAAA,KAAc,EACd,QAAiD,EAAA;AAEjD,QAAA,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;AAC1E,SAAA;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;AAC1C,QAAA,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC1B;AAED;;;;;AAKG;IACI,mBAAmB;;AAExB,IAAA,KAAc,EACd,QAAiD,EAAA;AAEjD,QAAA,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;AAC1E,SAAA;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;QAE1C,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AAC1C,QAAA,IAAI,KAAK,GAAG,CAAC,CAAC,EAAE;AACd,YAAA,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AAC5B,SAAA;KACF;AAED;;AAEG;AACH,IAAA,aAAa,CAAC,MAAa,EAAA;AACzB,QAAA,MAAM,IAAI,KAAK,CACb,kHAAkH,CACnH,CAAC;KACH;AACF,CAAA;AAED;;;;;;;;AAQG;AACH;AACM,SAAU,WAAW,CAAC,MAAmB,EAAA;IAC7C,IAAI,MAAM,CAAC,OAAO,EAAE;QAClB,OAAO;AACR,KAAA;IAED,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,QAAA,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAC7B,KAAA;IAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,MAAM,CAAE,CAAC;AAC5C,IAAA,IAAI,SAAS,EAAE;;;;QAIb,SAAS,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,KAAI;YACrC,QAAQ,CAAC,
IAAI,CAAC,MAAM,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;AACJ,KAAA;AAED,IAAA,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AAC/B;;ACtKA;AAKA;;;;;;;;;;;;;;;;;AAiBG;AACG,MAAO,UAAW,SAAQ,KAAK,CAAA;AACnC,IAAA,WAAA,CAAY,OAAgB,EAAA;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;KAC1B;AACF,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiCG;MACU,eAAe,CAAA;;AAY1B,IAAA,WAAA,CAAY,aAAmB,EAAA;AAC7B,QAAA,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAEjC,IAAI,CAAC,aAAa,EAAE;YAClB,OAAO;AACR,SAAA;;AAED,QAAA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;;YAEjC,aAAa,GAAG,SAAS,CAAC;AAC3B,SAAA;AACD,QAAA,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;;;YAGxC,IAAI,YAAY,CAAC,OAAO,EAAE;gBACxB,IAAI,CAAC,KAAK,EAAE,CAAC;AACd,aAAA;AAAM,iBAAA;;AAEL,gBAAA,YAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAK;oBAC1C,IAAI,CAAC,KAAK,EAAE,CAAC;AACf,iBAAC,CAAC,CAAC;AACJ,aAAA;AACF,SAAA;KACF;AAED;;;;;AAKG;AACH,IAAA,IAAW,MAAM,GAAA;QACf,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AAED;;;AAGG;IACH,KAAK,GAAA;AACH,QAAA,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;AAED;;;AAGG;IACI,OAAO,OAAO,CAAC,EAAU,EAAA;AAC9B,QAAA,MAAM,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;QACjC,MAAM,KAAK,GAAG,UAAU,CAAC,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,CAAC;;AAElD,QAAA,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;YACrC,KAAK,CAAC,KAAK,EAAE,CAAC;AACf,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AACF;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/abort-controller/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 00000000..0e425423 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/LICENSE.txt b/node_modules/@azure/abort-controller/node_modules/tslib/LICENSE.txt new file mode 100644 index 00000000..bfe6430c --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/README.md b/node_modules/@azure/abort-controller/node_modules/tslib/README.md new file mode 100644 index 00000000..72ff8e79 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. 
+ +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/modules/index.js b/node_modules/@azure/abort-controller/node_modules/tslib/modules/index.js new file mode 100644 index 00000000..aaac8bf9 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/modules/package.json b/node_modules/@azure/abort-controller/node_modules/tslib/modules/package.json new file mode 100644 index 00000000..aafa0e4b --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/package.json b/node_modules/@azure/abort-controller/node_modules/tslib/package.json new file mode 100644 index 00000000..0ec2c634 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.d.ts b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.d.ts new file 
mode 100644 index 00000000..b8e49f08 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. 
+ * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. 
+ * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. + * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. 
+ * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. 
+ */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. + */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.html b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.html new file mode 100644 index 00000000..b122e41b --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.js b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.js new file mode 100644 index 00000000..e6d77771 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.html b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.html new file mode 100644 index 00000000..44c9ba51 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.js b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.js new file mode 100644 index 00000000..2b7885c1 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/abort-controller/package.json b/node_modules/@azure/abort-controller/package.json new file mode 100644 index 00000000..6126bc27 --- /dev/null +++ b/node_modules/@azure/abort-controller/package.json @@ -0,0 +1,104 @@ +{ + "name": "@azure/abort-controller", + "sdk-type": "client", + "version": "1.1.0", + "description": "Microsoft Azure SDK for JavaScript - Aborter", + "main": "./dist/index.js", + "module": "dist-esm/src/index.js", + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p . && dev-tool run bundle", + "build:types": "downlevel-dts types/src types/3.1", + "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . 
&& npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "types": "./types/src/index.d.ts", + "typesVersions": { + "<3.6": { + "types/src/*": [ + "types/3.1/*" + ] + } + }, + "files": [ + "dist/", + "dist-esm/src/", + "shims-public.d.ts", + "types/src", + "types/3.1", + "README.md", + "LICENSE" + ], + "engines": { + "node": ">=12.0.0" + }, + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "aborter", + "abortsignal", + "cancellation", + "node.js", + "typescript", + "javascript", + "browser", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md", + "sideEffects": false, + "dependencies": { + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "7.18.11", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "downlevel-dts": "^0.8.0", + "eslint": "^7.15.0", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "rimraf": "^3.0.0", + "ts-node": "^10.0.0", + "typescript": "~4.6.0" + } +} diff --git a/node_modules/@azure/abort-controller/shims-public.d.ts b/node_modules/@azure/abort-controller/shims-public.d.ts new file mode 100644 index 00000000..002bec37 --- /dev/null +++ b/node_modules/@azure/abort-controller/shims-public.d.ts @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +// forward declaration of Event in case DOM libs are not present. +interface Event {} diff --git a/node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts b/node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts new file mode 100644 index 00000000..6f935db0 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts @@ -0,0 +1,85 @@ +import { AbortSignal, AbortSignalLike } from "./AbortSignal"; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. 
+ * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. + * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +export declare class AbortController { + private _signal; + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(parentSignals?: AbortSignalLike[]); + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(...parentSignals: AbortSignalLike[]); + /* + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + readonly signal: AbortSignal; + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort(): void; + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms: number): AbortSignal; +} +//# sourceMappingURL=AbortController.d.ts.map diff --git a/node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts b/node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts new file mode 100644 index 00000000..53d6a778 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts @@ -0,0 +1,80 @@ +/// +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. 
+ * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +export declare class AbortSignal implements AbortSignalLike { + constructor(); + /* + * Status of whether aborted or not. + * + * @readonly + */ + readonly aborted: boolean; + /* + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static readonly none: AbortSignal; + /** + * onabort event listener. + */ + onabort: ((ev?: Event) => any) | null; + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event: Event): boolean; +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +export declare function abortSignal(signal: AbortSignal): void; +//# sourceMappingURL=AbortSignal.d.ts.map diff --git a/node_modules/@azure/abort-controller/types/3.1/index.d.ts b/node_modules/@azure/abort-controller/types/3.1/index.d.ts new file mode 100644 index 00000000..1345f47f --- /dev/null +++ b/node_modules/@azure/abort-controller/types/3.1/index.d.ts @@ -0,0 +1,3 @@ +export { AbortController, AbortError } from "./AbortController"; +export { AbortSignal, AbortSignalLike } from "./AbortSignal"; +//# sourceMappingURL=index.d.ts.map diff --git a/node_modules/@azure/abort-controller/types/src/AbortController.d.ts b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts new file mode 100644 index 00000000..c07beb90 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts @@ -0,0 +1,85 @@ +import { AbortSignal, AbortSignalLike } from "./AbortSignal"; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. 
+ * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +export declare class AbortController { + private _signal; + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(parentSignals?: AbortSignalLike[]); + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(...parentSignals: AbortSignalLike[]); + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal(): AbortSignal; + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort(): void; + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms: number): AbortSignal; +} +//# sourceMappingURL=AbortController.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map new file mode 100644 index 00000000..cb855afa --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortController.d.ts","sourceRoot":"","sources":["../../src/AbortController.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAe,MAAM,eAAe,CAAC;AAE1E;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,qBAAa,eAAe;IAC1B,OAAO,CAAC,OAAO,CAAc;IAE7B;;OAEG;gBACS,aAAa,CAAC,EAAE,eAAe,EAAE;IAC7C;;OAEG;gBACS,GAAG,aAAa,EAAE,eAAe,EAAE;IA2B/C;;;;;OAKG;IACH,IAAW,MAAM,IAAI,WAAW,CAE/B;IAED;;;OAGG;IACH,KAAK,IAAI,IAAI;IAIb;;;OAGG;WACW,OAAO,CAAC,EAAE,EAAE,MAAM,GAAG,WAAW;CAS/C"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts new file mode 100644 index 00000000..7d31cb13 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts @@ -0,0 +1,80 @@ +/// +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. 
+ */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +export declare class AbortSignal implements AbortSignalLike { + constructor(); + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted(): boolean; + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none(): AbortSignal; + /** + * onabort event listener. + */ + onabort: ((ev?: Event) => any) | null; + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event: Event): boolean; +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. 
+ * + * @internal + */ +export declare function abortSignal(signal: AbortSignal): void; +//# sourceMappingURL=AbortSignal.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map new file mode 100644 index 00000000..c82bea3a --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignal.d.ts","sourceRoot":"","sources":["../../src/AbortSignal.ts"],"names":[],"mappings":";AAWA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT;AAED;;;;;;;;;;;;GAYG;AACH,qBAAa,WAAY,YAAW,eAAe;;IAMjD;;;;OAIG;IACH,IAAW,OAAO,IAAI,OAAO,CAM5B;IAED;;;;OAIG;IACH,WAAkB,IAAI,IAAI,WAAW,CAEpC;IAED;;OAEG;IACI,OAAO,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,CAAQ;IAEpD;;;;;OAKG;IACI,gBAAgB,CAErB,KAAK,EAAE,OAAO,EACd,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,GAChD,IAAI;IASP;;;;;OAKG;IACI,mBAAmB,CAExB,KAAK,EAAE,OAAO,EACd,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,GAChD,IAAI;IAaP;;OAEG;IACH,aAAa,CAAC,MAAM,EAAE,KAAK,GAAG,OAAO;CAKtC;AAED;;;;;;;;GAQG;AAEH,wBAAgB,WAAW,CAAC,MAAM,EAAE,WAAW,GAAG,IAAI,CAoBrD"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/index.d.ts b/node_modules/@azure/abort-controller/types/src/index.d.ts new file mode 100644 index 00000000..e5afaf43 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/index.d.ts @@ -0,0 +1,3 @@ +export { AbortController, AbortError } from "./AbortController"; +export { AbortSignal, AbortSignalLike } from "./AbortSignal"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/index.d.ts.map b/node_modules/@azure/abort-controller/types/src/index.d.ts.map new file mode 100644 index 00000000..a0629392 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAChE,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json b/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json new file mode 100644 index 00000000..7b5aee32 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. +{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.18.11" + } + ] +} diff --git a/node_modules/@azure/core-auth/CHANGELOG.md b/node_modules/@azure/core-auth/CHANGELOG.md new file mode 100644 index 00000000..ac1d8d73 --- /dev/null +++ b/node_modules/@azure/core-auth/CHANGELOG.md @@ -0,0 +1,61 @@ +# Release History + +## 1.3.2 (2021-07-01) + +- Added `tenantId` optional property to the `GetTokenOptions` interface. 
If `tenantId` is set, credentials will be able to use multi-tenant authentication, in the cases when it's enabled. + +## 1.3.0 (2021-03-30) + +- Adds the `AzureNamedKeyCredential` class which supports credential rotation and a corresponding `NamedKeyCredential` interface to support the use of static string-based names and keys in Azure clients. +- Adds the `isNamedKeyCredential` and `isSASCredential` typeguard functions similar to the existing `isTokenCredential`. + +## 1.2.0 (2021-02-08) + +- Add `AzureSASCredential` and `SASCredential` for use by service clients which allow authenticiation using a shared access signature. + +## 1.1.4 (2021-01-07) + +- Removed direct dependency on `@opentelemetry/api` and `@azure/core-tracing`. + +## 1.1.3 (2020-06-30) + +- Fix this library to be compatible with ES5 ([#8975](https://github.com/Azure/azure-sdk-for-js/pull/8975)) + +## 1.1.2 (2020-04-28) + +- Remove the below interfaces from the public API of this package as they are defined elsewhere. + Fixes [bug 8301](https://github.com/Azure/azure-sdk-for-js/issues/8301). + - OperationOptions + - OperationRequestOptions + - OperationTracingOptions + - AbortSignalLike + +## 1.1.1 (2020-04-01) + +- Provided down-leveled type declaration files for users of older TypeScript versions between 3.1 and 3.6. + +## 1.1.0 (2020-03-31) + +- Added an `AzureKeyCredential` class that supports credential rotation and a corresponding `KeyCredential` interface to support the use of static string-based keys in Azure clients. + +## 1.0.2 (2019-12-03) + +- Updated to use OpenTelemetry 0.2 via `@azure/core-tracing` + +## 1.0.0 (2019-10-29) + +This release marks the general availability of the `@azure/core-auth` package. + +- Standardizes API to be more consistent with other SDK packages. + ([PR #5899](https://github.com/Azure/azure-sdk-for-js/pull/5899)) +- Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel. + ([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860)) + +## 1.0.0-preview.4 (2019-10-22) + +- Removed the `SimpleTokenCredential` implementation since it is not useful outside of test scenarios +- Updated to use the latest version of `@azure/core-tracing` package + +## 1.0.0-preview.3 (2019-09-09) + +- Fixed a ping timeout issue. The timeout is now configurable. ([PR #4941](https://github.com/Azure/azure-sdk-for-js/pull/4941)) diff --git a/node_modules/@azure/core-auth/LICENSE b/node_modules/@azure/core-auth/LICENSE new file mode 100644 index 00000000..ea8fb151 --- /dev/null +++ b/node_modules/@azure/core-auth/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-auth/README.md b/node_modules/@azure/core-auth/README.md new file mode 100644 index 00000000..31ccc7f3 --- /dev/null +++ b/node_modules/@azure/core-auth/README.md @@ -0,0 +1,78 @@ +# Azure Core Authentication client library for JavaScript + +The `@azure/core-auth` package provides core interfaces and helper methods for authenticating with Azure services using Azure Active Directory and other authentication schemes common across the Azure SDK. As a "core" library, it shouldn't need to be added as a dependency to any user code, only other Azure SDK libraries. + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/core-auth +``` + +## Key Concepts + +The `TokenCredential` interface represents a credential capable of providing an authentication token. The `@azure/identity` package contains various credentials that implement the `TokenCredential` interface. + +The `AzureKeyCredential` is a static key-based credential that supports key rotation via the `update` method. Use this when a single secret value is needed for authentication, e.g. when using a shared access key. + +The `AzureNamedKeyCredential` is a static name/key-based credential that supports name and key rotation via the `update` method. Use this when both a secret value and a label are needed, e.g. when using a shared access key and shared access key name. + +The `AzureSASCredential` is a static signature-based credential that supports updating the signature value via the `update` method. Use this when using a shared access signature. + +## Examples + +### AzureKeyCredential + +```js +const { AzureKeyCredential } = require("@azure/core-auth"); + +const credential = new AzureKeyCredential("secret value"); +// prints: "secret value" +console.log(credential.key); +credential.update("other secret value"); +// prints: "other secret value" +console.log(credential.key); +``` + +### AzureNamedKeyCredential + +```js +const { AzureNamedKeyCredential } = require("@azure/core-auth"); + +const credential = new AzureNamedKeyCredential("ManagedPolicy", "secret value"); +// prints: "ManagedPolicy, secret value" +console.log(`${credential.name}, ${credential.key}`); +credential.update("OtherManagedPolicy", "other secret value"); +// prints: "OtherManagedPolicy, other secret value" +console.log(`${credential.name}, ${credential.key}`); +``` + +### AzureSASCredential + +```js +const { AzureSASCredential } = require("@azure/core-auth"); + +const credential = new AzureSASCredential("signature1"); +// prints: "signature1" +console.log(credential.signature); +credential.update("signature2"); +// prints: "signature2" +console.log(credential.signature); +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). 
+ +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-auth%2FREADME.png) diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js b/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js new file mode 100644 index 00000000..bb9a9fef --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export class AzureKeyCredential { + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} +//# sourceMappingURL=azureKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js.map b/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js.map new file mode 100644 index 00000000..7b129510 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.js","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAYlC;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IAU7B;;;;;OAKG;IACH,YAAY,GAAW;QACrB,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;SACnD;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAnBD;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAgBD;;;;;;;OAOG;IACI,MAAM,CAAC,MAAc;QAC1B,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Represents a credential defined by a static API key.\n */\nexport interface KeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n}\n\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nexport class AzureKeyCredential implements KeyCredential {\n private _key: string;\n\n /**\n * The value of the key to be used in authentication\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n constructor(key: string) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n public update(newKey: string): void {\n this._key = newKey;\n }\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js new file mode 100644 index 00000000..31f93648 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js @@ -0,0 +1,62 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "./typeguards"; +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export class AzureNamedKeyCredential { + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. 
+ */ +export function isNamedKeyCredential(credential) { + return (isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} +//# sourceMappingURL=azureNamedKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js.map b/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js.map new file mode 100644 index 00000000..0288da56 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.js","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,cAAc,CAAC;AAgBtD;;;GAGG;AACH,MAAM,OAAO,uBAAuB;IAkBlC;;;;;;OAMG;IACH,YAAY,IAAY,EAAE,GAAW;QACnC,IAAI,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE;YACjB,MAAM,IAAI,SAAS,CAAC,wCAAwC,CAAC,CAAC;SAC/D;QAED,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IA5BD;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAkBD;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAe,EAAE,MAAc;QAC3C,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,EAAE;YACvB,MAAM,IAAI,SAAS,CAAC,8CAA8C,CAAC,CAAC;SACrE;QAED,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAAC,UAAmB;IACtD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACnD,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;QAClC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,CACpC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"./typeguards\";\n\n/**\n * Represents a credential defined by a static API name and key.\n */\nexport interface NamedKeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n /**\n * The value of the API name represented as a string.\n */\n readonly name: string;\n}\n\n/**\n * A static name/key-based credential that supports updating\n * the underlying name and key values.\n */\nexport class AzureNamedKeyCredential implements NamedKeyCredential {\n private _key: string;\n private _name: string;\n\n /**\n * The value of the key to be used in authentication.\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * The value of the name to be used in authentication.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * Create an instance of an AzureNamedKeyCredential for use\n * with a service client.\n *\n * @param name - The initial value of the name to use in authentication.\n * @param key - The initial value of the key to use in authentication.\n */\n constructor(name: string, key: string) {\n if (!name || !key) {\n throw new TypeError(\"name and key must be non-empty strings\");\n }\n\n this._name = name;\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newName - The new name value to be used.\n * @param newKey - The new key value to be used.\n */\n public update(newName: string, newKey: string): void {\n if (!newName || !newKey) {\n throw new TypeError(\"newName and newKey must be non-empty strings\");\n }\n\n this._name = newName;\n this._key = newKey;\n }\n}\n\n/**\n * Tests an object to determine whether it implements NamedKeyCredential.\n *\n * @param 
credential - The assumed NamedKeyCredential to be tested.\n */\nexport function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential {\n return (\n isObjectWithProperties(credential, [\"name\", \"key\"]) &&\n typeof credential.key === \"string\" &&\n typeof credential.name === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js b/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js new file mode 100644 index 00000000..d2b37060 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "./typeguards"; +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export class AzureSASCredential { + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. 
+ */ +export function isSASCredential(credential) { + return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +} +//# sourceMappingURL=azureSASCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js.map b/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js.map new file mode 100644 index 00000000..faa5d515 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.js","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,cAAc,CAAC;AAYtD;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IAU7B;;;;;OAKG;IACH,YAAY,SAAiB;QAC3B,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAnBD;;OAEG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;IAgBD;;;;;;;OAOG;IACI,MAAM,CAAC,YAAoB;QAChC,IAAI,CAAC,YAAY,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAI,CAAC,UAAU,GAAG,YAAY,CAAC;IACjC,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,UAAmB;IACjD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,WAAW,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,SAAS,KAAK,QAAQ,CAC9F,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"./typeguards\";\n\n/**\n * Represents a credential defined by a static shared access signature.\n */\nexport interface SASCredential {\n /**\n * The value of the shared access signature represented as a string\n */\n readonly signature: string;\n}\n\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nexport class AzureSASCredential implements SASCredential {\n private _signature: string;\n\n /**\n * The value of the shared access signature to be used in authentication\n */\n public get signature(): string {\n return this._signature;\n }\n\n /**\n * Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n constructor(signature: string) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = signature;\n }\n\n /**\n * Change the value of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n public update(newSignature: string): void {\n if (!newSignature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = newSignature;\n }\n}\n\n/**\n * Tests an object to determine whether it implements SASCredential.\n *\n * @param credential - The assumed SASCredential to be tested.\n */\nexport function isSASCredential(credential: unknown): credential is SASCredential {\n return (\n isObjectWithProperties(credential, [\"signature\"]) && typeof credential.signature === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/index.js b/node_modules/@azure/core-auth/dist-esm/src/index.js new file mode 100644 index 00000000..a9c837b9 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/index.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +export { AzureKeyCredential } from "./azureKeyCredential"; +export { AzureNamedKeyCredential, isNamedKeyCredential } from "./azureNamedKeyCredential"; +export { AzureSASCredential, isSASCredential } from "./azureSASCredential"; +export { isTokenCredential } from "./tokenCredential"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/index.js.map b/node_modules/@azure/core-auth/dist-esm/src/index.js.map new file mode 100644 index 00000000..1b885816 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAiB,MAAM,sBAAsB,CAAC;AACzE,OAAO,EACL,uBAAuB,EAEvB,oBAAoB,EACrB,MAAM,2BAA2B,CAAC;AACnC,OAAO,EAAE,kBAAkB,EAAiB,eAAe,EAAE,MAAM,sBAAsB,CAAC;AAE1F,OAAO,EAIL,iBAAiB,EAClB,MAAM,mBAAmB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AzureKeyCredential, KeyCredential } from \"./azureKeyCredential\";\nexport {\n AzureNamedKeyCredential,\n NamedKeyCredential,\n isNamedKeyCredential\n} from \"./azureNamedKeyCredential\";\nexport { AzureSASCredential, SASCredential, isSASCredential } from \"./azureSASCredential\";\n\nexport {\n TokenCredential,\n GetTokenOptions,\n AccessToken,\n isTokenCredential\n} from \"./tokenCredential\";\n\nexport { SpanContext, SpanOptions, SpanAttributes, Context, SpanAttributeValue } from \"./tracing\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js b/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js new file mode 100644 index 00000000..5ca7f68d --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} +//# sourceMappingURL=tokenCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js.map b/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js.map new file mode 100644 index 00000000..68878d69 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.js","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA2ElC;;;;GAIG;AACH,MAAM,UAAU,iBAAiB,CAAC,UAAmB;IACnD,mEAAmE;IACnE,gEAAgE;IAChE,sEAAsE;IACtE,qEAAqE;IACrE,sDAAsD;IACtD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,OAAO,CACL,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;QAC7C,CAAC,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CACjF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/tracing.js b/node_modules/@azure/core-auth/dist-esm/src/tracing.js new file mode 100644 index 00000000..377718d1 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/tracing.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/tracing.js.map b/node_modules/@azure/core-auth/dist-esm/src/tracing.js.map new file mode 100644 index 00000000..18a15307 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/tracing.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// The interfaces in this file should be kept in sync with those\n// found in the `@azure/core-tracing` package.\n\n/**\n * Attributes for a Span.\n */\nexport interface SpanAttributes {\n /**\n * Span attributes.\n */\n [attributeKey: string]: SpanAttributeValue | undefined;\n}\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport declare type SpanAttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n\n/**\n * An interface that enables manual propagation of Spans.\n */\nexport interface SpanOptions {\n /**\n * Attributes to set on the Span\n */\n attributes?: SpanAttributes;\n}\n\n/**\n * A light interface that tries to be structurally compatible with OpenTelemetry.\n */\nexport declare interface SpanContext {\n /**\n * UUID of a trace.\n */\n traceId: string;\n /**\n * UUID of a Span.\n */\n spanId: string;\n /**\n * https://www.w3.org/TR/trace-context/#trace-flags\n */\n traceFlags: number;\n}\n\n/**\n * An interface structurally compatible with OpenTelemetry.\n */\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/typeguards.js b/node_modules/@azure/core-auth/dist-esm/src/typeguards.js new file mode 100644 index 00000000..551fe89c --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/typeguards.js @@ 
-0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + * @internal + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * Note: The properties may be inherited. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + * @internal + */ +export function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * Note: The property may be inherited. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + * @internal + */ +function objectHasProperty(thing, property) { + return typeof thing === "object" && property in thing; +} +//# sourceMappingURL=typeguards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/typeguards.js.map b/node_modules/@azure/core-auth/dist-esm/src/typeguards.js.map new file mode 100644 index 00000000..fe5158a9 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/typeguards.js.map @@ -0,0 +1 @@ +{"version":3,"file":"typeguards.js","sourceRoot":"","sources":["../../src/typeguards.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,SAAS,SAAS,CAAI,KAA2B;IAC/C,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,sBAAsB,CACpC,KAAY,EACZ,UAA0B;IAE1B,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAClD,OAAO,KAAK,CAAC;KACd;IAED,KAAK,MAAM,QAAQ,IAAI,UAAU,EAAE;QACjC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;;;GAMG;AACH,SAAS,iBAAiB,CACxB,KAAY,EACZ,QAAsB;IAEtB,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,IAAK,KAAiC,CAAC;AACrF,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if something is defined or not.\n * @param thing - Anything\n * @internal\n */\nfunction isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified properties.\n * Note: The properties may be inherited.\n * @param thing - Anything.\n * @param properties - The name of the properties that should appear in the object.\n * @internal\n */\nexport function isObjectWithProperties(\n thing: Thing,\n properties: PropertyName[]\n): thing is Thing & Record {\n if (!isDefined(thing) || typeof thing !== \"object\") {\n return false;\n }\n\n for (const property of properties) {\n if (!objectHasProperty(thing, property)) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified property.\n * Note: The property may be inherited.\n * @param thing - Any object.\n * @param property - The name of the property that should appear in the object.\n * @internal\n */\nfunction objectHasProperty(\n thing: Thing,\n property: PropertyName\n): thing is Thing & Record {\n return typeof thing === 
\"object\" && property in (thing as Record);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/index.js b/node_modules/@azure/core-auth/dist/index.js new file mode 100644 index 00000000..d074ccd0 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/index.js @@ -0,0 +1,215 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +class AzureKeyCredential { + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + * @internal + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * Note: The properties may be inherited. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + * @internal + */ +function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * Note: The property may be inherited. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + * @internal + */ +function objectHasProperty(thing, property) { + return typeof thing === "object" && property in thing; +} + +// Copyright (c) Microsoft Corporation. +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +class AzureNamedKeyCredential { + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. 
+ */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +function isNamedKeyCredential(credential) { + return (isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} + +// Copyright (c) Microsoft Corporation. +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +class AzureSASCredential { + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +function isSASCredential(credential) { + return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} + +exports.AzureKeyCredential = AzureKeyCredential; +exports.AzureNamedKeyCredential = AzureNamedKeyCredential; +exports.AzureSASCredential = AzureSASCredential; +exports.isNamedKeyCredential = isNamedKeyCredential; +exports.isSASCredential = isSASCredential; +exports.isTokenCredential = isTokenCredential; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-auth/dist/index.js.map b/node_modules/@azure/core-auth/dist/index.js.map new file mode 100644 index 00000000..f80a805b --- /dev/null +++ b/node_modules/@azure/core-auth/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/azureKeyCredential.ts","../src/typeguards.ts","../src/azureNamedKeyCredential.ts","../src/azureSASCredential.ts","../src/tokenCredential.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Represents a credential defined by a static API key.\n */\nexport interface KeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n}\n\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nexport class AzureKeyCredential implements KeyCredential {\n private _key: string;\n\n /**\n * The value of the key to be used in authentication\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n constructor(key: string) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n public update(newKey: string): void {\n this._key = newKey;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if something is defined or not.\n * @param thing - Anything\n * @internal\n */\nfunction isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified properties.\n * Note: The properties may be inherited.\n * @param thing - Anything.\n * @param properties - The name of the properties that should appear in the object.\n * @internal\n */\nexport function isObjectWithProperties(\n thing: Thing,\n properties: PropertyName[]\n): thing is Thing & Record {\n if (!isDefined(thing) || typeof thing !== \"object\") {\n return false;\n }\n\n for (const property of properties) {\n if (!objectHasProperty(thing, property)) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified property.\n * Note: The property may be inherited.\n * @param thing - Any object.\n * @param property - The name of the property that should appear in the object.\n * @internal\n */\nfunction objectHasProperty(\n thing: Thing,\n property: PropertyName\n): thing is Thing & Record {\n return typeof thing === \"object\" && property in (thing as Record);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the 
MIT license.\n\nimport { isObjectWithProperties } from \"./typeguards\";\n\n/**\n * Represents a credential defined by a static API name and key.\n */\nexport interface NamedKeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n /**\n * The value of the API name represented as a string.\n */\n readonly name: string;\n}\n\n/**\n * A static name/key-based credential that supports updating\n * the underlying name and key values.\n */\nexport class AzureNamedKeyCredential implements NamedKeyCredential {\n private _key: string;\n private _name: string;\n\n /**\n * The value of the key to be used in authentication.\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * The value of the name to be used in authentication.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * Create an instance of an AzureNamedKeyCredential for use\n * with a service client.\n *\n * @param name - The initial value of the name to use in authentication.\n * @param key - The initial value of the key to use in authentication.\n */\n constructor(name: string, key: string) {\n if (!name || !key) {\n throw new TypeError(\"name and key must be non-empty strings\");\n }\n\n this._name = name;\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newName - The new name value to be used.\n * @param newKey - The new key value to be used.\n */\n public update(newName: string, newKey: string): void {\n if (!newName || !newKey) {\n throw new TypeError(\"newName and newKey must be non-empty strings\");\n }\n\n this._name = newName;\n this._key = newKey;\n }\n}\n\n/**\n * Tests an object to determine whether it implements NamedKeyCredential.\n *\n * @param credential - The assumed NamedKeyCredential to be tested.\n */\nexport function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential {\n return (\n isObjectWithProperties(credential, [\"name\", \"key\"]) &&\n typeof credential.key === \"string\" &&\n typeof credential.name === \"string\"\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"./typeguards\";\n\n/**\n * Represents a credential defined by a static shared access signature.\n */\nexport interface SASCredential {\n /**\n * The value of the shared access signature represented as a string\n */\n readonly signature: string;\n}\n\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nexport class AzureSASCredential implements SASCredential {\n private _signature: string;\n\n /**\n * The value of the shared access signature to be used in authentication\n */\n public get signature(): string {\n return this._signature;\n }\n\n /**\n * Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n constructor(signature: string) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = signature;\n }\n\n /**\n * Change the value of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n public update(newSignature: string): void {\n if (!newSignature) {\n 
throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = newSignature;\n }\n}\n\n/**\n * Tests an object to determine whether it implements SASCredential.\n *\n * @param credential - The assumed SASCredential to be tested.\n */\nexport function isSASCredential(credential: unknown): credential is SASCredential {\n return (\n isObjectWithProperties(credential, [\"signature\"]) && typeof credential.signature === \"string\"\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n"],"names":[],"mappings":";;;;AAAA;AACA;AAYA;;;;MAIa,kBAAkB;;;;;;;IAgB7B,YAAY,GAAW;QACrB,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;SACnD;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;KACjB;;;;IAhBD,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;KAClB;;;;;;;;;IAwBM,MAAM,CAAC,MAAc;QAC1B,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;KACpB;;;ACnDH;AACA;AAEA;;;;;AAKA,SAAS,SAAS,CAAI,KAA2B;IAC/C,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC;AAED;;;;;;;AAOA,SAAgB,sBAAsB,CACpC,KAAY,EACZ,UAA0B;IAE1B,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAClD,OAAO,KAAK,CAAC;KACd;IAED,KAAK,MAAM,QAAQ,IAAI,UAAU,EAAE;QACjC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;;;;AAOA,SAAS,iBAAiB,CACxB,KAAY,EACZ,QAAsB;IAEtB,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,IAAK,KAAiC,CAAC;AACrF,CAAC;;AChDD;AACA,AAkBA;;;;AAIA,MAAa,uBAAuB;;;;;;;;IAyBlC,YAAY,IAAY,EAAE,GAAW;QACnC,IAAI,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE;YACjB,MAAM,IAAI,SAAS,CAAC,wCAAwC,CAAC,CAAC;SAC/D;QAED,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;KACjB;;;;IAzBD,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;KAClB;;;;IAKD,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;;;;;;;IA2BM,MAAM,CAAC,OAAe,EAAE,MAAc;QAC3C,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,EAAE;YACvB,MAAM,IAAI,SAAS,CAAC,8CAA8C,CAAC,CAAC;SACrE;QAED,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;KACpB;CACF;AAED;;;;;AAKA,SAAgB,oBAAoB,CAAC,UAAmB;IACtD,QACE,sBAAsB,CAAC,UAAU,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACnD,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;QAClC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,EACnC;AACJ,CAAC;;ACvFD;AACA,AAcA;;;;AAIA,MAAa,kBAAkB;;;;;;;IAgB7B,YAAY,SAAiB;QAC3B,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;KAC7B;;;;IAhBD,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;KACxB;;;;;;;;;IAwBM,MAAM,CAAC,YAAoB;QAChC,IAAI,CAAC,YAAY,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAI,CAAC,UAAU,GAAG,YAAY,CAAC;KAChC;CACF;AAED;;;;;AAKA,SAAgB,eAAe,CAAC,UAAmB;IACjD,QACE,sBAAsB,CAAC,UAAU,EAAE,CAAC,WAAW,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,SAAS,KAAK,QAAQ,EAC7F;AACJ,CAAC;;ACrED;AACA;AA2EA;;;;;AAKA,SAAgB,iBAAiB,CAAC,UAAmB;;;;;;IAMnD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,QACE,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;SAC5C,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,EAChF;AACJ,CAAC;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-auth/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 00000000..0e425423 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/core-auth/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-auth/node_modules/tslib/LICENSE.txt new file mode 100644 index 00000000..bfe6430c --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/README.md b/node_modules/@azure/core-auth/node_modules/tslib/README.md new file mode 100644 index 00000000..72ff8e79 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. 
+ +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-auth/node_modules/tslib/modules/index.js b/node_modules/@azure/core-auth/node_modules/tslib/modules/index.js new file mode 100644 index 00000000..aaac8bf9 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-auth/node_modules/tslib/modules/package.json b/node_modules/@azure/core-auth/node_modules/tslib/modules/package.json new file mode 100644 index 00000000..aafa0e4b --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/package.json b/node_modules/@azure/core-auth/node_modules/tslib/package.json new file mode 100644 index 00000000..0ec2c634 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-auth/node_modules/tslib/tslib.d.ts new file mode 100644 index 00000000..b8e49f08 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. 
+ */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. 
+ * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. 
+ * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. 
+ */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.html new file mode 100644 index 00000000..b122e41b --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.js new file mode 100644 index 00000000..e6d77771 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.html b/node_modules/@azure/core-auth/node_modules/tslib/tslib.html new file mode 100644 index 00000000..44c9ba51 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.js b/node_modules/@azure/core-auth/node_modules/tslib/tslib.js new file mode 100644 index 00000000..2b7885c1 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-auth/package.json b/node_modules/@azure/core-auth/package.json new file mode 100644 index 00000000..dfca7851 --- /dev/null +++ b/node_modules/@azure/core-auth/package.json @@ -0,0 +1,99 @@ +{ + "name": "@azure/core-auth", + "version": "1.3.2", + "description": "Provides low-level interfaces and helper methods for authentication in Azure SDK", + "sdk-type": "client", + "main": "dist/index.js", + "module": "dist-esm/src/index.js", + "types": "./types/latest/core-auth.d.ts", + "typesVersions": { + "<3.6": { + "types/latest/*": [ + "types/3.1/*" + ] + } + }, + "scripts": { + "audit": "node 
../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:browser": "tsc -p . && cross-env ONLY_BROWSER=true rollup -c 2>&1", + "build:node": "tsc -p . && cross-env ONLY_NODE=true rollup -c 2>&1", + "build:samples": "echo Skipped.", + "build:test": "tsc -p . && rollup -c rollup.test.config.js 2>&1", + "build:types": "downlevel-dts types/latest types/3.1", + "build": "tsc -p . && rollup -c 2>&1 && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "prebuild": "npm run clean", + "test:browser": "npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run build:test && npm run unit-test:node && npm run integration-test:node", + "test": "npm run build:test && npm run unit-test && npm run integration-test", + "unit-test:browser": "echo skipped", + "unit-test:node": "mocha dist-test/**/*.js --reporter ../../../common/tools/mocha-multi-reporter.js", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src" + }, + "files": [ + "dist/", + "dist-esm/src/", + "types/latest/core-auth.d.ts", + "types/3.1", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "authentication", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=12.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-auth/README.md", + "sideEffects": false, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "7.7.11", + "@rollup/plugin-commonjs": "11.0.2", + "@rollup/plugin-json": "^4.0.0", + "@rollup/plugin-multi-entry": "^3.0.0", + "@rollup/plugin-node-resolve": "^8.0.0", + "@rollup/plugin-replace": "^2.2.0", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "assert": "^1.4.1", + "cross-env": "^7.0.2", + "downlevel-dts": "~0.4.0", + "eslint": "^7.15.0", + "inherits": "^2.0.3", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^1.18.0", + "prettier": "^1.16.4", + "rimraf": "^3.0.0", + "rollup": "^1.16.3", + "rollup-plugin-sourcemaps": "^0.4.2", + "rollup-plugin-terser": "^5.1.1", + "rollup-plugin-visualizer": "^4.0.4", + "typescript": "~4.2.0", + "util": "^0.12.1", + "typedoc": "0.15.2" + } +} diff --git a/node_modules/@azure/core-auth/types/3.1/core-auth.d.ts 
b/node_modules/@azure/core-auth/types/3.1/core-auth.d.ts new file mode 100644 index 00000000..33d2dbcf --- /dev/null +++ b/node_modules/@azure/core-auth/types/3.1/core-auth.d.ts @@ -0,0 +1,258 @@ +import { AbortSignalLike } from '@azure/abort-controller'; +/** + * Represents an access token with an expiration time. + */ +export declare interface AccessToken { + /** + * The access token returned by the authentication service. + */ + token: string; + /** + * The access token's expiration timestamp in milliseconds, UNIX epoch time. + */ + expiresOnTimestamp: number; +} +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export declare class AzureKeyCredential implements KeyCredential { + private _key; + readonly key: string; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey: string): void; +} +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export declare class AzureNamedKeyCredential implements NamedKeyCredential { + private _key; + private _name; + readonly key: string; + readonly name: string; + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name: string, key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName: string, newKey: string): void; +} +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export declare class AzureSASCredential implements SASCredential { + private _signature; + readonly signature: string; + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature: string); + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature: string): void; +} +/** + * An interface structurally compatible with OpenTelemetry. + */ +export declare interface Context { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. 
+ * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} +/** + * Defines options for TokenCredential.getToken. + */ +export declare interface GetTokenOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: { + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + }; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: { + /** + * OpenTelemetry SpanOptions used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * OpenTelemetry context + */ + tracingContext?: Context; + }; + /** + * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints. + */ + tenantId?: string; +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export declare function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential; +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export declare function isSASCredential(credential: unknown): credential is SASCredential; +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export declare function isTokenCredential(credential: unknown): credential is TokenCredential; +/** + * Represents a credential defined by a static API key. + */ +export declare interface KeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; +} +/** + * Represents a credential defined by a static API name and key. + */ +export declare interface NamedKeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; + /** + * The value of the API name represented as a string. + */ + readonly name: string; +} +/** + * Represents a credential defined by a static shared access signature. + */ +export declare interface SASCredential { + /** + * The value of the shared access signature represented as a string + */ + readonly signature: string; +} +/** + * Attributes for a Span. + */ +export declare interface SpanAttributes { + /** + * Span attributes. + */ + [attributeKey: string]: SpanAttributeValue | undefined; +} +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type SpanAttributeValue = string | number | boolean | Array | Array | Array; +/** + * A light interface that tries to be structurally compatible with OpenTelemetry. + */ +export declare interface SpanContext { + /** + * UUID of a trace. + */ + traceId: string; + /** + * UUID of a Span. + */ + spanId: string; + /** + * https://www.w3.org/TR/trace-context/#trace-flags + */ + traceFlags: number; +} +/** + * An interface that enables manual propagation of Spans. + */ +export declare interface SpanOptions { + /** + * Attributes to set on the Span + */ + attributes?: SpanAttributes; +} +/** + * Represents a credential capable of providing an authentication token. 
+ */ +export declare interface TokenCredential { + /** + * Gets the token provided by this credential. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + getToken(scopes: string | string[], options?: GetTokenOptions): Promise; +} +export {}; diff --git a/node_modules/@azure/core-auth/types/latest/core-auth.d.ts b/node_modules/@azure/core-auth/types/latest/core-auth.d.ts new file mode 100644 index 00000000..bea17451 --- /dev/null +++ b/node_modules/@azure/core-auth/types/latest/core-auth.d.ts @@ -0,0 +1,288 @@ +import { AbortSignalLike } from '@azure/abort-controller'; + +/** + * Represents an access token with an expiration time. + */ +export declare interface AccessToken { + /** + * The access token returned by the authentication service. + */ + token: string; + /** + * The access token's expiration timestamp in milliseconds, UNIX epoch time. + */ + expiresOnTimestamp: number; +} + +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export declare class AzureKeyCredential implements KeyCredential { + private _key; + /** + * The value of the key to be used in authentication + */ + get key(): string; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey: string): void; +} + +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export declare class AzureNamedKeyCredential implements NamedKeyCredential { + private _key; + private _name; + /** + * The value of the key to be used in authentication. + */ + get key(): string; + /** + * The value of the name to be used in authentication. + */ + get name(): string; + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name: string, key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName: string, newKey: string): void; +} + +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export declare class AzureSASCredential implements SASCredential { + private _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature(): string; + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature: string); + /** + * Change the value of the signature. 
+ * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature: string): void; +} + +/** + * An interface structurally compatible with OpenTelemetry. + */ +export declare interface Context { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} + +/** + * Defines options for TokenCredential.getToken. + */ +export declare interface GetTokenOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: { + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + }; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: { + /** + * OpenTelemetry SpanOptions used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * OpenTelemetry context + */ + tracingContext?: Context; + }; + /** + * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints. + */ + tenantId?: string; +} + +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export declare function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential; + +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export declare function isSASCredential(credential: unknown): credential is SASCredential; + +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export declare function isTokenCredential(credential: unknown): credential is TokenCredential; + +/** + * Represents a credential defined by a static API key. + */ +export declare interface KeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; +} + +/** + * Represents a credential defined by a static API name and key. + */ +export declare interface NamedKeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; + /** + * The value of the API name represented as a string. + */ + readonly name: string; +} + +/** + * Represents a credential defined by a static shared access signature. + */ +export declare interface SASCredential { + /** + * The value of the shared access signature represented as a string + */ + readonly signature: string; +} + +/** + * Attributes for a Span. + */ +export declare interface SpanAttributes { + /** + * Span attributes. + */ + [attributeKey: string]: SpanAttributeValue | undefined; +} + +/** + * Attribute values may be any non-nullish primitive value except an object. 
+ * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type SpanAttributeValue = string | number | boolean | Array | Array | Array; + +/** + * A light interface that tries to be structurally compatible with OpenTelemetry. + */ +export declare interface SpanContext { + /** + * UUID of a trace. + */ + traceId: string; + /** + * UUID of a Span. + */ + spanId: string; + /** + * https://www.w3.org/TR/trace-context/#trace-flags + */ + traceFlags: number; +} + +/** + * An interface that enables manual propagation of Spans. + */ +export declare interface SpanOptions { + /** + * Attributes to set on the Span + */ + attributes?: SpanAttributes; +} + +/** + * Represents a credential capable of providing an authentication token. + */ +export declare interface TokenCredential { + /** + * Gets the token provided by this credential. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + getToken(scopes: string | string[], options?: GetTokenOptions): Promise; +} + +export { } diff --git a/node_modules/@azure/core-http/CHANGELOG.md b/node_modules/@azure/core-http/CHANGELOG.md new file mode 100644 index 00000000..e20ccb57 --- /dev/null +++ b/node_modules/@azure/core-http/CHANGELOG.md @@ -0,0 +1,262 @@ +# Release History + +## 2.2.5 (2022-05-05) + +### Bugs Fixed + +- Fix an issue where React-Native is loading the wrong file. Adding a `react-native` mapping to point to the ESM entrypoint file. [PR #21118](https://github.com/Azure/azure-sdk-for-js/pull/21118) +- delay creating XML parser/builder objects so that packages not requiring XML functionality but running on platforms lacking XML support can still load this package. [PR #21118](https://github.com/Azure/azure-sdk-for-js/pull/21118) +- Add a `react-native` mapping to use `xml2js` as it is already in our dependency list. Customer can get it to work after installing `stream` and `timers` packages in their React-Native project. +- Fix a run-time error in user agent policy when running in React-Native. + +## 2.2.4 (2022-02-03) + +### Bugs Fixed + +- Updated the HTTP tracing span names to conform to the [OpenTelemetry Specification](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#name). [#19838](https://github.com/Azure/azure-sdk-for-js/pull/19838) + - New HTTP spans will use the `HTTP ` convention instead of using the URL path. + +## 2.2.3 (2022-01-06) + +### Bugs Fixed + +- Fix `HttpHeaders.rawHeaders()` to preserve header name case. As a result HttpClient now sends requests with their original header names. `HttpHeaders.toJson()` now has an option to preserve header key casing. 
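As a rough illustration of the 2.2.3 header-casing fix described above — a minimal sketch only; the `preserveCase` option name is assumed from this release's API surface rather than verified against it:

```ts
import { HttpHeaders } from "@azure/core-http";

// Header names are normalized to lowercase internally; rawHeaders() and the
// assumed { preserveCase: true } option surface the original casing instead.
const headers = new HttpHeaders({ "Content-Type": "application/json" });

console.log(headers.toJson());                       // { "content-type": "application/json" }
console.log(headers.toJson({ preserveCase: true })); // { "Content-Type": "application/json" }
```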
+ +### Other Changes + +- Update dependency `node-fetch` version to `2.6.6` to address advisory [CVE-2020-15168](https://github.com/advisories/GHSA-w7rc-rwvf-8q5r) + +## 2.2.2 (2021-11-03) + +### Bugs Fixed + +- Fix the issue of `HttpHeaders.clone()` not preserving raw header names' casing [PR #18348](https://github.com/Azure/azure-sdk-for-js/pull/18348) +- Fix the issue of network connection not being closed when download stream is closed [PR #14015](https://github.com/Azure/azure-sdk-for-js/pull/14015) + +## 2.2.1 (2021-09-30) + +### Bugs Fixed + +- Fix incorrect caching of proxy agents [PR #17546](https://github.com/Azure/azure-sdk-for-js/pull/17546) + +## 2.2.0 (2021-09-02) + +### Bugs Fixed + +- `tracingPolicy` will no longer propagate tracing errors to the caller, and such errors will be logged instead and the operation does not get interrupted. [PR #16916](https://github.com/Azure/azure-sdk-for-js/pull/16916) + +## 2.1.0 (2021-07-22) + +### Features Added + +- `tracingPolicy` will no longer inject invalid traceparent headers if an incorrect tracer implementation is used. +- `proxyPolicy` now allows passing in a list of no-proxy patterns to override global ones loaded from NO_PROXY environment variable [PR #16414](https://github.com/Azure/azure-sdk-for-js/pull/16414) + +## 2.0.0 (2021-06-30) + +### Features Added + +- Changed TS compilation target to ES2017 in order to produce smaller bundles and use more native platform features. +- Added support for the `Retry-After` header on responses with status code 503, Service Unavailable. +- Added support for multiple retries on the `ThrottlingRetryPolicy` (up to 3 by default). + +### Breaking Changes + +- Updated @azure/core-tracing to version `1.0.0-preview.12`. See [@azure/core-tracing CHANGELOG](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/CHANGELOG.md) for details about breaking changes with tracing. + +### Fixed + +- Fixed an issue where `proxySettings` does not work when there is username but no password [Issue 15720](https://github.com/Azure/azure-sdk-for-js/issues/15720) +- Throttling retry policy respects abort signal [#15796](https://github.com/Azure/azure-sdk-for-js/issues/15796) + +## 1.2.6 (2021-06-14) + +### Key Bugs Fixed + +- Fixed an issue of lost properties when flattening array in deserialization [issue 15653](https://github.com/azure/azure-sdk-for-js/issues/15653) +- Fixed an issue of incorrect minimum version of tslib [issue 15697](https://github.com/Azure/azure-sdk-for-js/issues/15697) + +## 1.2.5 (2021-06-03) + +### Fixed + +- Delay loading of NO_PROXY environment variable until when request pipeline is being created. This fixes [issue 14873](https://github.com/Azure/azure-sdk-for-js/issues/14873) +- Fixed an issue where tracing spans were not setting a status correctly (on success or error) which results in the span status being `UNSET`. In addition, we will now capture the HTTP status code when a request fails in the tracing span. [PR 15061](https://github.com/Azure/azure-sdk-for-js/pull/15061) +- Fix packaging issue [PR 15286](https://github.com/Azure/azure-sdk-for-js/pull/15286) +- Improve the sanitizer to handle recursive objects [PR 15426](https://github.com/Azure/azure-sdk-for-js/pull/15426) + +## 1.2.4 (2021-03-30) + +- Rewrote `bearerTokenAuthenticationPolicy` to use a new backend that refreshes tokens only when they're about to expire and not multiple times before. This fixes the issue: [13369](https://github.com/Azure/azure-sdk-for-js/issues/13369). 
+ +### Breaking Changes + +- Updated @azure/core-tracing to version `1.0.0-preview.11`. See [@azure/core-tracing CHANGELOG](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/CHANGELOG.md) for details about breaking changes with tracing. + +## 1.2.3 (2021-02-04) + +- Don't set a default content-type when there is no request body. [PR 13233](https://github.com/Azure/azure-sdk-for-js/pull/13233) +- Clean up abort event handler properly for streaming operations. Fixed [issue 12029](https://github.com/Azure/azure-sdk-for-js/issues/12029) +- Reduce memory usage of the cache in proxy policy. Fixed [issue 13277](https://github.com/Azure/azure-sdk-for-js/issues/13277) +- Fix an issue where non-streaming response body was treated as stream [PR 13192](https://github.com/Azure/azure-sdk-for-js/pull/13192) +- Browser XML parser now lazily load parser error namespace into cache. Fixed [issue 13268](https://github.com/Azure/azure-sdk-for-js/issues/13268) +- Add ms-cv header used as correlation vector (used for distributed tracing) to list of non-redacted headers so that clients can share this header for debugging purposes. [PR 13541](https://github.com/Azure/azure-sdk-for-js/pull/13541) + +## 1.2.2 (2021-01-07) + +- Cache the default `HttpClient` used when one isn't passed in to `ServiceClient`. This means that for most packages we will only create a single `HttpClient`, which will improve platform resource usage by reducing overhead. [PR 12966](https://github.com/Azure/azure-sdk-for-js/pull/12966) + +## 1.2.1 (2020-12-09) + +- Support custom auth scopes. Scope is a mechanism in OAuth 2.0 to limit an application's access to a user's account [PR 12752](https://github.com/Azure/azure-sdk-for-js/pull/12752) +- Add helper function `createSpan` to help in the creation of tracing spans in Track2 libraries [PR 12525](https://github.com/Azure/azure-sdk-for-js/pull/12525) + +## 1.2.0 (2020-11-05) + +- Explicitly set `manual` redirect handling for node fetch. And fixing redirectPipeline [PR 11863](https://github.com/Azure/azure-sdk-for-js/pull/11863) +- Add support for multiple error response codes. [PR 11841](https://github.com/Azure/azure-sdk-for-js/) +- Allow customizing serializer behavior via optional `SerialzierOptions` parameters. 
Particularly allow using a different `XML_CHARKEY` than the default `_` when parsing XML [PR 12065](https://github.com/Azure/azure-sdk-for-js/pull/12065) +- Unexpected status code is now handled as an error if a mapper is not provided for it [PR 12153](https://github.com/Azure/azure-sdk-for-js/pull/12153) +- Empty collection is now serialized correctly for `multi` format query parameters [PR 12090](https://github.com/Azure/azure-sdk-for-js/pull/12090) +- De-serialize error from parsed object before trying to use its `code` and `message` properties [PR 12150](https://github.com/Azure/azure-sdk-for-js/pull/12150) +- Fix an error thrown on older versions of Safari [PR 11759](https://github.com/Azure/azure-sdk-for-js/pull/11759) + +## 1.1.9 (2020-09-30) + +- Add support for XML namespaces in deserialization [PR 11201](https://github.com/Azure/azure-sdk-for-js/pull/11201) + +- Add support for NDJSON format [PR 11325](https://github.com/Azure/azure-sdk-for-js/pull/11325) + +- Add support for `NO_PROXY` and `ALL_PROXY` environment variables [PR 7069](https://github.com/Azure/azure-sdk-for-js/pull/7069) + +- Serializer now resolves `additionalProperties` correctly from referenced mapper [PR 11473](https://github.com/Azure/azure-sdk-for-js/pull/11473) + +- Empty wrapped XML element lists are no properly de-serialized. This fixes [issue 11071](https://github.com/Azure/azure-sdk-for-js/issues/11071) + +## 1.1.8 (2020-09-08) + +- `URLQuery` parsing now accepts `=` in query parameter values and no longer drops such query parameter name/value pairs. This fixes [issue 11014](https://github.com/Azure/azure-sdk-for-js/issues/11014) + +## 1.1.7 (2020-09-02) + +- `BearerTokenAuthenticationPolicy` now starts trying to refresh the token 30 seconds before the token expires. It will only try to refresh said token on each request when refreshing is not in progress. In the mean time, requests will use the existing token. This fixes [issue 10084](https://github.com/Azure/azure-sdk-for-js/issues/10084). +- Un-export the `{...}Policy` classes that were meant to be internal to `@azure/core-http`. [PR 10738](https://github.com/Azure/azure-sdk-for-js/pull/10738) + +## 1.1.6 (2020-08-04) + +- Update dependencies `@azure/core-tracing` to version 1.0.0-preview.9 and `@opentelemetry/api` to version 0.10.2 [PR 10393](https://github.com/Azure/azure-sdk-for-js/pull/10393) + +## 1.1.5 (2020-08-04) + +- The global `fetch()` is no longer overridden by node-fetch. This fixed an issue impacting Electron render process. [PR #9880](https://github.com/Azure/azure-sdk-for-js/pull/9880) + +## 1.1.4 (2020-07-02) + +- Fix issue with flattened model serialization, where constant properties are being dropped [PR #8658](https://github.com/Azure/azure-sdk-for-js/pull/8658) +- Switch to use `x-ms-useragent` header key for passing user agent info in browsers [PR #9490](https://github.com/Azure/azure-sdk-for-js/pull/9490) +- Refactor `ExponentialRetryPolicy` and `SystemErrorRetryPolicy` to share common code [PR #9667](https://github.com/Azure/azure-sdk-for-js/pull/9490) + +## 1.1.3 (2020-06-03) + +- Fix issue of `SystemErrorRetryPolicy` didn't retry on errors [PR #8803](https://github.com/Azure/azure-sdk-for-js/pull/8803) +- Add support for serialization of text media type. [PR #8977](https://github.com/Azure/azure-sdk-for-js/pull/8977) +- Fix issue with URLBuilder incorrectly handling full URL in path. 
[PR #9245](https://github.com/Azure/azure-sdk-for-js/pull/9245) + +## 1.1.2 (2020-05-07) + +- Fix issue with null/undefined values in array and tabs/space delimiter arrays during sendOperationRequest. [PR #8604](https://github.com/Azure/azure-sdk-for-js/pull/8604) + +## 1.1.1 (2020-04-28) + +- Add support for `text/plain` endpoints. [PR #7963](https://github.com/Azure/azure-sdk-for-js/pull/7963) +- Updated to use OpenTelemetry 0.6.1 via `@azure/core-tracing`. + +## 1.1.0 (2020-03-31) + +- A new interface `WebResourceLike` was introduced to avoid a direct dependency on the class `WebResource` in public interfaces. `HttpHeadersLike` was also added to replace references to `HttpHeaders`. This should improve cross-version compatibility for core-http. [PR #7873](https://github.com/Azure/azure-sdk-for-js/pull/7873) + +- Add support to disable response decompression in `node-fetch` Http client. [PR #7878](https://github.com/Azure/azure-sdk-for-js/pull/7878) + +## 1.0.4 (2020-03-03) + +- When an operation times out based on the `timeout` configured in the `OperationRequestOptions`, it gets terminated with an error. In this update, the error that is thrown in browser for such cases is updated to match what is thrown in node i.e an `AbortError` is thrown instead of the previous `RestError`. [PR #7159](https://github.com/Azure/azure-sdk-for-js/pull/7159) +- Support for username and password in the proxy url [PR #7211](https://github.com/Azure/azure-sdk-for-js/pull/7211) +- Removed dependency on `lib.dom.d.ts` so TypeScript users no longer need `lib: ["dom"]` in their tsconfig.json file to use this library. [PR #7500](https://github.com/Azure/azure-sdk-for-js/pull/7500) + +## 1.0.3 (2020-01-02) + +- Added `x-ms-useragent` to the list of allowed headers in request logs. +- Fix issue of data being pushed twice when reporting progress ([PR #6427](https://github.com/Azure/azure-sdk-for-js/issues/6427)) +- Move `getDefaultProxySettings()` calls into `proxyPolicy` so that libraries that don't use the PipelineOptions or createDefaultRequestPolicyFactories from core-http can use this behavior without duplicating code. ([PR #6478](https://github.com/Azure/azure-sdk-for-js/issues/6478)) +- Fix tracingPolicy() to set standard span attributes ([PR #6565](https://github.com/Azure/azure-sdk-for-js/pull/6565)). Now the following are set correctly for the spans + - `http.method` + - `http.url` + - `http.user_agent` + - `http.status_code` + - `requestId` + - `serviceRequestId` + - `kind` = Client + - span name: URI path + +## 1.0.2 (2019-12-02) + +- Updated to use OpenTelemetry 0.2 via `@azure/core-tracing` + +## 1.0.0 (2019-10-29) + +- This release marks the general availability of the `@azure/core-http` package. +- Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel. + ([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860)) + +## 1.0.0-preview.6 (2019-10-22) + +- Introduced a HTTP pipeline configuration type, `PipelineOptions`, which makes it easier to configure common settings for API requests. This is now the options type used in the `@azure/keyvault-*` data plane libraries. +- Support for HTTP request logging has been redesigned to use `@azure/logger`; use`AZURE_LOG_LEVEL="info"` to see full request/response logs when running in Node.js. In the browser, you can import `setLogLevel` from `@azure/logger` to change the log level; logs will then be written to the browser console. 
+- Removed error type `ResponseBodyNotFoundError` that was introduced in the previous preview. Use cases for it were removed. +- Fixed an issue where error response details were not being deserialized correctly when the default mapper is used. +- In Node.js, HTTP agents configured for proxy or keepAlive are now reused across requests. This resolves a memory leak reported in [#4964](https://github.com/Azure/azure-sdk-for-js/issues/4964). +- Fixes a memory leak issue resulting from event listeners not being removed from abortSignals. +- Cancelling an operation using an `abortSignal` will now throw an `AbortError`. + The `name` property on the error will match "AbortError". + +## 1.0.0-preview.4 (2019-10-07) + +- No longer re-exports `@azure/core-tracing`. To enable tracing, call `setTracer` from `@azure/core-tracing` directly. + ([PR #5389](https://github.com/Azure/azure-sdk-for-js/pull/5389)) +- Report upload/download progress only after the first data chunk is received + ([PR #5298](https://github.com/Azure/azure-sdk-for-js/pull/5298)) +- Added new error type `ResponseBodyNotFoundError` for cases when the response body is unexpectedly empty. + ([PR #5369](https://github.com/Azure/azure-sdk-for-js/pull/5369)) +- Temporary fix for a memory leak issue resulting from creating new agents every time WebResource is cloned. + ([PR #5396](https://github.com/Azure/azure-sdk-for-js/pull/5396)) + +## 1.0.0-preview.3 (2019-09-09) + +- Syncs changes from `@azure/ms-rest-js` to `@azure/core-http`. + ([PR #4756](https://github.com/Azure/azure-sdk-for-js/pull/4756)). + - Updates HTTP clients to `fetch` and `node-fetch` for the browser and node.js respectively. +- Reintroduces `ServiceClientCredentials` type to `credentials` parameter of `ServiceClient` + ([PR #4773](https://github.com/Azure/azure-sdk-for-js/pull/4773)). +- Updates types for better compatibility with TypeScript 3.6.x. + ([PR #4928](https://github.com/Azure/azure-sdk-for-js/pull/4928)). +- Adds `TracingPolicy` to support setting `traceparent` and `tracestate` headers + when setting spans in operation as per the [trace context HTTP headers format](https://www.w3.org/TR/trace-context/#trace-context-http-headers-format). + ([PR #4712](https://github.com/Azure/azure-sdk-for-js/pull/4712)). +- Adds `text/plain` as an accepted MIME type for JSON output. + ([PR #4975](https://github.com/Azure/azure-sdk-for-js/pull/4975)). +- Exposes `ProxySettings` type. ([PR #5043](https://github.com/Azure/azure-sdk-for-js/pull/5043)). +- Fixes bug where `WebResource.clone` would not copy `proxySettings` or `keepAlive` settings. + ([PR #5047](https://github.com/Azure/azure-sdk-for-js/pull/5047)). + +## 1.0.0-preview.2 (2019-08-05) + +- Removed `ServiceClientCredentials` type from `credentials` parameter of `ServiceClient` ([PR #4367](https://github.com/Azure/azure-sdk-for-js/pull/4367)). Credential implementations are now standardized on `@azure/core-auth`'s `TokenCredential` interface and provided by `@azure/identity`. +- Added an `AccessTokenCache` so that access tokens can be cached across pipeline instances ([PR #4174](https://github.com/Azure/azure-sdk-for-js/pull/4174)). +- Fixed the issue preventing `ServiceClient` from correctly setting the scope's resource URI when creating a `BearerTokenAuthenticationPolicy` ([PR #4335](https://github.com/Azure/azure-sdk-for-js/pull/4335)). +- Migrated over `AxiosHttpClient` fixes from `@azure/ms-rest-js` ([PR #4106](https://github.com/Azure/azure-sdk-for-js/pull/4106)). 
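+
+For illustration only (not part of the upstream release notes), a minimal sketch of the credential pattern this preview standardizes on, i.e. a `TokenCredential` passed to the `ServiceClient` constructor; `DefaultAzureCredential` is assumed here purely as an example `@azure/identity` implementation:
+
+```ts
+import { DefaultAzureCredential } from "@azure/identity";
+import { ServiceClient } from "@azure/core-http";
+
+// Any TokenCredential implementation can be handed to the core-http
+// ServiceClient; bearer token authentication is handled by the pipeline.
+const client = new ServiceClient(new DefaultAzureCredential());
+```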
+ +## 1.0.0-preview.1 (2019-06-25) + +- Forked `@azure/ms-rest-js` to `@azure/core-http` to form the base HTTP pipeline of the Azure SDK TypeScript/JavaScript libraries. +- Added `TokenCredential` to define a simple interface for credentials that provided bearer tokens +- Added `BearerTokenAuthenticationPolicy` which can use a `TokenCredential` implementation to perform bearer token authentication against Azure services diff --git a/node_modules/@azure/core-http/LICENSE b/node_modules/@azure/core-http/LICENSE new file mode 100644 index 00000000..ea8fb151 --- /dev/null +++ b/node_modules/@azure/core-http/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-http/README.md b/node_modules/@azure/core-http/README.md new file mode 100644 index 00000000..2ee90212 --- /dev/null +++ b/node_modules/@azure/core-http/README.md @@ -0,0 +1,72 @@ +# Azure Core HTTP client library for JavaScript + +This is the core HTTP pipeline for Azure SDK JavaScript libraries which work in the browser and Node.js. This library is primarily intended to be used in code generated by [AutoRest](https://github.com/Azure/Autorest) and [`autorest.typescript`](https://github.com/Azure/autorest.typescript). + +## Getting started + +### Currently supported environments + +- [LTS versions of Node.js](https://nodejs.org/about/releases/) +- Latest versions of Safari, Chrome, Edge, and Firefox. + +See our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +### Installation + +This package is primarily used in generated code and not meant to be consumed directly by end users. + +## Key concepts + +You can find an explanation of how this repository's code works by going to our [architecture overview](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-http/docs/architectureOverview.md). + +## Examples + +Examples can be found in the `samples` folder. + +## Next steps + +- Build this library (`core-http`). For more information on how to build project in this repo, please refer to the [Contributing Guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md). + +- The code in `samples\node-sample.ts` shows how to create a `ServiceClient` instance with a test `TokenCredential` implementation and use the client instance to perform a `GET` operation from the Azure management service endpoint for subscriptions. 
To run the code, first obtain an access token to the Azure management service. + +One easy way to get an access token is using [Azure CLI](https://docs.microsoft.com/cli/azure/?view=azure-cli-latest) + +1. Sign in +```shell +az login +``` +2. Select the subscription to use +```shell +az account set -s +``` +3. Obtain an access token +```shell +az account get-access-token --resource=https://management.azure.com +``` + +### NodeJS + +- Set values of `subscriptionId` and `token` variable in `samples/node-sample.ts` + +- Change directory to samples folder, compile the TypeScript code, then run the sample + +``` +cd samples +tsc node-sample.ts +node node-sample.js +``` + +### Browser + +- Set values of `subscriptionId` and `token` variable in `samples/index.js` +- Follow the instructions of [JavaScript Bundling Guide using Parcel](https://github.com/Azure/azure-sdk-for-js/blob/main/documentation/Bundling.md#using-parcel) to build and run the code in the browser. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-http%2FREADME.png) diff --git a/node_modules/@azure/core-http/dist-esm/src/coreHttp.js b/node_modules/@azure/core-http/dist-esm/src/coreHttp.js new file mode 100644 index 00000000..76924097 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/coreHttp.js @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+/* eslint-disable-next-line @typescript-eslint/triple-slash-reference */ +/// +export { WebResource, } from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpHeaders } from "./httpHeaders"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { operationOptionsToRequestOptionsBase, } from "./operationOptions"; +export { ServiceClient, flattenResponse, createPipelineFromOptions, } from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +export { logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicyOptions, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy, RetryMode } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy } from "./policies/redirectPolicy"; +export { keepAlivePolicy } from "./policies/keepAlivePolicy"; +export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; +export { tracingPolicy } from "./policies/tracingPolicy"; +export { MapperType, Serializer, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +export { delay } from "./util/delay"; +// legacy exports. Use core-tracing instead (and remove on next major version update of core-http). 
+export { createSpanFunction } from "./createSpanLegacy"; +// Credentials +export { isTokenCredential } from "@azure/core-auth"; +export { ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; +export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials } from "./credentials/apiKeyCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { parseXML, stringifyXML } from "./util/xml"; +export { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; +//# sourceMappingURL=coreHttp.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/coreHttp.js.map b/node_modules/@azure/core-http/dist-esm/src/coreHttp.js.map new file mode 100644 index 00000000..4cdae391 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/coreHttp.js.map @@ -0,0 +1 @@ +{"version":3,"file":"coreHttp.js","sourceRoot":"","sources":["../../src/coreHttp.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,wEAAwE;AACxE,yCAAyC;AAEzC,OAAO,EACL,WAAW,GAQZ,MAAM,eAAe,CAAC;AAEvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD,OAAO,EAAc,WAAW,EAAmC,MAAM,eAAe,CAAC;AAGzF,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC,OAAO,EAGL,oCAAoC,GACrC,MAAM,oBAAoB,CAAC;AAS5B,OAAO,EACL,aAAa,EAEb,eAAe,EACf,yBAAyB,GAG1B,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,+BAA+B,EAAE,MAAM,4CAA4C,CAAC;AAC7F,OAAO,EAAoB,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EACL,iBAAiB,EAGjB,oBAAoB,GAErB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAgB,SAAS,EAAE,MAAM,mCAAmC,CAAC;AACpG,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,cAAc,EAAmB,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,eAAe,EAAoB,MAAM,4BAA4B,CAAC;AAC/E,OAAO,EAAE,kCAAkC,EAAE,MAAM,+CAA+C,CAAC;AACnG,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EACL,eAAe,EACf,wBAAwB,GAGzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,qBAAqB,EAErB,uBAAuB,GAExB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAwB,MAAM,0BAA0B,CAAC;AAC/E,OAAO,EACL,UAAU,EAcV,UAAU,EAEV,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EAET,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAE7C,OAAO,EAAE,KAAK,EAAE,MAAM,cAAc,CAAC;AACrC,mGAAmG;AACnG,OAAO,EAAE,kBAAkB,EAAc,MAAM,oBAAoB,CAAC;AAEpE,cAAc;AACd,OAAO,EAAiD,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACpG,OAAO,EAAoB,wBAAwB,EAAE,MAAM,gCAAgC,CAAC;AAC5F,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC1E,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,iCAAiC,CAAC;AAE7F,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAGlE,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,WAAW,EAAqB,MAAM,0BAA0B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/* eslint-disable-next-line @typescript-eslint/triple-slash-reference */\n/// \n\nexport {\n WebResource,\n WebResourceLike,\n HttpRequestBody,\n RequestPrepareOptions,\n HttpMethods,\n ParameterValue,\n RequestOptionsBase,\n TransferProgressEvent,\n} from \"./webResource\";\nexport { CommonResponse, CommonRequestInit, CommonRequestInfo } from \"./nodeFetchHttpClient\";\nexport { DefaultHttpClient } from 
\"./defaultHttpClient\";\nexport { HttpClient } from \"./httpClient\";\nexport { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from \"./httpHeaders\";\nexport { HttpOperationResponse, HttpResponse, RestResponse } from \"./httpOperationResponse\";\nexport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nexport { HttpPipelineLogLevel } from \"./httpPipelineLogLevel\";\nexport { RestError } from \"./restError\";\nexport { OperationArguments } from \"./operationArguments\";\nexport {\n OperationOptions,\n OperationRequestOptions,\n operationOptionsToRequestOptionsBase,\n} from \"./operationOptions\";\nexport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n ParameterPath,\n} from \"./operationParameter\";\nexport { OperationResponse } from \"./operationResponse\";\nexport { OperationSpec } from \"./operationSpec\";\nexport {\n ServiceClient,\n ServiceClientOptions,\n flattenResponse,\n createPipelineFromOptions,\n ProxySettings,\n ProxyOptions,\n} from \"./serviceClient\";\nexport { PipelineOptions, InternalPipelineOptions } from \"./pipelineOptions\";\nexport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nexport { Constants } from \"./util/constants\";\nexport { bearerTokenAuthenticationPolicy } from \"./policies/bearerTokenAuthenticationPolicy\";\nexport { LogPolicyOptions, logPolicy } from \"./policies/logPolicy\";\nexport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nexport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nexport { exponentialRetryPolicy, RetryOptions, RetryMode } from \"./policies/exponentialRetryPolicy\";\nexport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nexport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nexport { getDefaultProxySettings, proxyPolicy } from \"./policies/proxyPolicy\";\nexport { redirectPolicy, RedirectOptions } from \"./policies/redirectPolicy\";\nexport { keepAlivePolicy, KeepAliveOptions } from \"./policies/keepAlivePolicy\";\nexport { disableResponseDecompressionPolicy } from \"./policies/disableResponseDecompressionPolicy\";\nexport { signingPolicy } from \"./policies/signingPolicy\";\nexport {\n userAgentPolicy,\n getDefaultUserAgentValue,\n UserAgentOptions,\n TelemetryInfo,\n} from \"./policies/userAgentPolicy\";\nexport {\n deserializationPolicy,\n DeserializationOptions,\n deserializeResponseBody,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nexport { tracingPolicy, TracingPolicyOptions } from \"./policies/tracingPolicy\";\nexport {\n MapperType,\n SimpleMapperType,\n CompositeMapperType,\n DictionaryMapperType,\n SequenceMapperType,\n EnumMapperType,\n Mapper,\n BaseMapper,\n CompositeMapper,\n SequenceMapper,\n DictionaryMapper,\n EnumMapper,\n MapperConstraints,\n PolymorphicDiscriminator,\n Serializer,\n UrlParameterValue,\n serializeObject,\n} from \"./serializer\";\nexport {\n stripRequest,\n stripResponse,\n executePromisesSequentially,\n generateUuid,\n encodeUri,\n ServiceCallback,\n promiseToCallback,\n promiseToServiceCallback,\n isValidUuid,\n applyMixins,\n isNode,\n isDuration,\n} from \"./util/utils\";\nexport { URLBuilder, URLQuery } from \"./url\";\nexport { AbortSignalLike } from \"@azure/abort-controller\";\nexport { delay } from \"./util/delay\";\n// legacy exports. 
Use core-tracing instead (and remove on next major version update of core-http).\nexport { createSpanFunction, SpanConfig } from \"./createSpanLegacy\";\n\n// Credentials\nexport { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential } from \"@azure/core-auth\";\nexport { AccessTokenCache, ExpiringAccessTokenCache } from \"./credentials/accessTokenCache\";\nexport { AccessTokenRefresher } from \"./credentials/accessTokenRefresher\";\nexport { BasicAuthenticationCredentials } from \"./credentials/basicAuthenticationCredentials\";\nexport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./credentials/apiKeyCredentials\";\nexport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nexport { TopicCredentials } from \"./credentials/topicCredentials\";\nexport { Authenticator } from \"./credentials/credentials\";\n\nexport { parseXML, stringifyXML } from \"./util/xml\";\nexport { XML_ATTRKEY, XML_CHARKEY, SerializerOptions } from \"./util/serializer.common\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js new file mode 100644 index 00000000..5deff577 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// NOTE: we've moved this code into core-tracing but these functions +// were a part of the GA'd library and can't be removed until the next major +// release. They currently get called always, even if tracing is not enabled. +import { createSpanFunction as coreTracingCreateSpanFunction } from "@azure/core-tracing"; +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +export function createSpanFunction(args) { + return coreTracingCreateSpanFunction(args); +} +//# sourceMappingURL=createSpanLegacy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map new file mode 100644 index 00000000..93806976 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createSpanLegacy.js","sourceRoot":"","sources":["../../src/createSpanLegacy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,oEAAoE;AACpE,4EAA4E;AAC5E,6EAA6E;AAE7E,OAAO,EAAQ,kBAAkB,IAAI,6BAA6B,EAAE,MAAM,qBAAqB,CAAC;AAoBhG;;;;;;;;GAQG;AACH,MAAM,UAAU,kBAAkB,CAChC,IAAgB;IAKhB,OAAO,6BAA6B,CAAC,IAAI,CAAC,CAAC;AAC7C,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// NOTE: we've moved this code into core-tracing but these functions\n// were a part of the GA'd library and can't be removed until the next major\n// release. They currently get called always, even if tracing is not enabled.\n\nimport { Span, createSpanFunction as coreTracingCreateSpanFunction } from \"@azure/core-tracing\";\nimport { OperationOptions } from \"./operationOptions\";\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. 
Use core-tracing instead.\n * @hidden\n */\nexport interface SpanConfig {\n /**\n * Package name prefix\n */\n packagePrefix: string;\n /**\n * Service namespace\n */\n namespace: string;\n}\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing.\n * @hidden\n\n * @param spanConfig - The name of the operation being performed.\n * @param tracingOptions - The options for the underlying http request.\n */\nexport function createSpanFunction(\n args: SpanConfig\n): (\n operationName: string,\n operationOptions: T\n) => { span: Span; updatedOptions: T } {\n return coreTracingCreateSpanFunction(args);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js new file mode 100644 index 00000000..1ac13dd8 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Defines the default token refresh buffer duration. + */ +export const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export class ExpiringAccessTokenCache { + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { + this.cachedToken = undefined; + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} +//# sourceMappingURL=accessTokenCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map new file mode 100644 index 00000000..8786e716 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenCache.js","sourceRoot":"","sources":["../../../src/credentials/accessTokenCache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;GAEG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,CAAC,YAAY;AAqB/D;;;;;;GAMG;AACH,MAAM,OAAO,wBAAwB;IAInC;;;OAGG;IACH,YAAY,uBAA+B,oBAAoB;QANvD,gBAAW,GAAiB,SAAS,CAAC;QAO5C,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;IACnD,CAAC;IAED;;;OAGG;IACH,cAAc,CAAC,WAAoC;QACjD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;IACjC,CAAC;IAED;;OAEG;IACH,cAAc;QACZ,IACE,IAAI,CAAC,WAAW;YAChB,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,WAAW,CAAC,kBAAkB,EAC7E;YACA,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC;SAC9B;QAED,OAAO,IAAI,CAAC,WAAW,CAAC;IAC1B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken } from \"@azure/core-auth\";\n\n/**\n * Defines the default token refresh buffer duration.\n */\nexport const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes\n\n/**\n * Provides a cache for an AccessToken that was that\n * was returned from a TokenCredential.\n */\nexport interface AccessTokenCache {\n /**\n * Sets the cached token.\n *\n * @param accessToken - The {@link AccessToken} to be cached or null to\n * clear the cached token.\n */\n setCachedToken(accessToken: AccessToken | undefined): void;\n\n /**\n * Returns the cached {@link AccessToken} or undefined if nothing is cached.\n */\n getCachedToken(): AccessToken | undefined;\n}\n\n/**\n * Provides an {@link AccessTokenCache} implementation which clears\n * the cached {@link AccessToken}'s after the expiresOnTimestamp has\n * passed.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class ExpiringAccessTokenCache implements AccessTokenCache {\n private tokenRefreshBufferMs: number;\n private cachedToken?: AccessToken = undefined;\n\n /**\n * Constructs an instance of {@link ExpiringAccessTokenCache} with\n * an optional expiration buffer time.\n */\n constructor(tokenRefreshBufferMs: number = TokenRefreshBufferMs) {\n this.tokenRefreshBufferMs = tokenRefreshBufferMs;\n }\n\n /**\n * Saves an access token into the internal in-memory cache.\n * @param accessToken - Access token or undefined to clear the cache.\n */\n setCachedToken(accessToken: AccessToken | undefined): void {\n this.cachedToken = accessToken;\n }\n\n /**\n * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon.\n */\n getCachedToken(): AccessToken | undefined {\n if (\n this.cachedToken &&\n Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp\n ) {\n this.cachedToken = undefined;\n }\n\n return this.cachedToken;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js new file mode 100644 
index 00000000..a866fb9d --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export class AccessTokenRefresher { + constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { + this.credential = credential; + this.scopes = scopes; + this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; + this.lastCalled = 0; + } + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady() { + // We're only ready for a new refresh if the required milliseconds have passed. + return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); + } + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + async getToken(options) { + this.lastCalled = Date.now(); + const token = await this.credential.getToken(this.scopes, options); + this.promise = undefined; + return token || undefined; + } + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. + */ + refresh(options) { + if (!this.promise) { + this.promise = this.getToken(options); + } + return this.promise; + } +} +//# sourceMappingURL=accessTokenRefresher.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map new file mode 100644 index 00000000..3388d9be --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenRefresher.js","sourceRoot":"","sources":["../../../src/credentials/accessTokenRefresher.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;;GAIG;AACH,MAAM,OAAO,oBAAoB;IAI/B,YACU,UAA2B,EAC3B,MAAyB,EACzB,uCAA+C,KAAK;QAFpD,eAAU,GAAV,UAAU,CAAiB;QAC3B,WAAM,GAAN,MAAM,CAAmB;QACzB,yCAAoC,GAApC,oCAAoC,CAAgB;QALtD,eAAU,GAAG,CAAC,CAAC;IAMpB,CAAC;IAEJ;;;OAGG;IACI,OAAO;QACZ,+EAA+E;QAC/E,OAAO,CACL,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,oCAAoC,CAC7F,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACK,KAAK,CAAC,QAAQ,CAAC,OAAwB;QAC7C,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAC7B,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACnE,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;QACzB,OAAO,KAAK,IAAI,SAAS,CAAC;IAC5B,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,OAAwB;QACrC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\n\n/**\n * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class AccessTokenRefresher {\n private promise: 
Promise | undefined;\n private lastCalled = 0;\n\n constructor(\n private credential: TokenCredential,\n private scopes: string | string[],\n private requiredMillisecondsBeforeNewRefresh: number = 30000\n ) {}\n\n /**\n * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying\n * that we are ready for a new refresh.\n */\n public isReady(): boolean {\n // We're only ready for a new refresh if the required milliseconds have passed.\n return (\n !this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh\n );\n }\n\n /**\n * Stores the time in which it is called,\n * then requests a new token,\n * then sets this.promise to undefined,\n * then returns the token.\n */\n private async getToken(options: GetTokenOptions): Promise {\n this.lastCalled = Date.now();\n const token = await this.credential.getToken(this.scopes, options);\n this.promise = undefined;\n return token || undefined;\n }\n\n /**\n * Requests a new token if we're not currently waiting for a new token.\n * Returns null if the required time between each call hasn't been reached.\n */\n public refresh(options: GetTokenOptions): Promise {\n if (!this.promise) {\n this.promise = this.getToken(options);\n }\n\n return this.promise;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js new file mode 100644 index 00000000..d3f2a4fa --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders } from "../httpHeaders"; +/** + * Authenticates to a service using an API key. + */ +export class ApiKeyCredentials { + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource) { + if (!webResource) { + return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (const headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error(`url cannot be null in the request object.`)); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (const key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += `${key}=${this.inQuery[key]}`; + } + } + return Promise.resolve(webResource); + } +} +//# sourceMappingURL=apiKeyCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map new file mode 100644 index 00000000..db7ff150 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.js","sourceRoot":"","sources":["../../../src/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAkB7C;;GAEG;AACH,MAAM,OAAO,iBAAiB;IAU5B;;OAEG;IACH,YAAY,OAAgC;QAC1C,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAClE,MAAM,IAAI,KAAK,CACb,0HAA0H,CAC3H,CAAC;SACH;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;IACjC,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,WAA4B;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,uEAAuE,CAAC,CACnF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;gBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACzC;YACD,KAAK,MAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;aAChE;SACF;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;aAC/E;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;gBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;aACxB;YACD,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,WAAW,CAAC,GAAG,IAAI,GAAG,GAAG,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC;aAClD;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters 
that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js new file mode 100644 index 00000000..fc49c725 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as base64 from "../util/base64"; +import { Constants } from "../util/constants"; +import { HttpHeaders } from "../httpHeaders"; +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +export class BasicAuthenticationCredentials { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". 
+ * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. + */ + signRequest(webResource) { + const credentials = `${this.userName}:${this.password}`; + const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`; + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + } +} +//# sourceMappingURL=basicAuthenticationCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map new file mode 100644 index 00000000..c5129004 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.js","sourceRoot":"","sources":["../../../src/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,MAAM,MAAM,gBAAgB,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAI7C,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,MAAM,4BAA4B,GAAG,OAAO,CAAC;AAE7C;;GAEG;AACH,MAAM,OAAO,8BAA8B;IAiBzC;;;;;;OAMG;IACH,YACE,QAAgB,EAChB,QAAgB,EAChB,sBAA8B,4BAA4B;QAhB5D;;;WAGG;QACH,wBAAmB,GAAW,4BAA4B,CAAC;QAczD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,WAA4B;QACtC,MAAM,WAAW,GAAG,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;QACxD,MAAM,kBAAkB,GAAG,GAAG,IAAI,CAAC,mBAAmB,IAAI,MAAM,CAAC,YAAY,CAAC,WAAW,CAAC,EAAE,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;QAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\n/**\n * A simple {@link 
ServiceClientCredential} that authenticates with a username and a password.\n */\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n /**\n * Username\n */\n userName: string;\n\n /**\n * Password\n */\n password: string;\n\n /**\n * Authorization scheme. Defaults to \"Basic\".\n * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes\n */\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @param userName - User name.\n * @param password - Password.\n * @param authorizationScheme - The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js new file mode 100644 index 00000000..3cfbd7a0 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=credentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map new file mode 100644 index 00000000..64678c65 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.js","sourceRoot":"","sources":["../../../src/credentials/credentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A function that receives a challenge and resolves a promise with a string token.\n * @deprecated The Authenticator type is not currently in use.\n */\nexport type Authenticator = (challenge: unknown) => Promise;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js new file mode 100644 index 00000000..f44565f4 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=serviceClientCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map new file mode 100644 index 00000000..48908f39 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.js","sourceRoot":"","sources":["../../../src/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Represents an object or class with a `signRequest` method which will sign outgoing requests (for example, by setting the `Authorization` header).\n */\nexport interface ServiceClientCredentials {\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike/request to be signed.\n * @returns The signed request object;\n */\n signRequest(webResource: WebResourceLike): Promise;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js new file mode 100644 index 00000000..ac50243f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { ApiKeyCredentials } from "./apiKeyCredentials"; +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +export class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey) { + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + const options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + super(options); + } +} +//# sourceMappingURL=topicCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map new file mode 100644 index 00000000..b4ff2de9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.js","sourceRoot":"","sources":["../../../src/credentials/topicCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAA2B,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAEjF;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,iBAAiB;IACrD;;;;OAIG;IACH,YAAY,QAAgB;QAC1B,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;YAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,MAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,QAAQ;aACxB;SACF,CAAC;QACF,KAAK,CAAC,OAAO,CAAC,CAAC;IACjB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ApiKeyCredentialOptions, ApiKeyCredentials } from \"./apiKeyCredentials\";\n\n/**\n * A {@link TopicCredentials} object used for Azure Event Grid.\n */\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @param topicKey - The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js new file mode 100644 index 00000000..1f5e7faf --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map new file mode 100644 index 00000000..99be6d9a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.js","sourceRoot":"","sources":["../../src/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { XhrHttpClient as DefaultHttpClient } from \"./xhrHttpClient\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js new file mode 100644 index 00000000..79177d54 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map new file mode 100644 index 00000000..4e2660b2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.js","sourceRoot":"","sources":["../../src/defaultHttpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { NodeFetchHttpClient as DefaultHttpClient } from \"./nodeFetchHttpClient\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClient.js b/node_modules/@azure/core-http/dist-esm/src/httpClient.js new file mode 100644 index 00000000..8908881c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClient.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=httpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/httpClient.js.map new file mode 100644 index 00000000..eeb169e5 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.js","sourceRoot":"","sources":["../../src/httpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy } from \"./policies/requestPolicy\";\n\n/**\n * An interface that can send HttpRequests and receive promised HttpResponses.\n */\nexport interface HttpClient extends RequestPolicy {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js new file mode 100644 index 00000000..48ec59ad --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { DefaultHttpClient } from "./defaultHttpClient"; +let cachedHttpClient; +export function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new DefaultHttpClient(); + } + return cachedHttpClient; +} +//# sourceMappingURL=httpClientCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map new file mode 100644 index 00000000..fd7907ab --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClientCache.js","sourceRoot":"","sources":["../../src/httpClientCache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAGxD,IAAI,gBAAwC,CAAC;AAE7C,MAAM,UAAU,0BAA0B;IACxC,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,IAAI,iBAAiB,EAAE,CAAC;KAC5C;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\n\nlet cachedHttpClient: HttpClient | undefined;\n\nexport function getCachedDefaultHttpClient(): HttpClient {\n if (!cachedHttpClient) {\n cachedHttpClient = new DefaultHttpClient();\n }\n\n return cachedHttpClient;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js new file mode 100644 index 00000000..e1e0ebac --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. 
+ */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +export function isHttpHeadersLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.rawHeaders === "function" && + typeof castObject.clone === "function" && + typeof castObject.get === "function" && + typeof castObject.set === "function" && + typeof castObject.contains === "function" && + typeof castObject.remove === "function" && + typeof castObject.headersArray === "function" && + typeof castObject.headerValues === "function" && + typeof castObject.headerNames === "function" && + typeof castObject.toJson === "function") { + return true; + } + } + return false; +} +/** + * A collection of HTTP header key/value pairs. + */ +export class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map new file mode 100644 index 00000000..2245c9f0 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.js","sourceRoot":"","sources":["../../src/httpHeaders.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,SAAS,YAAY,CAAC,UAAkB;IACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;AA4ED,MAAM,UAAU,iBAAiB,CAAC,MAAgB;IAChD,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAWlB,CAAC;QACF,IACE,OAAO,UAAU,CAAC,UAAU,KAAK,UAAU;YAC3C,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU;YACtC,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;YACpC,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;YACpC,OAAO,UAAU,CAAC,QAAQ,KAAK,UAAU;YACzC,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU;YACvC,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;YAC7C,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;YAC7C,OAAO,UAAU,CAAC,WAAW,KAAK,UAAU;YAC5C,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU,EACvC;YACA,OAAO,IAAI,CAAC;SACb;KACF;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,WAAW;IAGtB,YAAY,UAA2B;QACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;QACtB,IAAI,UAAU,EAAE;YACd,KAAK,MAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;aAC9C;SACF;IACH,CAAC;IAED;;;;;OAKG;IACI,GAAG,CAAC,UAAkB,EAAE,WAA4B;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;YAC3C,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,GAAG,CAAC,UAAkB;QAC3B,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QACtE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;IAC5C,CAAC;IAED;;OAEG;IACI,QAAQ,CAAC,UAAkB;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;IACtD,CAAC;IAED;;;;OAIG;IACI,MAAM,CAAC,UAAkB;QAC9B,MAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QAClD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,UAAU;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC;IAC7C,CAAC;IAED;;OAEG;IACI,YAAY;QACjB,MAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;SAC3C;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,WAAW;QAChB,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;OAEG;IACI,YAAY;QACjB,MAAM,YAAY,GAAa,EAAE,CAAC;QAClC,MAAM,OAAO,GAA
iB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;SACrC;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED;;OAEG;IACI,MAAM,CAAC,UAAsC,EAAE;QACpD,MAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,IAAI,OAAO,CAAC,YAAY,EAAE;YACxB,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;aACpC;SACF;aAAM;YACL,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;aAClD;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IAC7D,CAAC;IAED;;OAEG;IACI,KAAK;QACV,MAAM,sBAAsB,GAAmB,EAAE,CAAC;QAClD,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,sBAAsB,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;SACpD;QACD,OAAO,IAAI,WAAW,CAAC,sBAAsB,CAAC,CAAC;IACjD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string): string {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(options?: { preserveCase?: boolean }): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n rawHeaders: unknown;\n clone: unknown;\n get: unknown;\n set: unknown;\n contains: unknown;\n remove: unknown;\n headersArray: unknown;\n headerValues: unknown;\n headerNames: unknown;\n toJson: unknown;\n };\n if (\n typeof castObject.rawHeaders === \"function\" &&\n typeof castObject.clone === \"function\" &&\n typeof castObject.get === \"function\" &&\n typeof castObject.set === \"function\" &&\n typeof castObject.contains === \"function\" &&\n typeof castObject.remove === \"function\" &&\n typeof castObject.headersArray === \"function\" &&\n typeof castObject.headerValues === \"function\" &&\n typeof castObject.headerNames === \"function\" &&\n typeof castObject.toJson === \"function\"\n ) {\n return true;\n }\n }\n\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders implements HttpHeadersLike {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n return this.toJson({ preserveCase: true });\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header values that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(options: { preserveCase?: boolean } = {}): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n if (options.preserveCase) {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name] = header.value;\n }\n } else {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[getHeaderKey(header.name)] = header.value;\n }\n }\n return result;\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson({ preserveCase: true }));\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n const resultPreservingCasing: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n resultPreservingCasing[header.name] = header.value;\n }\n return new HttpHeaders(resultPreservingCasing);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js new file mode 100644 index 00000000..540684d9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=httpOperationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map new file mode 100644 index 00000000..abf86599 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.js","sourceRoot":"","sources":["../../src/httpOperationResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeadersLike } from \"./httpHeaders\";\nimport { WebResourceLike } from \"./webResource\";\n\n/**\n * The properties on an HTTP response which will always be present.\n */\nexport interface HttpResponse {\n /**\n * The raw request\n */\n request: WebResourceLike;\n\n /**\n * The HTTP response status (e.g. 200)\n */\n status: number;\n\n /**\n * The HTTP response headers.\n */\n headers: HttpHeadersLike;\n}\n\ndeclare global {\n /**\n * Stub declaration of the browser-only Blob type.\n * Full type information can be obtained by including \"lib\": [\"dom\"] in tsconfig.json.\n */\n interface Blob {}\n}\n\n/**\n * Wrapper object for http request and response. Deserialized object is stored in\n * the `parsedBody` property when the response body is received in JSON or XML.\n */\nexport interface HttpOperationResponse extends HttpResponse {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders?: { [key: string]: any };\n\n /**\n * The response body as text (string format)\n */\n bodyAsText?: string | null;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody?: any;\n\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always undefined in node.js.\n */\n blobBody?: Promise;\n\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n}\n\n/**\n * The flattened response to a REST call.\n * Contains the underlying {@link HttpOperationResponse} as well as\n * the merged properties of the `parsedBody`, `parsedHeaders`, etc.\n */\nexport interface RestResponse {\n /**\n * The underlying HTTP response containing both raw and deserialized response data.\n */\n _response: HttpOperationResponse;\n\n /**\n * The flattened properties described by the `OperationSpec`, deserialized from headers and the HTTP body.\n */\n [key: string]: any;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js new file mode 100644 index 00000000..4e70194e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. 
+ */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); +//# sourceMappingURL=httpPipelineLogLevel.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map new file mode 100644 index 00000000..168e6dcd --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.js","sourceRoot":"","sources":["../../src/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAN,IAAY,oBAoBX;AApBD,WAAY,oBAAoB;IAC9B;;OAEG;IACH,6DAAG,CAAA;IAEH;;OAEG;IACH,iEAAK,CAAA;IAEL;;OAEG;IACH,qEAAO,CAAA;IAEP;;OAEG;IACH,+DAAI,CAAA;AACN,CAAC,EApBW,oBAAoB,KAApB,oBAAoB,QAoB/B","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js new file mode 100644 index 00000000..6f61f81d --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export class ConsoleHttpPipelineLogger { + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel - The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel) { + this.minimumLogLevel = minimumLogLevel; + } + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. 
+ */ + log(logLevel, message) { + const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`; + switch (logLevel) { + case HttpPipelineLogLevel.ERROR: + console.error(logMessage); + break; + case HttpPipelineLogLevel.WARNING: + console.warn(logMessage); + break; + case HttpPipelineLogLevel.INFO: + console.log(logMessage); + break; + } + } +} +//# sourceMappingURL=httpPipelineLogger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map new file mode 100644 index 00000000..f4119d43 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.js","sourceRoot":"","sources":["../../src/httpPipelineLogger.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAoB9D;;GAEG;AACH,MAAM,OAAO,yBAAyB;IACpC;;;OAGG;IACH,YAAmB,eAAqC;QAArC,oBAAe,GAAf,eAAe,CAAsB;IAAG,CAAC;IAE5D;;;;OAIG;IACH,GAAG,CAAC,QAA8B,EAAE,OAAe;QACjD,MAAM,UAAU,GAAG,GAAG,oBAAoB,CAAC,QAAQ,CAAC,KAAK,OAAO,EAAE,CAAC;QACnE,QAAQ,QAAQ,EAAE;YAChB,KAAK,oBAAoB,CAAC,KAAK;gBAC7B,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;gBAC1B,MAAM;YAER,KAAK,oBAAoB,CAAC,OAAO;gBAC/B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;gBACzB,MAAM;YAER,KAAK,oBAAoB,CAAC,IAAI;gBAC5B,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;gBACxB,MAAM;SACT;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpPipelineLogLevel } from \"./httpPipelineLogLevel\";\n\n/**\n * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages\n * that can be used for debugging purposes.\n */\nexport interface HttpPipelineLogger {\n /**\n * The log level threshold for what logs will be logged.\n */\n minimumLogLevel: HttpPipelineLogLevel;\n\n /**\n * Log the provided message.\n * @param logLevel - The HttpLogDetailLevel associated with this message.\n * @param message - The message to log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * A HttpPipelineLogger that will send its logs to the console.\n */\nexport class ConsoleHttpPipelineLogger implements HttpPipelineLogger {\n /**\n * Create a new ConsoleHttpPipelineLogger.\n * @param minimumLogLevel - The log level threshold for what logs will be logged.\n */\n constructor(public minimumLogLevel: HttpPipelineLogLevel) {}\n\n /**\n * Log the provided message.\n * @param logLevel - The HttpLogDetailLevel associated with this message.\n * @param message - The message to log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void {\n const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`;\n switch (logLevel) {\n case HttpPipelineLogLevel.ERROR:\n console.error(logMessage);\n break;\n\n case HttpPipelineLogLevel.WARNING:\n console.warn(logMessage);\n break;\n\n case HttpPipelineLogLevel.INFO:\n console.log(logMessage);\n break;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/log.js b/node_modules/@azure/core-http/dist-esm/src/log.js new file mode 100644 index 00000000..10dd4222 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/log.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createClientLogger } from "@azure/logger"; +export const logger = createClientLogger("core-http"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/log.js.map b/node_modules/@azure/core-http/dist-esm/src/log.js.map new file mode 100644 index 00000000..52607988 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AACnD,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,WAAW,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { createClientLogger } from \"@azure/logger\";\nexport const logger = createClientLogger(\"core-http\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js new file mode 100644 index 00000000..ef2d1e3e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js @@ -0,0 +1,313 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as http from "http"; +import * as https from "https"; +import * as tough from "tough-cookie"; +import { AbortController, AbortError } from "@azure/abort-controller"; +import { HttpHeaders } from "./httpHeaders"; +import { createProxyAgent, isUrlHttps } from "./proxyAgent"; +import { Transform } from "stream"; +import FormData from "form-data"; +import { RestError } from "./restError"; +import { logger } from "./log"; +import node_fetch from "node-fetch"; +function getCachedAgent(isHttps, agentCache) { + return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; +} +export class ReportTransform extends Transform { + constructor(progressCallback) { + super(); + this.progressCallback = progressCallback; + this.loadedBytes = 0; + } + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(undefined); + } +} +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +export function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +export class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. 
+ */ + async sendRequest(httpRequest) { + var _a; + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); + } + const abortController = new AbortController(); + let abortListener; + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(() => { + abortController.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + const formData = httpRequest.formData; + const requestForm = new FormData(); + const appendFormValue = (key, value) => { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm.append(key, value.value, value.options); + } + else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm; + httpRequest.formData = undefined; + const contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm.getBoundary === "function") { + httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + let body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + const onUploadProgress = httpRequest.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const platformSpecificRequestInit = await this.prepareRequest(httpRequest); + const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); + let operationResponse; + try { + const response = await this.fetch(httpRequest.url, requestInit); + const headers = parseHeaders(response.headers); + const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || + httpRequest.streamResponseBody; + operationResponse = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: streaming + ? response.body + : undefined, + bodyAsText: !streaming ? 
await response.text() : undefined, + }; + const onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + const responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + const length = parseInt(headers.get("Content-Length")) || undefined; + if (length) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length }); + } + } + } + await this.processRequest(operationResponse); + return operationResponse; + } + catch (error) { + const fetchError = error; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new AbortError("The operation was aborted."); + } + throw fetchError; + } + finally { + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch((e) => { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + getOrCreateAgent(httpRequest) { + var _a; + const isHttps = isUrlHttps(httpRequest.url); + // At the moment, proxy settings and keepAlive are mutually + // exclusive because the 'tunnel' library currently lacks the + // ability to create a proxy with keepAlive turned on. + if (httpRequest.proxySettings) { + const { host, port, username, password } = httpRequest.proxySettings; + const key = `${host}:${port}:${username}:${password}`; + const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? _a : {}; + let agent = getCachedAgent(isHttps, proxyAgents); + if (agent) { + return agent; + } + const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + agent = tunnel.agent; + if (tunnel.isHttps) { + proxyAgents.httpsAgent = tunnel.agent; + } + else { + proxyAgents.httpAgent = tunnel.agent; + } + this.proxyAgentMap.set(key, proxyAgents); + return agent; + } + else if (httpRequest.keepAlive) { + let agent = getCachedAgent(isHttps, this.keepAliveAgents); + if (agent) { + return agent; + } + const agentOptions = { + keepAlive: httpRequest.keepAlive, + }; + if (isHttps) { + agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions); + } + else { + agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions); + } + return agent; + } + else { + return isHttps ? https.globalAgent : http.globalAgent; + } + } + /** + * Uses `node-fetch` to perform the request. + */ + // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs + async fetch(input, init) { + return node_fetch(input, init); + } + /** + * Prepares a request based on the provided web resource. 
+ */ + async prepareRequest(httpRequest) { + const requestInit = {}; + if (this.cookieJar && !httpRequest.headers.get("Cookie")) { + const cookieString = await new Promise((resolve, reject) => { + this.cookieJar.getCookieString(httpRequest.url, (err, cookie) => { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + }); + httpRequest.headers.set("Cookie", cookieString); + } + // Set the http(s) agent + requestInit.agent = this.getOrCreateAgent(httpRequest); + requestInit.compress = httpRequest.decompressResponse; + return requestInit; + } + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ + async processRequest(operationResponse) { + if (this.cookieJar) { + const setCookieHeader = operationResponse.headers.get("Set-Cookie"); + if (setCookieHeader !== undefined) { + await new Promise((resolve, reject) => { + this.cookieJar.setCookie(setCookieHeader, operationResponse.request.url, { ignoreError: true }, (err) => { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + }); + } + } + } +} +//# sourceMappingURL=nodeFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map new file mode 100644 index 00000000..2ccc94e8 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.js","sourceRoot":"","sources":["../../src/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AACtE,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAc,gBAAgB,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AACxE,OAAO,EAAY,SAAS,EAAE,MAAM,QAAQ,CAAC;AAE7C,OAAO,QAAQ,MAAM,WAAW,CAAC;AAGjC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAC/B,OAAO,UAAU,MAAM,YAAY,CAAC;AAOpC,SAAS,cAAc,CACrB,OAAgB,EAChB,UAAsB;IAEtB,OAAO,OAAO,CAAC,CAAC,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC;AAChE,CAAC;AA+BD,MAAM,OAAO,eAAgB,SAAQ,SAAS;IAS5C,YAAoB,gBAA2D;QAC7E,KAAK,EAAE,CAAC;QADU,qBAAgB,GAAhB,gBAAgB,CAA2C;QARvE,gBAAW,GAAW,CAAC,CAAC;IAUhC,CAAC;IATD,UAAU,CAAC,KAAsB,EAAE,SAAiB,EAAE,QAA4B;QAChF,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACjB,IAAI,CAAC,WAAW,IAAI,KAAK,CAAC,MAAM,CAAC;QACjC,IAAI,CAAC,gBAAiB,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAC1D,QAAQ,CAAC,SAAS,CAAC,CAAC;IACtB,CAAC;CAKF;AAED,SAAS,gBAAgB,CAAC,IAAS;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB,EAAE,OAAyB;IACnE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;QAC7B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE;YACxB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,KAAK,EAAE,CAAC;YACjB,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC,CAAC;QACH,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAChC,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,YAAY,CAAC,OAAgB;IAC3C,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE;QAC7B,WAAW,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,mBAAmB;IAAhC;QA8LE,wFAAwF;QAChF,kBAAa,GAA4B,IAAI,GAAG,EAAE,CAAC;QACnD,oBAAe,GAAe,EAAE,CAAC;QAExB,cAAS,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAmHnF,CAAC;IApTC;;;;OAIG;IACH,KAAK,CAAC,WA
AW,CAAC,WAA4B;;QAC5C,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;YACnD,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;QAC9C,IAAI,aAAiD,CAAC;QACtD,IAAI,WAAW,CAAC,WAAW,EAAE;YAC3B,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;gBACnC,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;aACpD;YAED,aAAa,GAAG,CAAC,KAAY,EAAE,EAAE;gBAC/B,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oBAC1B,eAAe,CAAC,KAAK,EAAE,CAAC;iBACzB;YACH,CAAC,CAAC;YACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;SAClE;QAED,IAAI,WAAW,CAAC,OAAO,EAAE;YACvB,UAAU,CAAC,GAAG,EAAE;gBACd,eAAe,CAAC,KAAK,EAAE,CAAC;YAC1B,CAAC,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;SACzB;QAED,IAAI,WAAW,CAAC,QAAQ,EAAE;YACxB,MAAM,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;YAC3C,MAAM,WAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;YACnC,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,KAAU,EAAQ,EAAE;gBACxD,0FAA0F;gBAC1F,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oBAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;iBACjB;gBACD,IACE,KAAK;oBACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC;oBACpD,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,EACtD;oBACA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iBACrD;qBAAM;oBACL,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBAChC;YACH,CAAC,CAAC;YACF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;gBAC3C,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qBACxC;iBACF;qBAAM;oBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iBACrC;aACF;YAED,WAAW,CAAC,IAAI,GAAG,WAAW,CAAC;YAC/B,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;YACjC,MAAM,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACpE,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;oBACjD,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,iCAAiC,WAAW,CAAC,WAAW,EAAE,EAAE,CAC7D,CAAC;iBACH;qBAAM;oBACL,kEAAkE;oBAClE,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iBAC5C;aACF;SACF;QAED,IAAI,IAAI,GAAG,WAAW,CAAC,IAAI;YACzB,CAAC,CAAC,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;gBACtC,CAAC,CAAC,WAAW,CAAC,IAAI,EAAE;gBACpB,CAAC,CAAC,WAAW,CAAC,IAAI;YACpB,CAAC,CAAC,SAAS,CAAC;QACd,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;YACpD,MAAM,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAC;YACtD,MAAM,kBAAkB,GAAG,IAAI,eAAe,CAAC,gBAAgB,CAAC,CAAC;YACjE,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gBAC1B,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;aAC/B;iBAAM;gBACL,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;aAC9B;YAED,IAAI,GAAG,kBAAkB,CAAC;SAC3B;QAED,MAAM,2BAA2B,GAAyB,MAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,CAAC;QAEF,MAAM,WAAW,mBACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAE,eAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,IACf,2BAA2B,CAC/B,CAAC;QAEF,IAAI,iBAAoD,CAAC;QACzD,IAAI;YACF,MAAM,QAAQ,GAAmB,MAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;YAEhF,MAAM,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAE/C,MAAM,SAAS,GACb,CAAA,MAAA,WAAW,CAAC,yBAAyB,0CAAE,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC;gBAC3D,WAAW,CAAC,kBAAkB,CAAC;YAEjC,iBAAiB,GAAG;gBAClB,OAAO,EAAE,OAAO;gBAChB,OAAO,EAAE,WAAW;gBACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;gBACvB,kBAAkB,EAAE,SAAS;oBAC3B,CAAC,CAAE,QAAQ,CAAC,IAAyC;oBACrD,CAAC,CAAC,SAAS;gBACb,UAAU,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,SAAS;aAC3D,CAAC;YAEF,MAAM,kBAAkB,GAAG,WAAW,CAAC,kBAAkB,CAAC;YAC1D,IAAI,kBAAkB,EAAE;gBACtB,MAAM,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;gBAExF,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;oBAClC,MAAM,oBAAoB,GAAG,IAAI,eAAe,CAAC,kBAAkB,CAAC,CAAC;oBACrE,YAAY,CAAC,I
AAI,CAAC,oBAAoB,CAAC,CAAC;oBACxC,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;iBAC7D;qBAAM;oBACL,MAAM,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;oBACrE,IAAI,MAAM,EAAE;wBACV,wEAAwE;wBACxE,kBAAkB,CAAC,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC,CAAC;qBAC7C;iBACF;aACF;YAED,MAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,CAAC;YAE7C,OAAO,iBAAiB,CAAC;SAC1B;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,UAAU,GAAe,KAAK,CAAC;YACrC,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;gBACnC,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;gBACxC,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;aACpD;YAED,MAAM,UAAU,CAAC;SAClB;gBAAS;YACR,0BAA0B;YAC1B,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;gBAC5C,IAAI,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;gBACzC,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;oBAC1B,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;iBAC3C;gBACD,IAAI,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;gBAC3C,IAAI,gBAAgB,CAAC,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;oBAC3D,kBAAkB,GAAG,gBAAgB,CACnC,iBAAkB,CAAC,kBAAkB,EACrC,eAAe,CAChB,CAAC;iBACH;gBAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;qBAChD,IAAI,CAAC,GAAG,EAAE;;oBACT,MAAA,WAAW,CAAC,WAAW,0CAAE,mBAAmB,CAAC,OAAO,EAAE,aAAc,CAAC,CAAC;oBACtE,OAAO;gBACT,CAAC,CAAC;qBACD,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;oBACX,MAAM,CAAC,OAAO,CAAC,qDAAqD,EAAE,CAAC,CAAC,CAAC;gBAC3E,CAAC,CAAC,CAAC;aACN;SACF;IACH,CAAC;IAQO,gBAAgB,CAAC,WAA4B;;QACnD,MAAM,OAAO,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;QAE5C,2DAA2D;QAC3D,6DAA6D;QAC7D,sDAAsD;QACtD,IAAI,WAAW,CAAC,aAAa,EAAE;YAC7B,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC,aAAa,CAAC;YACrE,MAAM,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,QAAQ,IAAI,QAAQ,EAAE,CAAC;YACtD,MAAM,WAAW,GAAG,MAAA,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,CAAC,mCAAI,EAAE,CAAC;YAEtD,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;YACjD,IAAI,KAAK,EAAE;gBACT,OAAO,KAAK,CAAC;aACd;YAED,MAAM,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;YAEF,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;YACrB,IAAI,MAAM,CAAC,OAAO,EAAE;gBAClB,WAAW,CAAC,UAAU,GAAG,MAAM,CAAC,KAAoB,CAAC;aACtD;iBAAM;gBACL,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;aACtC;YACD,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;YAEzC,OAAO,KAAK,CAAC;SACd;aAAM,IAAI,WAAW,CAAC,SAAS,EAAE;YAChC,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;YAC1D,IAAI,KAAK,EAAE;gBACT,OAAO,KAAK,CAAC;aACd;YAED,MAAM,YAAY,GAA2C;gBAC3D,SAAS,EAAE,WAAW,CAAC,SAAS;aACjC,CAAC;YAEF,IAAI,OAAO,EAAE;gBACX,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,GAAG,IAAI,KAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;aACzE;iBAAM;gBACL,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;aACvE;YAED,OAAO,KAAK,CAAC;SACd;aAAM;YACL,OAAO,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC;SACvD;IACH,CAAC;IAED;;OAEG;IACH,6EAA6E;IAC7E,KAAK,CAAC,KAAK,CAAC,KAAwB,EAAE,IAAwB;QAC5D,OAAO,UAAU,CAAC,KAAK,EAAE,IAAI,CAAuC,CAAC;IACvE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,cAAc,CAAC,WAA4B;QAC/C,MAAM,WAAW,GAA+D,EAAE,CAAC;QAEnF,IAAI,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;YACxD,MAAM,YAAY,GAAG,MAAM,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACjE,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,EAAE;oBAC/D,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAC;qBACb;yBAAM;wBACL,OAAO,CAAC,MAAM,CAAC,CAAC;qBACjB;gBACH,CAAC,CAAC,CAAC;YACL,CAAC,CAAC,CAAC;YAEH,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;SACjD;QAED,wBAAwB;QACxB,WAAW,CAAC,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;QAEvD,WAAW,CAAC,QAAQ,GAAG,WAAW,CAAC,kBAAkB,CAAC;QAEtD,OAA
O,WAAW,CAAC;IACrB,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,cAAc,CAAC,iBAAwC;QAC3D,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,MAAM,eAAe,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;YACpE,IAAI,eAAe,KAAK,SAAS,EAAE;gBACjC,MAAM,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;oBAC1C,IAAI,CAAC,SAAU,CAAC,SAAS,CACvB,eAAe,EACf,iBAAiB,CAAC,OAAO,CAAC,GAAG,EAC7B,EAAE,WAAW,EAAE,IAAI,EAAE,EACrB,CAAC,GAAG,EAAE,EAAE;wBACN,IAAI,GAAG,EAAE;4BACP,MAAM,CAAC,GAAG,CAAC,CAAC;yBACb;6BAAM;4BACL,OAAO,EAAE,CAAC;yBACX;oBACH,CAAC,CACF,CAAC;gBACJ,CAAC,CAAC,CAAC;aACJ;SACF;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tough from \"tough-cookie\";\nimport { AbortController, AbortError } from \"@azure/abort-controller\";\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxyAgent, createProxyAgent, isUrlHttps } from \"./proxyAgent\";\nimport { Readable, Transform } from \"stream\";\nimport { TransferProgressEvent, WebResourceLike } from \"./webResource\";\nimport FormData from \"form-data\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\nimport { logger } from \"./log\";\nimport node_fetch from \"node-fetch\";\n\ninterface AgentCache {\n httpAgent?: http.Agent;\n httpsAgent?: https.Agent;\n}\n\nfunction getCachedAgent(\n isHttps: boolean,\n agentCache: AgentCache\n): http.Agent | https.Agent | undefined {\n return isHttps ? agentCache.httpsAgent : agentCache.httpAgent;\n}\n\ninterface FetchError extends Error {\n code?: string;\n errno?: string;\n type?: string;\n}\n\n/**\n * String URLs used when calling to `fetch()`.\n */\nexport type CommonRequestInfo = string;\n\n/**\n * An object containing information about the outgoing HTTP request.\n */\nexport type CommonRequestInit = Omit & {\n body?: any;\n headers?: any;\n signal?: any;\n};\n\n/**\n * An object containing information about the incoming HTTP response.\n */\nexport type CommonResponse = Omit & {\n body: any;\n trailer: any;\n formData: any;\n};\n\nexport class ReportTransform extends Transform {\n private loadedBytes: number = 0;\n _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void {\n this.push(chunk);\n this.loadedBytes += chunk.length;\n this.progressCallback!({ loadedBytes: this.loadedBytes });\n callback(undefined);\n }\n\n constructor(private progressCallback: (progress: TransferProgressEvent) => void) {\n super();\n }\n}\n\nfunction isReadableStream(body: any): body is Readable {\n return body && typeof body.pipe === \"function\";\n}\n\nfunction isStreamComplete(stream: Readable, aborter?: AbortController): Promise {\n return new Promise((resolve) => {\n stream.once(\"close\", () => {\n aborter?.abort();\n resolve();\n });\n stream.once(\"end\", resolve);\n stream.once(\"error\", resolve);\n });\n}\n\n/**\n * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike}\n */\nexport function parseHeaders(headers: Headers): HttpHeadersLike {\n const httpHeaders = new HttpHeaders();\n\n headers.forEach((value, key) => {\n httpHeaders.set(key, value);\n });\n\n return httpHeaders;\n}\n\n/**\n * An HTTP client that uses `node-fetch`.\n */\nexport class NodeFetchHttpClient implements HttpClient {\n /**\n * Provides minimum viable error handling and the logic that executes the abstract methods.\n * @param httpRequest - Object representing 
the outgoing HTTP request.\n * @returns An object representing the incoming HTTP response.\n */\n async sendRequest(httpRequest: WebResourceLike): Promise {\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\n \"'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object.\"\n );\n }\n\n const abortController = new AbortController();\n let abortListener: ((event: any) => void) | undefined;\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n abortListener = (event: Event) => {\n if (event.type === \"abort\") {\n abortController.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n\n if (httpRequest.timeout) {\n setTimeout(() => {\n abortController.abort();\n }, httpRequest.timeout);\n }\n\n if (httpRequest.formData) {\n const formData: any = httpRequest.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any): void => {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (\n value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")\n ) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n httpRequest.body = requestForm;\n httpRequest.formData = undefined;\n const contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm.getBoundary === \"function\") {\n httpRequest.headers.set(\n \"Content-Type\",\n `multipart/form-data; boundary=${requestForm.getBoundary()}`\n );\n } else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n\n let body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n const onUploadProgress = httpRequest.onUploadProgress;\n const uploadReportStream = new ReportTransform(onUploadProgress);\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n } else {\n uploadReportStream.end(body);\n }\n\n body = uploadReportStream;\n }\n\n const platformSpecificRequestInit: Partial = await this.prepareRequest(\n httpRequest\n );\n\n const requestInit: RequestInit = {\n body: body,\n headers: httpRequest.headers.rawHeaders(),\n method: httpRequest.method,\n signal: abortController.signal,\n redirect: \"manual\",\n ...platformSpecificRequestInit,\n };\n\n let operationResponse: HttpOperationResponse | undefined;\n try {\n const response: CommonResponse = await this.fetch(httpRequest.url, requestInit);\n\n const headers = parseHeaders(response.headers);\n\n const streaming =\n httpRequest.streamResponseStatusCodes?.has(response.status) ||\n httpRequest.streamResponseBody;\n\n operationResponse = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: streaming\n ? 
[PATCH 2/4 "Run npm install" (continued): the remainder of the vendored node_modules diffs for @azure/core-http — the tail of the preceding source map followed by new files under dist-esm/src/: operationArguments.js, operationOptions.js, operationParameter.js, operationResponse.js, operationSpec.js, pipelineOptions.js, policies/bearerTokenAuthenticationPolicy.js, policies/deserializationPolicy.js, and their corresponding .js.map source maps. These hunks are generated `npm install` output and contain no hand-written changes.]
valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\",\n options\n );\n } catch (innerError: any) {\n const restError = new RestError(\n `Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n throw restError;\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\",\n options\n );\n }\n }\n\n return parsedResponse;\n }\n );\n}\n\nfunction isOperationSpecEmpty(operationSpec: OperationSpec): boolean {\n const expectedStatusCodes = Object.keys(operationSpec.responses);\n return (\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\")\n );\n}\n\nfunction handleErrorResponse(\n parsedResponse: HttpOperationResponse,\n operationSpec: OperationSpec,\n responseSpec: OperationResponse | undefined\n): { error: RestError | null; shouldReturnResponse: boolean } {\n const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;\n const isExpectedStatusCode: boolean = isOperationSpecEmpty(operationSpec)\n ? isSuccessByStatus\n : !!responseSpec;\n\n if (isExpectedStatusCode) {\n if (responseSpec) {\n if (!responseSpec.isError) {\n return { error: null, shouldReturnResponse: false };\n }\n } else {\n return { error: null, shouldReturnResponse: false };\n }\n }\n\n const errorResponseSpec = responseSpec ?? operationSpec.responses.default;\n const streaming =\n parsedResponse.request.streamResponseStatusCodes?.has(parsedResponse.status) ||\n parsedResponse.request.streamResponseBody;\n const initialErrorMessage = streaming\n ? `Unexpected status code: ${parsedResponse.status}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(\n initialErrorMessage,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n\n // If the item failed but there's no error spec or default spec to deserialize the error,\n // we should fail so we just throw the parsed response\n if (!errorResponseSpec) {\n throw error;\n }\n\n const defaultBodyMapper = errorResponseSpec.bodyMapper;\n const defaultHeadersMapper = errorResponseSpec.headersMapper;\n\n try {\n // If error response has a body, try to deserialize it using default body mapper.\n // Then try to extract error code & message from it\n if (parsedResponse.parsedBody) {\n const parsedBody = parsedResponse.parsedBody;\n let parsedError;\n if (defaultBodyMapper) {\n let valueToDeserialize: any = parsedBody;\n if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof parsedBody === \"object\" ? parsedBody[defaultBodyMapper.xmlElementName!] 
: [];\n }\n parsedError = operationSpec.serializer.deserialize(\n defaultBodyMapper,\n valueToDeserialize,\n \"error.response.parsedBody\"\n );\n }\n\n const internalError: any = parsedBody.error || parsedError || parsedBody;\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n\n if (defaultBodyMapper) {\n error.response!.parsedBody = parsedError;\n }\n }\n\n // If error response has headers, try to deserialize it using default header mapper\n if (parsedResponse.headers && defaultHeadersMapper) {\n error.response!.parsedHeaders = operationSpec.serializer.deserialize(\n defaultHeadersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n } catch (defaultError: any) {\n error.message = `Error \"${defaultError.message}\" occurred in deserializing the responseBody - \"${parsedResponse.bodyAsText}\" for the default response.`;\n }\n\n return { error, shouldReturnResponse: false };\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse,\n opts: Required\n): Promise {\n const errorHandler = (err: Error & { code: string }): Promise => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse\n );\n return Promise.reject(e);\n };\n\n const streaming =\n operationResponse.request.streamResponseStatusCodes?.has(operationResponse.status) ||\n operationResponse.request.streamResponseBody;\n if (!streaming && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text, opts)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js new file mode 100644 index 00000000..00afedf0 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* + * NOTE: When moving this file, please update "browser" section in package.json + */ +import { BaseRequestPolicy, } from "./requestPolicy"; +const DisbleResponseDecompressionNotSupportedInBrowser = new Error("DisableResponseDecompressionPolicy is not supported in browser environment"); +/** + * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting + * to use it will results in error being thrown. 
+ */ +export function disableResponseDecompressionPolicy() { + return { + create: (_nextPolicy, _options) => { + throw DisbleResponseDecompressionNotSupportedInBrowser; + }, + }; +} +export class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + throw DisbleResponseDecompressionNotSupportedInBrowser; + } + async sendRequest(_request) { + throw DisbleResponseDecompressionNotSupportedInBrowser; + } +} +//# sourceMappingURL=disableResponseDecompressionPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map new file mode 100644 index 00000000..88145bcb --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.browser.js","sourceRoot":"","sources":["../../../src/policies/disableResponseDecompressionPolicy.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,MAAM,gDAAgD,GAAG,IAAI,KAAK,CAChE,4EAA4E,CAC7E,CAAC;AAEF;;;GAGG;AACH,MAAM,UAAU,kCAAkC;IAChD,OAAO;QACL,MAAM,EAAE,CAAC,WAA0B,EAAE,QAA8B,EAAE,EAAE;YACrE,MAAM,gDAAgD,CAAC;QACzD,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,kCAAmC,SAAQ,iBAAiB;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,MAAM,gDAAgD,CAAC;IACzD,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,QAAqB;QAC5C,MAAM,gDAAgD,CAAC;IACzD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json\n */\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResource } from \"../webResource\";\n\nconst DisbleResponseDecompressionNotSupportedInBrowser = new Error(\n \"DisableResponseDecompressionPolicy is not supported in browser environment\"\n);\n\n/**\n * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting\n * to use it will results in error being thrown.\n */\nexport function disableResponseDecompressionPolicy(): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptions) => {\n throw DisbleResponseDecompressionNotSupportedInBrowser;\n },\n };\n}\n\nexport class DisableResponseDecompressionPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n throw DisbleResponseDecompressionNotSupportedInBrowser;\n }\n\n public async sendRequest(_request: WebResource): Promise {\n throw DisbleResponseDecompressionNotSupportedInBrowser;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js new file mode 100644 index 00000000..bf033e00 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +export function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=disableResponseDecompressionPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map new file mode 100644 index 00000000..5a50cd8f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.js","sourceRoot":"","sources":["../../../src/policies/disableResponseDecompressionPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB;;;GAGG;AACH,MAAM,UAAU,kCAAkC;IAChD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,kCAAkC,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QACrE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,MAAM,OAAO,kCAAmC,SAAQ,iBAAiB;IACvE;;;;;OAKG;IACH,uCAAuC;IACvC,wEAAwE;IACxE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;;;;OAKG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,OAAO,CAAC,kBAAkB,GAAG,KAAK,CAAC;QACnC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResource } from \"../webResource\";\n\n/**\n * Returns a request policy factory that can be used to create an instance of\n * {@link DisableResponseDecompressionPolicy}.\n */\nexport function disableResponseDecompressionPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DisableResponseDecompressionPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * A policy to disable response decompression according to Accept-Encoding header\n * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding\n */\nexport class DisableResponseDecompressionPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of DisableResponseDecompressionPolicy.\n *\n * @param nextPolicy -\n * @param options -\n */\n // The parent constructor is protected.\n /* eslint-disable-next-line 
@typescript-eslint/no-useless-constructor */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResource): Promise {\n request.decompressResponse = false;\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js new file mode 100644 index 00000000..56768cc1 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { DEFAULT_CLIENT_MAX_RETRY_INTERVAL, DEFAULT_CLIENT_RETRY_COUNT, DEFAULT_CLIENT_RETRY_INTERVAL, isNumber, shouldRetry, updateRetryData, } from "../util/exponentialBackoffStrategy"; +import { Constants } from "../util/constants"; +import { RestError } from "../restError"; +import { delay } from "../util/delay"; +import { logger } from "../log"; +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +export function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); + }, + }; +} +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +export var RetryMode; +(function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; +})(RetryMode || (RetryMode = {})); +export const DefaultRetryOptions = { + maxRetries: DEFAULT_CLIENT_RETRY_COUNT, + retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, +}; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ + constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? 
maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => retry(this, request, response)) + .catch((error) => retry(this, request, error.response, undefined, error)); + } +} +async function retry(policy, request, response, retryData, requestError) { + function shouldPolicyRetry(responseParam) { + const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; + if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { + return false; + } + if (statusCode === undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + return true; + } + retryData = updateRetryData({ + retryInterval: policy.retryInterval, + minRetryInterval: 0, + maxRetryInterval: policy.maxRetryInterval, + }, retryData, requestError); + const isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { + logger.info(`Retrying request in ${retryData.retryInterval}`); + try { + await delay(retryData.retryInterval); + const res = await policy._nextPolicy.sendRequest(request.clone()); + return retry(policy, request, res, retryData); + } + catch (err) { + return retry(policy, request, response, retryData, err); + } + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + const err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + throw err; + } + else { + return response; + } +} +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map new file mode 100644 index 00000000..2ec16229 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"exponentialRetryPolicy.js","sourceRoot":"","sources":["../../../src/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,iCAAiC,EACjC,0BAA0B,EAC1B,6BAA6B,EAG7B,QAAQ,EACR,WAAW,EACX,eAAe,GAChB,MAAM,oCAAoC,CAAC;AAC5C,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAEzC,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,CAAN,IAAY,SAMX;AAND,WAAY,SAAS;IACnB;;;OAGG;IACH,uDAAW,CAAA;AACb,CAAC,EANW,SAAS,KAAT,SAAS,QAMpB;AA8BD,MAAM,CAAC,MAAM,mBAAmB,GAAiB;IAC/C,UAAU,EAAE,0BAA0B;IACtC,cAAc,EAAE,6BAA6B;IAC7C,iBAAiB,EAAE,iCAAiC;CACrD,CAAC;AAEF;;GAEG;AACH,MAAM,OAAO,sBAAuB,SAAQ,iBAAiB;IAc3D;;;;;;;OAOG;IACH,YACE,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB;QAEzB,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,0BAA0B,CAAC;QACjF,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,6BAA6B,CAAC;QAC7F,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;IACxC,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;aAClD,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC;IAC9E,CAAC;CACF;AAED,KAAK,UAAU,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;IAEzB,SAAS,iBAAiB,CAAC,aAAqC;QAC9D,MAAM,UAAU,GAAG,aAAa,aAAb,aAAa,uBAAb,aAAa,CAAE,MAAM,CAAC;QACzC,IAAI,UAAU,KAAK,GAAG,KAAI,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA,EAAE;YACtF,OAAO,KAAK,CAAC;SACd;QAED,IACE,UAAU,KAAK,SAAS;YACxB,CAAC,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;YACxC,UAAU,KAAK,GAAG;YAClB,UAAU,KAAK,GAAG,EAClB;YACA,OAAO,KAAK,CAAC;SACd;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,SAAS,GAAG,eAAe,CACzB;QACE,aAAa,EAAE,MAAM,CAAC,aAAa;QACnC,gBAAgB,EAAE,CAAC;QACnB,gBAAgB,EAAE,MAAM,CAAC,gBAAgB;KAC1C,EACD,SAAS,EACT,YAAY,CACb,CAAC;IAEF,MAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;IAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE;QACxF,MAAM,CAAC,IAAI,CAAC,uBAAuB,SAAS,CAAC,aAAa,EAAE,CAAC,CAAC;QAC9D,IAAI;YACF,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrC,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;YAClE,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,CAAC,CAAC;SAC/C;QAAC,OAAO,GAAQ,EAAE;YACjB,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;SACzD;KACF;SAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;QACjD,qFAAqF;QACrF,MAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;QACJ,MAAM,GAAG,CAAC;KACX;SAAM;QACL,OAAO,QAAQ,CAAC;KACjB;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n 
DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\nimport { logger } from \"../log\";\n\n/**\n * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time.\n * @param retryCount - Maximum number of retries.\n * @param retryInterval - Base time between retries.\n * @param maxRetryInterval - Maximum time to wait between retries.\n */\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * Describes the Retry Mode type. Currently supporting only Exponential.\n */\nexport enum RetryMode {\n /**\n * Currently supported retry mode.\n * Each time a retry happens, it will take exponentially more time than the last time.\n */\n Exponential,\n}\n\n/**\n * Options that control how to retry failed requests.\n */\nexport interface RetryOptions {\n /**\n * The maximum number of retry attempts. Defaults to 3.\n */\n maxRetries?: number;\n\n /**\n * The amount of delay in milliseconds between retry attempts. Defaults to 30000\n * (30 seconds). The delay increases exponentially with each retry up to a maximum\n * specified by maxRetryDelayInMs.\n */\n retryDelayInMs?: number;\n\n /**\n * The maximum delay in milliseconds allowed before retrying an operation. Defaults\n * to 90000 (90 seconds).\n */\n maxRetryDelayInMs?: number;\n\n /**\n * Currently supporting only Exponential mode.\n */\n mode?: RetryMode;\n}\n\nexport const DefaultRetryOptions: RetryOptions = {\n maxRetries: DEFAULT_CLIENT_RETRY_COUNT,\n retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL,\n maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n};\n\n/**\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @param nextPolicy - The next RequestPolicy in the pipeline chain.\n * @param options - The options for this RequestPolicy.\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\nasync function retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n function shouldPolicyRetry(responseParam?: HttpOperationResponse): boolean {\n const statusCode = responseParam?.status;\n if (statusCode === 503 && response?.headers.get(Constants.HeaderConstants.RETRY_AFTER)) {\n return false;\n }\n\n if (\n statusCode === undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n return true;\n }\n\n retryData = updateRetryData(\n {\n retryInterval: policy.retryInterval,\n minRetryInterval: 0,\n maxRetryInterval: policy.maxRetryInterval,\n },\n retryData,\n requestError\n );\n\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) {\n logger.info(`Retrying request in ${retryData.retryInterval}`);\n try {\n await delay(retryData.retryInterval);\n const res = await policy._nextPolicy.sendRequest(request.clone());\n return retry(policy, request, res, retryData);\n } catch (err: any) {\n return retry(policy, request, response, retryData, err);\n }\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n throw err;\n } else {\n return response;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js new file mode 100644 index 00000000..868c1b71 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. 
+ */ +export function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + create: (nextPolicy, options) => { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +export class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _requestIdHeaderName) { + super(nextPolicy, options); + this._requestIdHeaderName = _requestIdHeaderName; + } + sendRequest(request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, request.requestId); + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=generateClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map new file mode 100644 index 00000000..a05940d6 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.js","sourceRoot":"","sources":["../../../src/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB;;;GAGG;AACH,MAAM,UAAU,6BAA6B,CAC3C,mBAAmB,GAAG,wBAAwB;IAE9C,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;QACrF,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,6BAA8B,SAAQ,iBAAiB;IAClE,YACE,UAAyB,EACzB,OAA6B,EACrB,oBAA4B;QAEpC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFnB,yBAAoB,GAApB,oBAAoB,CAAQ;IAGtC,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;YACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;SACnE;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that assigns a unique request id to outgoing requests.\n * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request.\n */\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, request.requestId);\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js new file mode 100644 index 
00000000..22581800 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * By default, HTTP connections are maintained for future requests. + */ +export const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +export function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +export class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy, options, keepAliveOptions) { + super(nextPolicy, options); + this.keepAliveOptions = keepAliveOptions; + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.keepAlive = this.keepAliveOptions.enable; + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=keepAlivePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map new file mode 100644 index 00000000..4cec3806 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"keepAlivePolicy.js","sourceRoot":"","sources":["../../../src/policies/keepAlivePolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAgBzB;;GAEG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAqB;IACvD,MAAM,EAAE,IAAI;CACb,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,gBAAmC;IACjE,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,gBAAgB,IAAI,uBAAuB,CAAC,CAAC;QAC/F,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,iBAAiB;IACpD;;;;;;OAMG;IACH,YACE,UAAyB,EACzB,OAA6B,EACZ,gBAAkC;QAEnD,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFV,qBAAgB,GAAhB,gBAAgB,CAAkB;IAGrD,CAAC;IAED;;;;;OAKG;IACI,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;QACjD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Options for how HTTP connections should be maintained for future\n * requests.\n */\nexport interface KeepAliveOptions {\n /**\n * When true, connections will be kept alive for multiple requests.\n * Defaults to true.\n */\n enable: boolean;\n}\n\n/**\n * By default, HTTP connections are maintained for future requests.\n */\nexport const DefaultKeepAliveOptions: KeepAliveOptions = {\n enable: true,\n};\n\n/**\n * Creates a policy 
that controls whether HTTP connections are maintained on future requests.\n * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests.\n * @returns An instance of the {@link KeepAlivePolicy}\n */\nexport function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions);\n },\n };\n}\n\n/**\n * KeepAlivePolicy is a policy used to control keep alive settings for every request.\n */\nexport class KeepAlivePolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param keepAliveOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private readonly keepAliveOptions: KeepAliveOptions\n ) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResourceLike): Promise {\n request.keepAlive = this.keepAliveOptions.enable;\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js new file mode 100644 index 00000000..05d1225a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Sanitizer } from "../util/sanitizer"; +import { logger as coreLogger } from "../log"; +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +export function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +export class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger = coreLogger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. 
By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map new file mode 100644 index 00000000..ac9f9838 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logPolicy.js","sourceRoot":"","sources":["../../../src/policies/logPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,OAAO,EAAE,MAAM,IAAI,UAAU,EAAE,MAAM,QAAQ,CAAC;AAiC9C;;;;GAIG;AACH,MAAM,UAAU,SAAS,CAAC,iBAAmC,EAAE;IAC7D,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QAC5D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,SAAU,SAAQ,iBAAiB;IA4C9C,YACE,UAAyB,EACzB,OAA6B,EAC7B,EACE,MAAM,GAAG,UAAU,CAAC,IAAI,EACxB,kBAAkB,GAAG,EAAE,EACvB,sBAAsB,GAAG,EAAE,MACP,EAAE;QAExB,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,CAAC,CAAC;IACjF,CAAC;IApDD;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB,CAAC,kBAA+B;QAC3D,IAAI,CAAC,SAAS,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;IACzD,CAAC;IAED;;;;OAIG;IACH,IAAW,sBAAsB;QAC/B,OAAO,IAAI,CAAC,SAAS,CAAC,sBAAsB,CAAC;IAC/C,CAAC;IAED;;;;OAIG;IACH,IAAW,sBAAsB,CAAC,sBAAmC;QACnE,IAAI,CAAC,SAAS,CAAC,sBAAsB,GAAG,sBAAsB,CAAC;IACjE,CAAC;IAgBM,WAAW,CAAC,OAAwB;QACzC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;YAAE,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;QAEvE,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QACzB,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC9F,CAAC;IAEO,UAAU,CAAC,OAAwB;QACzC,IAAI,CAAC,MAAM,CAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IAC9D,CAAC;IAEO,WAAW,CAAC,QAA+B;QACjD,IAAI,CAAC,MAAM,CAAC,yBAAyB,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC;QACxD,IAAI,CAAC,MAAM,CAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;QACrE,OAAO,QAAQ,CAAC;IAClB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Debugger } from \"@azure/logger\";\nimport { HttpOperationResponse } from 
\"../httpOperationResponse\";\nimport { Sanitizer } from \"../util/sanitizer\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger as coreLogger } from \"../log\";\n\n/**\n * Options to pass to the {@link logPolicy}.\n * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged.\n */\nexport interface LogPolicyOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to:\n * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id,\n * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials,\n * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers,\n * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding,\n * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match,\n * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent.\n *\n * Any headers specified in this field will be added to that list.\n * Any other values will be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n\n /**\n * The Debugger (logger) instance to use for writing pipeline logs.\n */\n logger?: Debugger;\n}\n\n/**\n * Creates a policy that logs information about the outgoing request and the incoming responses.\n * @param loggingOptions - Logging options.\n * @returns An instance of the {@link LogPolicy}\n */\nexport function logPolicy(loggingOptions: LogPolicyOptions = {}): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new LogPolicy(nextPolicy, options, loggingOptions);\n },\n };\n}\n\n/**\n * A policy that logs information about the outgoing request and the incoming responses.\n */\nexport class LogPolicy extends BaseRequestPolicy {\n logger: Debugger;\n sanitizer: Sanitizer;\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedHeaderNames(): Set {\n return this.sanitizer.allowedHeaderNames;\n }\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedHeaderNames(allowedHeaderNames: Set) {\n this.sanitizer.allowedHeaderNames = allowedHeaderNames;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. 
By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedQueryParameters(): Set {\n return this.sanitizer.allowedQueryParameters;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedQueryParameters(allowedQueryParameters: Set) {\n this.sanitizer.allowedQueryParameters = allowedQueryParameters;\n }\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n {\n logger = coreLogger.info,\n allowedHeaderNames = [],\n allowedQueryParameters = [],\n }: LogPolicyOptions = {}\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters });\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!this.logger.enabled) return this._nextPolicy.sendRequest(request);\n\n this.logRequest(request);\n return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response));\n }\n\n private logRequest(request: WebResourceLike): void {\n this.logger(`Request: ${this.sanitizer.sanitize(request)}`);\n }\n\n private logResponse(response: HttpOperationResponse): HttpOperationResponse {\n this.logger(`Response status code: ${response.status}`);\n this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`);\n return response;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js new file mode 100644 index 00000000..36467994 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export function getDefaultUserAgentKey() { + return "x-ms-useragent"; +} +export function getPlatformSpecificData() { + const navigator = self.navigator; + const osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + return [osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map new file mode 100644 index 00000000..b6f19562 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.js","sourceRoot":"","sources":["../../../src/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAalC,MAAM,UAAU,sBAAsB;IACpC,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,SAAwB,CAAC;IAChD,MAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,CAAC,SAAS,CAAC,KAAK,IAAI,SAAS,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC;KAChE,CAAC;IAEF,OAAO,CAAC,MAAM,CAAC,CAAC;AAClB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\ninterface NavigatorEx extends Navigator {\n // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2\n readonly oscpu: string;\n}\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-useragent\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const navigator = self.navigator as NavigatorEx;\n const osInfo = {\n key: \"OS\",\n value: (navigator.oscpu || navigator.platform).replace(\" \", \"\"),\n };\n\n return [osInfo];\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js new file mode 100644 index 00000000..e1210a9a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import * as os from "os"; +import { Constants } from "../util/constants"; +export function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +export function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os.arch()}-${os.type()}-${os.release()})`, + }; + return [runtimeInfo, osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map new file mode 100644 index 00000000..738f7c7c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.js","sourceRoot":"","sources":["../../../src/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,MAAM,UAAU,sBAAsB;IACpC,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,MAAM,WAAW,GAAG;QAClB,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;IAEF,MAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,IAAI,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,OAAO,EAAE,GAAG;KACrD,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as os from \"os\";\nimport { Constants } from \"../util/constants\";\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\nexport function getDefaultUserAgentKey(): string {\n return Constants.HeaderConstants.USER_AGENT;\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const runtimeInfo = {\n key: \"Node\",\n value: process.version,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `(${os.arch()}-${os.type()}-${os.release()})`,\n };\n\n return [runtimeInfo, osInfo];\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js new file mode 100644 index 00000000..fe17cf34 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+const { Platform } = require("react-native"); // eslint-disable-line import/no-extraneous-dependencies, @typescript-eslint/no-require-imports +export function getDefaultUserAgentKey() { + return "x-ms-useragent"; +} +export function getPlatformSpecificData() { + const { major, minor, patch } = Platform.constants.reactNativeVersion; + const runtimeInfo = { + key: "react-native", + value: `${major}.${minor}.${patch}`, + }; + const osInfo = { + key: "OS", + value: `${Platform.OS}-${Platform.Version}`, + }; + return [runtimeInfo, osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.native.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map new file mode 100644 index 00000000..641603ba --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.native.js","sourceRoot":"","sources":["../../../src/policies/msRestUserAgentPolicy.native.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC,MAAM,EAAE,QAAQ,EAAE,GAAG,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,+FAA+F;AAE7I,MAAM,UAAU,sBAAsB;IACpC,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAC,SAAS,CAAC,kBAAkB,CAAC;IACtE,MAAM,WAAW,GAAG;QAClB,GAAG,EAAE,cAAc;QACnB,KAAK,EAAE,GAAG,KAAK,IAAI,KAAK,IAAI,KAAK,EAAE;KACpC,CAAC;IAEF,MAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,GAAG,QAAQ,CAAC,EAAE,IAAI,QAAQ,CAAC,OAAO,EAAE;KAC5C,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/*\n * NOTE: When moving this file, please update \"react-native\" section in package.json.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\nconst { Platform } = require(\"react-native\"); // eslint-disable-line import/no-extraneous-dependencies, @typescript-eslint/no-require-imports\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-useragent\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const { major, minor, patch } = Platform.constants.reactNativeVersion;\n const runtimeInfo = {\n key: \"react-native\",\n value: `${major}.${minor}.${patch}`,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `${Platform.OS}-${Platform.Version}`,\n };\n\n return [runtimeInfo, osInfo];\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js new file mode 100644 index 00000000..19119e55 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// BaseRequestPolicy has a protected constructor. +/* eslint-disable @typescript-eslint/no-useless-constructor */ +import { BaseRequestPolicy, } from "./requestPolicy"; +export function ndJsonPolicy() { + return { + create: (nextPolicy, options) => { + return new NdJsonPolicy(nextPolicy, options); + }, + }; +} +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends a request. 
+ */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=ndJsonPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map new file mode 100644 index 00000000..4513fcf2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ndJsonPolicy.js","sourceRoot":"","sources":["../../../src/policies/ndJsonPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,iDAAiD;AACjD,8DAA8D;AAE9D,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,MAAM,UAAU,YAAY;IAC1B,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC/C,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,YAAa,SAAQ,iBAAiB;IAC1C;;OAEG;IACH,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,4DAA4D;QAC5D,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACpE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;YACtC,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBACvB,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;aACzE;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// BaseRequestPolicy has a protected constructor.\n/* eslint-disable @typescript-eslint/no-useless-constructor */\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nexport function ndJsonPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new NdJsonPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * NdJsonPolicy that formats a JSON array as newline-delimited JSON\n */\nclass NdJsonPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends a request.\n */\n public async sendRequest(request: WebResourceLike): Promise {\n // There currently isn't a good way to bypass the serializer\n if (typeof request.body === \"string\" && request.body.startsWith(\"[\")) {\n const body = JSON.parse(request.body);\n if (Array.isArray(body)) {\n request.body = body.map((item) => JSON.stringify(item) + \"\\n\").join(\"\");\n }\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js new file mode 100644 index 00000000..260366e2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js @@ -0,0 +1,24 @@ +// Copyright (c) 
Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +const proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); +export function getDefaultProxySettings(_proxyUrl) { + return undefined; +} +export function proxyPolicy(_proxySettings) { + return { + create: (_nextPolicy, _options) => { + throw proxyNotSupportedInBrowser; + }, + }; +} +export class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + throw proxyNotSupportedInBrowser; + } + sendRequest(_request) { + throw proxyNotSupportedInBrowser; + } +} +//# sourceMappingURL=proxyPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map new file mode 100644 index 00000000..82643681 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.browser.js","sourceRoot":"","sources":["../../../src/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAKzB,MAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,MAAM,UAAU,uBAAuB,CAAC,SAAkB;IACxD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,cAA8B;IACxD,OAAO;QACL,MAAM,EAAE,CAAC,WAA0B,EAAE,QAA8B,EAAE,EAAE;YACrE,MAAM,0BAA0B,CAAC;QACnC,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,WAAY,SAAQ,iBAAiB;IAChD,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,MAAM,0BAA0B,CAAC;IACnC,CAAC;IAEM,WAAW,CAAC,QAAyB;QAC1C,MAAM,0BAA0B,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst proxyNotSupportedInBrowser = new Error(\"ProxyPolicy is not supported in browser environment\");\n\nexport function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined {\n return undefined;\n}\n\nexport function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptions) => {\n throw proxyNotSupportedInBrowser;\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n throw proxyNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw proxyNotSupportedInBrowser;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js new file mode 100644 index 00000000..031230bc --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { BaseRequestPolicy, } from "./requestPolicy"; +import { Constants } from "../util/constants"; +import { URLBuilder } from "../url"; +import { getEnvironmentValue } from "../util/utils"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +export function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +export function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, + }; +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. 
+ * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +export class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { + super(nextPolicy, options); + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; + } + sendRequest(request) { + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map new file mode 100644 index 00000000..484a8f4c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"proxyPolicy.js","sourceRoot":"","sources":["../../../src/policies/proxyPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC,OAAO,EAAE,mBAAmB,EAAE,MAAM,eAAe,CAAC;AAEpD;;;GAGG;AACH,MAAM,CAAC,MAAM,iBAAiB,GAAa,EAAE,CAAC;AAC9C,IAAI,iBAAiB,GAAY,KAAK,CAAC;AAEvC,yDAAyD;AACzD,MAAM,iBAAiB,GAAyB,IAAI,GAAG,EAAE,CAAC;AAE1D,SAAS,yBAAyB;IAChC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,MAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;IAE5D,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED;;;;GAIG;AACH,SAAS,UAAU,CACjB,GAAW,EACX,WAAqB,EACrB,WAAkC;IAElC,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAC;KACd;IACD,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,GAAG,CAAC,IAAI,CAAC,EAAE;QAC1B,OAAO,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC9B;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YACtB,mEAAmE;YACnE,mDAAmD;YACnD,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;aACvB;iBAAM;gBACL,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;iBACvB;aACF;SACF;aAAM;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;aACvB;SACF;KACF;IACD,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACvC,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,MAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,iBAAiB,GAAG,IAAI,CAAC;IACzB,IAAI,OAAO,EAAE;QACX,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC;aAC1B,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;KAClC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,uBAAuB,CAAC,QAAiB;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,SAAS,CAAC;SAClB;KACF;IAED,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,cAAc,EAAE,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC5E,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;IACnD,MAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC1E,OAAO;QACL,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ;QACR,QAAQ;KACT,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CACzB,aAA6B,EAC7B,OAGC;IAED,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;KAC3C;IACD,IAAI,CAAC,iBAAiB,EAAE;QACtB,iBAAiB,CAAC,IAAI,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;KAC1C;IACD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,oBAA0C,EAAE,EAAE;YAChF,OAAO,IAAI,WAAW,CACpB,UAAU,EACV,oBAAoB,EACpB,aAAc,EACd,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB,CAC3B,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,GAAW;IAKrC,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACjC,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;QAClB,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;KAChC;IAED,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACvC,MAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3D,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,MAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;IACtC,MAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IAC
pE,MAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1E,MAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ;QACR,QAAQ;QACR,cAAc;KACf,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,WAAY,SAAQ,iBAAiB;IAChD,YACE,UAAyB,EACzB,OAA6B,EACtB,aAA4B,EAC3B,iBAA4B;QAEpC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAHpB,kBAAa,GAAb,aAAa,CAAe;QAC3B,sBAAiB,GAAjB,iBAAiB,CAAW;IAGtC,CAAC;IAEM,WAAW,CAAC,OAAwB;;QACzC,IACE,CAAC,OAAO,CAAC,aAAa;YACtB,CAAC,UAAU,CACT,OAAO,CAAC,GAAG,EACX,MAAA,IAAI,CAAC,iBAAiB,mCAAI,iBAAiB,EAC3C,IAAI,CAAC,iBAAiB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,iBAAiB,CACvD,EACD;YACA,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getEnvironmentValue } from \"../util/utils\";\n\n/**\n * Stores the patterns specified in NO_PROXY environment variable.\n * @internal\n */\nexport const globalNoProxyList: string[] = [];\nlet noProxyListLoaded: boolean = false;\n\n/** A cache of whether a host should bypass the proxy. */\nconst globalBypassedMap: Map = new Map();\n\nfunction loadEnvironmentProxyValue(): string | undefined {\n if (!process) {\n return undefined;\n }\n\n const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n const allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n\n return httpsProxy || allProxy || httpProxy;\n}\n\n/**\n * Check whether the host of a given `uri` matches any pattern in the no proxy list.\n * If there's a match, any request sent to the same host shouldn't have the proxy settings set.\n * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\n */\nfunction isBypassed(\n uri: string,\n noProxyList: string[],\n bypassedMap?: Map\n): boolean | undefined {\n if (noProxyList.length === 0) {\n return false;\n }\n const host = URLBuilder.parse(uri).getHost()!;\n if (bypassedMap?.has(host)) {\n return bypassedMap.get(host);\n }\n let isBypassedFlag = false;\n for (const pattern of noProxyList) {\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n } else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n } else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n bypassedMap?.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n\n/**\n * @internal\n */\nexport function loadNoProxy(): string[] {\n const noProxy = getEnvironmentValue(Constants.NO_PROXY);\n noProxyListLoaded = true;\n if (noProxy) {\n return noProxy\n .split(\",\")\n .map((item) => item.trim())\n .filter((item) => item.length);\n }\n\n return [];\n}\n\n/**\n * Converts a given URL of a proxy server into `ProxySettings` or 
attempts to retrieve `ProxySettings` from the current environment if one is not passed.\n * @param proxyUrl - URL of the proxy\n * @returns The default proxy settings, or undefined.\n */\nexport function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n\n const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl);\n const parsedUrl = URLBuilder.parse(urlWithoutAuth);\n const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username,\n password,\n };\n}\n\n/**\n * A policy that allows one to apply proxy settings to all requests.\n * If not passed static settings, they will be retrieved from the HTTPS_PROXY\n * or HTTP_PROXY environment variables.\n * @param proxySettings - ProxySettings to use on each request.\n * @param options - additional settings, for example, custom NO_PROXY patterns\n */\nexport function proxyPolicy(\n proxySettings?: ProxySettings,\n options?: {\n /** a list of patterns to override those loaded from NO_PROXY environment variable. */\n customNoProxyList?: string[];\n }\n): RequestPolicyFactory {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n if (!noProxyListLoaded) {\n globalNoProxyList.push(...loadNoProxy());\n }\n return {\n create: (nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions) => {\n return new ProxyPolicy(\n nextPolicy,\n requestPolicyOptions,\n proxySettings!,\n options?.customNoProxyList\n );\n },\n };\n}\n\nfunction extractAuthFromUrl(url: string): {\n username?: string;\n password?: string;\n urlWithoutAuth: string;\n} {\n const atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n\n const schemeIndex = url.indexOf(\"://\");\n const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n const auth = url.substring(authStart, atIndex);\n const colonIndex = auth.indexOf(\":\");\n const hasPassword = colonIndex !== -1;\n const username = hasPassword ? auth.substring(0, colonIndex) : auth;\n const password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username,\n password,\n urlWithoutAuth,\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public proxySettings: ProxySettings,\n private customNoProxyList?: string[]\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (\n !request.proxySettings &&\n !isBypassed(\n request.url,\n this.customNoProxyList ?? globalNoProxyList,\n this.customNoProxyList ? undefined : globalBypassedMap\n )\n ) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js new file mode 100644 index 00000000..2a8a0597 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { BaseRequestPolicy, } from "./requestPolicy"; +import { URLBuilder } from "../url"; +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +export const DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +export function redirectPolicy(maximumRetries = 20) { + return { + create: (nextPolicy, options) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +export class RedirectPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, maxRetries = 20) { + super(nextPolicy, options); + this.maxRetries = maxRetries; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} +function handleRedirect(policy, response, currentRetries) { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + (!policy.maxRetries || currentRetries < policy.maxRetries)) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)); + } + return Promise.resolve(response); +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map new file mode 100644 index 00000000..a4c6c8c1 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"redirectPolicy.js","sourceRoot":"","sources":["../../../src/policies/redirectPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC;;GAEG;AACH,MAAM,eAAe,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AAkBxC,MAAM,CAAC,MAAM,sBAAsB,GAAoB;IACrD,eAAe,EAAE,IAAI;IACrB,UAAU,EAAE,EAAE;CACf,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,cAAc,GAAG,EAAE;IAChD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QACjE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,cAAe,SAAQ,iBAAiB;IACnD,YAAY,UAAyB,EAAE,OAA6B,EAAW,aAAa,EAAE;QAC5F,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QADkD,eAAU,GAAV,UAAU,CAAK;IAE9F,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC;IAC3D,CAAC;CACF;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;IAEtB,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC;IACrC,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxD,IACE,cAAc;QACd,CAAC,MAAM,KAAK,GAAG;YACb,CAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC5D,CAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC5D,CAAC,MAAM,KAAK,GAAG,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;QACjB,CAAC,CAAC,MAAM,CAAC,UAAU,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,EAC1D;QACA,MAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;QAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;QAEjC,+DAA+D;QAC/D,+EAA+E;QAC/E,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;SACrB;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,CAAC,CAAC;KACnE;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Methods that are allowed to follow redirects 301 and 302\n */\nconst allowedRedirect = [\"GET\", \"HEAD\"];\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /**\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /**\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\n/**\n * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n * @param maximumRetries - Maximum number of redirects to follow.\n * @returns An instance of the {@link RedirectPolicy}\n */\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\n/**\n * Resends the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n */\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, readonly maxRetries = 20) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && allowedRedirect.includes(request.method)) ||\n (status === 302 && allowedRedirect.includes(request.method)) ||\n (status === 303 && request.method === \"POST\") ||\n status === 307) &&\n (!policy.maxRetries || currentRetries < policy.maxRetries)\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n if (status === 303) {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1));\n }\n\n return Promise.resolve(response);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js new file mode 100644 index 00000000..639c271b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +/** + * The base class from which all request policies derive. + */ +export class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. 
+ */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export class RequestPolicyOptions { + constructor(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return (!!this._logger && + logLevel !== HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + } +} +//# sourceMappingURL=requestPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map new file mode 100644 index 00000000..eb09d865 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.js","sourceRoot":"","sources":["../../../src/policies/requestPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAsB/D;;GAEG;AACH,MAAM,OAAgB,iBAAiB;IACrC;;OAEG;IACH;IACE;;OAEG;IACM,WAA0B;IACnC;;OAEG;IACM,QAAkC;QAJlC,gBAAW,GAAX,WAAW,CAAe;QAI1B,aAAQ,GAAR,QAAQ,CAA0B;IAC1C,CAAC;IAQJ;;;;OAIG;IACI,SAAS,CAAC,QAA8B;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;CACF;AAsBD;;GAEG;AACH,MAAM,OAAO,oBAAoB;IAC/B,YAAoB,OAA4B;QAA5B,YAAO,GAAP,OAAO,CAAqB;IAAG,CAAC;IAEpD;;;;OAIG;IACI,SAAS,CAAC,QAA8B;QAC7C,OAAO,CACL,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAK,oBAAoB,CAAC,GAAG;YACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,CACzC,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACrC;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\n/**\n * The underlying structure of a request policy.\n */\nexport interface RequestPolicy {\n /**\n * A method that retrieves an {@link 
HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made.\n * @param httpRequest - {@link WebResourceLike} describing the request to be made.\n */\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\n/**\n * The base class from which all request policies derive.\n */\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n /**\n * The main method to implement that manipulates a request/response.\n */\n protected constructor(\n /**\n * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline.\n */\n readonly _nextPolicy: RequestPolicy,\n /**\n * The options that can be passed to a given request policy.\n */\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n /**\n * Sends a network request based on the given web resource.\n * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made.\n */\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. 
If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js new file mode 100644 index 00000000..5f5e3fff --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { delay } from "../util/delay"; +export function rpRegistrationPolicy(retryTimeout = 30) { + return { + create: (nextPolicy, options) => { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +export class RPRegistrationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _retryTimeout = 30) { + super(nextPolicy, options); + this._retryTimeout = _retryTimeout; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => registerIfNeeded(this, request, response)); + } +} +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + const rpName = checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + const urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(() => false) + .then((registrationStatus) => { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", utils.generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param originalRequest - The original request + * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. + * @returns A new request object with desired headers. + */ +function getRequestEssentials(originalRequest, reuseUrlToo = false) { + const reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. 
+ * @param body - The response body received after making the original request. + * @returns The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + let result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param url - The original request url + * @returns The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + let result; + const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + } + return result; +} +/** + * Registers the given provider. + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param provider - The provider name to be registered. + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + */ +async function registerRP(policy, urlPrefix, provider, originalRequest) { + const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; + const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + const response = await policy._nextPolicy.sendRequest(reqOptions); + if (response.status !== 200) { + throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); + } + return getRegistrationStatus(policy, getUrl, originalRequest); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param url - The request url for polling + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns True if RP Registration is successful. 
+ */ +async function getRegistrationStatus(policy, url, originalRequest) { + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + const res = await policy._nextPolicy.sendRequest(reqOptions); + const obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + await delay(policy._retryTimeout * 1000); + return getRegistrationStatus(policy, url, originalRequest); + } +} +//# sourceMappingURL=rpRegistrationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map new file mode 100644 index 00000000..120c82cf --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rpRegistrationPolicy.js","sourceRoot":"","sources":["../../../src/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AACvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC,MAAM,UAAU,oBAAoB,CAAC,YAAY,GAAG,EAAE;IACpD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACrE,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,oBAAqB,SAAQ,iBAAiB;IACzD,YACE,UAAyB,EACzB,OAA6B,EACpB,gBAAgB,EAAE;QAE3B,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFlB,kBAAa,GAAb,aAAa,CAAK;IAG7B,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;IACnE,CAAC;CACF;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;IAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,MAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;QACxE,IAAI,MAAM,EAAE;YACV,MAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,OAAO,CACL,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;gBAC5C,wFAAwF;gBACxF,kFAAkF;gBAClF,uFAAuF;iBACtF,KAAK,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC;iBAClB,IAAI,CAAC,CAAC,kBAAkB,EAAE,EAAE;gBAC3B,IAAI,kBAAkB,EAAE;oBACtB,2EAA2E;oBAC3E,0EAA0E;oBAC1E,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;iBACxD;gBACD,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC,CACL,CAAC;SACH;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;GAKG;AACH,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAW,GAAG,KAAK;IAEnB,MAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;IAC5D,IAAI,WAAW,EAAE;QACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;KACtC;IAED,wEAAwE;IACxE,iDAAiD;IACjD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;IAEvE,uCAAuC;IACvC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;IAE1E,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;GAKG;AACH,SAAS,yBAAyB,CAAC,IAAY;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;IACzB,IAAI,IAAI,EAAE;QACR,IAAI;YACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;SACjC;QAAC,OAAO,GAAQ,EAAE;YACjB,cAAc;SACf;QACD,IACE,YAAY;YACZ,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;YACvB,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;YACA,MAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC/D,IAAI,QAAQ,EAAE;gBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;aACzB;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,SAAS,sBAAsB,CAAC,GAAW;IACzC,IAAI,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;I
AChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,yDAAyD,GAAG,GAAG,CAAC,CAAC;KAClF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;GAOG;AACH,KAAK,UAAU,UAAU,CACvB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;IAEhC,MAAM,OAAO,GAAG,GAAG,SAAS,aAAa,QAAQ,kCAAkC,CAAC;IACpF,MAAM,MAAM,GAAG,GAAG,SAAS,aAAa,QAAQ,yBAAyB,CAAC;IAC1E,MAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;IAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IAClE,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,MAAM,IAAI,KAAK,CAAC,uBAAuB,QAAQ,2CAA2C,CAAC,CAAC;KAC7F;IACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;AAChE,CAAC;AAED;;;;;;;;GAQG;AACH,KAAK,UAAU,qBAAqB,CAClC,MAA4B,EAC5B,GAAW,EACX,eAAgC;IAEhC,MAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;IAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;IACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IAC7D,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,CAAC;IAC3B,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;QACrF,OAAO,IAAI,CAAC;KACb;SAAM;QACL,MAAM,KAAK,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,CAAC;QACzC,OAAO,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,CAAC;KAC5D;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"../util/utils\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. 
We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param originalRequest - The original request\n * @param reuseUrlToo - Should the url from the original request be reused as well. Default false.\n * @returns A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param body - The response body received after making the original request.\n * @returns The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err: any) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param url - The original request url\n * @returns The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param provider - The provider name to be registered.\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n */\nasync function registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n const response 
= await policy._nextPolicy.sendRequest(reqOptions);\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n}\n\n/**\n * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param url - The request url for polling\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns True if RP Registration is successful.\n */\nasync function getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n const res = await policy._nextPolicy.sendRequest(reqOptions);\n const obj = res.parsedBody;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n await delay(policy._retryTimeout * 1000);\n return getRegistrationStatus(policy, url, originalRequest);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js new file mode 100644 index 00000000..c298812c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +export function signingPolicy(authenticationProvider) { + return { + create: (nextPolicy, options) => { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. 
+ */ +export class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} +//# sourceMappingURL=signingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map new file mode 100644 index 00000000..1fbcc984 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"signingPolicy.js","sourceRoot":"","sources":["../../../src/policies/signingPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAKzB;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAC3B,sBAAgD;IAEhD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;QACxE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,aAAc,SAAQ,iBAAiB;IAClD,YACE,UAAyB,EACzB,OAA6B,EACtB,sBAAgD;QAEvD,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFpB,2BAAsB,GAAtB,sBAAsB,CAA0B;IAGzD,CAAC;IAED,WAAW,CAAC,OAAwB;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC1D,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,EAAE,CACpD,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAC1C,CAAC;IACJ,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n * @param authenticationProvider - The authentication provider.\n * @returns An instance of the {@link SigningPolicy}.\n */\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\n/**\n * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n */\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js new file mode 100644 index 00000000..84c6793f --- /dev/null +++ 
b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { DEFAULT_CLIENT_MAX_RETRY_INTERVAL, DEFAULT_CLIENT_MIN_RETRY_INTERVAL, DEFAULT_CLIENT_RETRY_COUNT, DEFAULT_CLIENT_RETRY_INTERVAL, isNumber, shouldRetry, updateRetryData, } from "../util/exponentialBackoffStrategy"; +import { delay } from "../util/delay"; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +export function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +export class SystemErrorRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? 
maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .catch((error) => retry(this, request, error.response, error)); + } +} +async function retry(policy, request, operationResponse, err, retryData) { + retryData = updateRetryData(policy, retryData, err); + function shouldPolicyRetry(_response, error) { + if (error && + error.code && + (error.code === "ETIMEDOUT" || + error.code === "ESOCKETTIMEDOUT" || + error.code === "ECONNREFUSED" || + error.code === "ECONNRESET" || + error.code === "ENOENT")) { + return true; + } + return false; + } + if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { + // If previous operation ended with an error and the policy allows a retry, do that + try { + await delay(retryData.retryInterval); + return policy._nextPolicy.sendRequest(request.clone()); + } + catch (nestedErr) { + return retry(policy, request, operationResponse, nestedErr, retryData); + } + } + else { + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return Promise.reject(retryData.error); + } + return operationResponse; + } +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map new file mode 100644 index 00000000..ec403a8b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"systemErrorRetryPolicy.js","sourceRoot":"","sources":["../../../src/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,iCAAiC,EACjC,iCAAiC,EACjC,0BAA0B,EAC1B,6BAA6B,EAG7B,QAAQ,EACR,WAAW,EACX,eAAe,GAChB,MAAM,oCAAoC,CAAC;AAG5C,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC;;;;;;;GAOG;AACH,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,MAAM,OAAO,sBAAuB,SAAQ,iBAAiB;IAM3D,YACE,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAEzB,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,0BAA0B,CAAC;QACjF,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,6BAA6B,CAAC;QAC7F,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;QACtC,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;IACxC,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IACnE,CAAC;CACF;AAED,KAAK,UAAU,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;IAErB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;IAEpD,SAAS,iBAAiB,CAAC,SAAiC,EAAE,KAAkB;QAC9E,IACE,KAAK;YACL,KAAK,CAAC,IAAI;YACV,CAAC,KAAK,CAAC,IAAI,KAAK,WAAW;gBACzB,KAAK,CAAC,IAAI,KAAK,iBAAiB;gBAChC,KAAK,CAAC,IAAI,KAAK,cAAc;gBAC7B,KAAK,CAAC,IAAI,KAAK,YAAY;gBAC3B,KAAK,CAAC,IAAI,KAAK,QAAQ,CAAC,EAC1B;YACA,OAAO,IAAI,CAAC;SACb;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,IAAI,WAAW,CAAC,MAAM,CA
AC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,iBAAiB,EAAE,GAAG,CAAC,EAAE;QACxF,mFAAmF;QACnF,IAAI;YACF,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrC,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;SACxD;QAAC,OAAO,SAAc,EAAE;YACvB,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;SACxE;KACF;SAAM;QACL,IAAI,GAAG,EAAE;YACP,qFAAqF;YACrF,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;SACxC;QACD,OAAO,iBAAiB,CAAC;KAC1B;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - Maximum number of retries.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n * @returns An instance of the {@link SystemErrorRetryPolicy}\n */\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n\n function shouldPolicyRetry(_response?: HttpOperationResponse, error?: RetryError): boolean {\n if (\n error &&\n error.code &&\n (error.code === \"ETIMEDOUT\" ||\n error.code === \"ESOCKETTIMEDOUT\" ||\n error.code === \"ECONNREFUSED\" ||\n error.code === \"ECONNRESET\" ||\n error.code === \"ENOENT\")\n ) {\n return true;\n }\n return false;\n }\n\n if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (nestedErr: any) {\n return retry(policy, request, operationResponse, nestedErr, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js new file mode 100644 index 00000000..e20e000e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { AbortError } from "@azure/abort-controller"; +import { Constants } from "../util/constants"; +import { DEFAULT_CLIENT_MAX_RETRY_COUNT } from "../util/throttlingRetryStrategy"; +import { delay } from "../util/delay"; +const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +export function throttlingRetryPolicy() { + return { + create: (nextPolicy, options) => { + return new ThrottlingRetryPolicy(nextPolicy, options); + }, + }; +} +const StandardAbortMessage = "The operation was aborted."; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export class ThrottlingRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _handleResponse) { + super(nextPolicy, options); + this.numberOfRetries = 0; + this._handleResponse = _handleResponse || this._defaultResponseHandler; + } + async sendRequest(httpRequest) { + const response = await this._nextPolicy.sendRequest(httpRequest.clone()); + if (response.status !== StatusCodes.TooManyRequests && + response.status !== StatusCodes.ServiceUnavailable) { + return response; + } + else { + return this._handleResponse(httpRequest, response); + } + } + async _defaultResponseHandler(httpRequest, httpResponse) { + var _a; + const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (retryAfterHeader) { + const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (delayInMs) { + this.numberOfRetries += 1; + await delay(delayInMs, undefined, { + abortSignal: httpRequest.abortSignal, + abortErrorMsg: StandardAbortMessage, + }); + if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw new AbortError(StandardAbortMessage); + } + if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { + return this.sendRequest(httpRequest); + } + else { + return this._nextPolicy.sendRequest(httpRequest); + } + } + } + return httpResponse; + } + static parseRetryAfterHeader(headerValue) { + const retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + } + static parseDateRetryAfterHeader(headerValue) { + try { + const now = Date.now(); + const date = Date.parse(headerValue); + const diff = date - now; + return Number.isNaN(diff) ? 
undefined : diff; + } + catch (error) { + return undefined; + } + } +} +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map new file mode 100644 index 00000000..35066f9f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.js","sourceRoot":"","sources":["../../../src/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AACrD,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,8BAA8B,EAAE,MAAM,iCAAiC,CAAC;AAGjF,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAMtC,MAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AAExD;;;;;;;;;GASG;AACH,MAAM,UAAU,qBAAqB;IACnC,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QACxD,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,oBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;GAQG;AACH,MAAM,OAAO,qBAAsB,SAAQ,iBAAiB;IAI1D,YACE,UAAyB,EACzB,OAA6B,EAC7B,eAAiC;QAEjC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAPrB,oBAAe,GAAG,CAAC,CAAC;QAQ1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,IAAI,CAAC,uBAAuB,CAAC;IACzE,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,WAA4B;QACnD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC;QACzE,IACE,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe;YAC/C,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,kBAAkB,EAClD;YACA,OAAO,QAAQ,CAAC;SACjB;aAAM;YACL,OAAO,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;SACpD;IACH,CAAC;IAEO,KAAK,CAAC,uBAAuB,CACnC,WAA4B,EAC5B,YAAmC;;QAEnC,MAAM,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;QAEF,IAAI,gBAAgB,EAAE;YACpB,MAAM,SAAS,GACb,qBAAqB,CAAC,qBAAqB,CAAC,gBAAgB,CAAC,CAAC;YAChE,IAAI,SAAS,EAAE;gBACb,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC;gBAE1B,MAAM,KAAK,CAAC,SAAS,EAAE,SAAS,EAAE;oBAChC,WAAW,EAAE,WAAW,CAAC,WAAW;oBACpC,aAAa,EAAE,oBAAoB;iBACpC,CAAC,CAAC;gBAEH,IAAI,MAAA,WAAW,CAAC,WAAW,0CAAE,OAAO,EAAE;oBACpC,MAAM,IAAI,UAAU,CAAC,oBAAoB,CAAC,CAAC;iBAC5C;gBAED,IAAI,IAAI,CAAC,eAAe,GAAG,8BAA8B,EAAE;oBACzD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;iBACtC;qBAAM;oBACL,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;iBAClD;aACF;SACF;QAED,OAAO,YAAY,CAAC;IACtB,CAAC;IAEM,MAAM,CAAC,qBAAqB,CAAC,WAAmB;QACrD,MAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;QAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;YACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;SACrE;aAAM;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;SACnC;IACH,CAAC;IAEM,MAAM,CAAC,yBAAyB,CAAC,WAAmB;QACzD,IAAI;YACF,MAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,MAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC7C,MAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;YAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;SAC9C;QAAC,OAAO,KAAU,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { AbortError } from \"@azure/abort-controller\";\nimport { Constants } from \"../util/constants\";\nimport { DEFAULT_CLIENT_MAX_RETRY_COUNT } from \"../util/throttlingRetryStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\ntype 
ResponseHandler = (\n httpRequest: WebResourceLike,\n response: HttpOperationResponse\n) => Promise;\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n * @returns\n */\nexport function throttlingRetryPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ThrottlingRetryPolicy(nextPolicy, options);\n },\n };\n}\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private _handleResponse: ResponseHandler;\n private numberOfRetries = 0;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n _handleResponse?: ResponseHandler\n ) {\n super(nextPolicy, options);\n this._handleResponse = _handleResponse || this._defaultResponseHandler;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n const response = await this._nextPolicy.sendRequest(httpRequest.clone());\n if (\n response.status !== StatusCodes.TooManyRequests &&\n response.status !== StatusCodes.ServiceUnavailable\n ) {\n return response;\n } else {\n return this._handleResponse(httpRequest, response);\n }\n }\n\n private async _defaultResponseHandler(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse\n ): Promise {\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader) {\n const delayInMs: number | undefined =\n ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);\n if (delayInMs) {\n this.numberOfRetries += 1;\n\n await delay(delayInMs, undefined, {\n abortSignal: httpRequest.abortSignal,\n abortErrorMsg: StandardAbortMessage,\n });\n\n if (httpRequest.abortSignal?.aborted) {\n throw new AbortError(StandardAbortMessage);\n }\n\n if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) {\n return this.sendRequest(httpRequest);\n } else {\n return this._nextPolicy.sendRequest(httpRequest);\n }\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public 
static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error: any) {\n return undefined;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js new file mode 100644 index 00000000..2a1b6b3f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { SpanKind, SpanStatusCode, createSpanFunction, getTraceParentHeader, isSpanContextValid, } from "@azure/core-tracing"; +import { logger } from "../log"; +const createSpan = createSpanFunction({ + packagePrefix: "", + namespace: "", +}); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +export function tracingPolicy(tracingOptions = {}) { + return { + create(nextPolicy, options) { + return new TracingPolicy(nextPolicy, options, tracingOptions); + }, + }; +} +/** + * A policy that wraps outgoing requests with a tracing span. + */ +export class TracingPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, tracingOptions) { + super(nextPolicy, options); + this.userAgent = tracingOptions.userAgent; + } + async sendRequest(request) { + if (!request.tracingContext) { + return this._nextPolicy.sendRequest(request); + } + const span = this.tryCreateSpan(request); + if (!span) { + return this._nextPolicy.sendRequest(request); + } + try { + const response = await this._nextPolicy.sendRequest(request); + this.tryProcessResponse(span, response); + return response; + } + catch (err) { + this.tryProcessError(span, err); + throw err; + } + } + tryCreateSpan(request) { + var _a; + try { + // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. + // We can pass this as a separate parameter once we upgrade to the latest core-tracing. + const { span } = createSpan(`HTTP ${request.method}`, { + tracingOptions: { + spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: SpanKind.CLIENT }), + tracingContext: request.tracingContext, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? 
void 0 : _a.getValue(Symbol.for("az.namespace")); + if (typeof namespaceFromContext === "string") { + span.setAttribute("az.namespace", namespaceFromContext); + } + span.setAttributes({ + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }); + if (this.userAgent) { + span.setAttribute("http.user_agent", this.userAgent); + } + // set headers + const spanContext = span.spanContext(); + const traceParentHeader = getTraceParentHeader(spanContext); + if (traceParentHeader && isSpanContextValid(spanContext)) { + request.headers.set("traceparent", traceParentHeader); + const traceState = spanContext.traceState && spanContext.traceState.serialize(); + // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent + if (traceState) { + request.headers.set("tracestate", traceState); + } + } + return span; + } + catch (error) { + logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); + return undefined; + } + } + tryProcessError(span, err) { + try { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: err.message, + }); + if (err.statusCode) { + span.setAttribute("http.status_code", err.statusCode); + } + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } + tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + code: SpanStatusCode.OK, + }); + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } +} +//# sourceMappingURL=tracingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map new file mode 100644 index 00000000..16c3c855 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracingPolicy.js","sourceRoot":"","sources":["../../../src/policies/tracingPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAEL,QAAQ,EACR,cAAc,EACd,kBAAkB,EAClB,oBAAoB,EACpB,kBAAkB,GACnB,MAAM,qBAAqB,CAAC;AAG7B,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC,MAAM,UAAU,GAAG,kBAAkB,CAAC;IACpC,aAAa,EAAE,EAAE;IACjB,SAAS,EAAE,EAAE;CACd,CAAC,CAAC;AAYH;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,iBAAuC,EAAE;IACrE,OAAO;QACL,MAAM,CAAC,UAAyB,EAAE,OAA6B;YAC7D,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QAChE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,aAAc,SAAQ,iBAAiB;IAGlD,YACE,UAAyB,EACzB,OAA6B,EAC7B,cAAoC;QAEpC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,SAAS,GAAG,cAAc,CAAC,SAAS,CAAC;IAC5C,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE;YAC3B,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;QAEzC,IAAI,CAAC,IAAI,EAAE;YACT,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;YAC7D,IAAI,CAAC,kBAAkB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;YACxC,OAAO,QAAQ,CAAC;SACjB;QAAC,OAAO,GAAQ,EAAE;YACjB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;YAChC,MAAM,GAAG,CAAC;SACX;IACH,CAAC;IAED,aAAa,CAAC,OAAwB;;QACpC,IAAI;YACF,sHAAsH;YACtH,uFAAuF;YACvF,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC,QAAQ,OAAO,CAAC,MAAM,EAAE,EAAE;gBACpD,cAAc,EAAE;oBACd,WAAW,kCACL,OAAe,CAAC,WAAW,KAC/B,IAAI,EAAE,QAAQ,CAAC,MAAM,GACtB;oBACD,cAAc,EAAE,OAAO,CAAC,cAAc;iBACvC;aACF,CAAC,CAAC;YAEH,wDAAwD;YACxD,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;gBACvB,IAAI,CAAC,GAAG,EAAE,CAAC;gBACX,OAAO,SAAS,CAAC;aAClB;YAED,MAAM,oBAAoB,GAAG,MAAA,OAAO,CAAC,cAAc,0CAAE,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC;YAE1F,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;gBAC5C,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,oBAAoB,CAAC,CAAC;aACzD;YAED,IAAI,CAAC,aAAa,CAAC;gBACjB,aAAa,EAAE,OAAO,CAAC,MAAM;gBAC7B,UAAU,EAAE,OAAO,CAAC,GAAG;gBACvB,SAAS,EAAE,OAAO,CAAC,SAAS;aAC7B,CAAC,CAAC;YAEH,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,iBAAiB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;aACtD;YAED,cAAc;YACd,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;YACvC,MAAM,iBAAiB,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;YAC5D,IAAI,iBAAiB,IAAI,kBAAkB,CAAC,WAAW,CAAC,EAAE;gBACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,iBAAiB,CAAC,CAAC;gBACtD,MAAM,UAAU,GAAG,WAAW,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC;gBAChF,0FAA0F;gBAC1F,IAAI,UAAU,EAAE;oBACd,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;iBAC/C;aACF;YACD,OAAO,IAAI,CAAC;SACb;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,qDAAqD,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;YACrF,OAAO,SAAS,CAAC;SAClB;IACH,CAAC;IAEO,eAAe,CAAC,IAAU,EAAE,GAAQ;QAC1C,IAAI;YACF,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,GAAG,CAAC,OAAO;aACrB,CAAC,CAAC;YAEH,IAAI,GAAG,CAAC,UAAU,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;aACvD;YACD,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,qDAAqD,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;SACtF;IACH,CAAC;IAEO,kBAAkB,CAAC,IAAU,EAAE,QAA+B;QACpE,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;YACvD,MAAM,gBAAgB,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;YACjE,IAAI,gBAAgB,EAAE;gBACpB,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACzD;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,EAAE;aACxB,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,qDAAqD,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;SACtF;IACH,CAAC;CACF","sourcesC
ontent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n Span,\n SpanKind,\n SpanStatusCode,\n createSpanFunction,\n getTraceParentHeader,\n isSpanContextValid,\n} from \"@azure/core-tracing\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger } from \"../log\";\n\nconst createSpan = createSpanFunction({\n packagePrefix: \"\",\n namespace: \"\",\n});\n\n/**\n * Options to customize the tracing policy.\n */\nexport interface TracingPolicyOptions {\n /**\n * User agent used to better identify the outgoing requests traced by the tracing policy.\n */\n userAgent?: string;\n}\n\n/**\n * Creates a policy that wraps outgoing requests with a tracing span.\n * @param tracingOptions - Tracing options.\n * @returns An instance of the {@link TracingPolicy} class.\n */\nexport function tracingPolicy(tracingOptions: TracingPolicyOptions = {}): RequestPolicyFactory {\n return {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n return new TracingPolicy(nextPolicy, options, tracingOptions);\n },\n };\n}\n\n/**\n * A policy that wraps outgoing requests with a tracing span.\n */\nexport class TracingPolicy extends BaseRequestPolicy {\n private userAgent?: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n tracingOptions: TracingPolicyOptions\n ) {\n super(nextPolicy, options);\n this.userAgent = tracingOptions.userAgent;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n if (!request.tracingContext) {\n return this._nextPolicy.sendRequest(request);\n }\n\n const span = this.tryCreateSpan(request);\n\n if (!span) {\n return this._nextPolicy.sendRequest(request);\n }\n\n try {\n const response = await this._nextPolicy.sendRequest(request);\n this.tryProcessResponse(span, response);\n return response;\n } catch (err: any) {\n this.tryProcessError(span, err);\n throw err;\n }\n }\n\n tryCreateSpan(request: WebResourceLike): Span | undefined {\n try {\n // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier.\n // We can pass this as a separate parameter once we upgrade to the latest core-tracing.\n const { span } = createSpan(`HTTP ${request.method}`, {\n tracingOptions: {\n spanOptions: {\n ...(request as any).spanOptions,\n kind: SpanKind.CLIENT,\n },\n tracingContext: request.tracingContext,\n },\n });\n\n // If the span is not recording, don't do any more work.\n if (!span.isRecording()) {\n span.end();\n return undefined;\n }\n\n const namespaceFromContext = request.tracingContext?.getValue(Symbol.for(\"az.namespace\"));\n\n if (typeof namespaceFromContext === \"string\") {\n span.setAttribute(\"az.namespace\", namespaceFromContext);\n }\n\n span.setAttributes({\n \"http.method\": request.method,\n \"http.url\": request.url,\n requestId: request.requestId,\n });\n\n if (this.userAgent) {\n span.setAttribute(\"http.user_agent\", this.userAgent);\n }\n\n // set headers\n const spanContext = span.spanContext();\n const traceParentHeader = getTraceParentHeader(spanContext);\n if (traceParentHeader && isSpanContextValid(spanContext)) {\n request.headers.set(\"traceparent\", traceParentHeader);\n const traceState = spanContext.traceState && spanContext.traceState.serialize();\n // if tracestate is set, traceparent MUST be 
set, so only set tracestate after traceparent\n if (traceState) {\n request.headers.set(\"tracestate\", traceState);\n }\n }\n return span;\n } catch (error: any) {\n logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`);\n return undefined;\n }\n }\n\n private tryProcessError(span: Span, err: any): void {\n try {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: err.message,\n });\n\n if (err.statusCode) {\n span.setAttribute(\"http.status_code\", err.statusCode);\n }\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n\n private tryProcessResponse(span: Span, response: HttpOperationResponse): void {\n try {\n span.setAttribute(\"http.status_code\", response.status);\n const serviceRequestId = response.headers.get(\"x-ms-request-id\");\n if (serviceRequestId) {\n span.setAttribute(\"serviceRequestId\", serviceRequestId);\n }\n span.setStatus({\n code: SpanStatusCode.OK,\n });\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js new file mode 100644 index 00000000..527856c2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; +import { Constants } from "../util/constants"; +import { HttpHeaders } from "../httpHeaders"; +function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +export const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +export function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +export function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. 
+ */ +export class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map new file mode 100644 index 00000000..cbc2aa12 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"userAgentPolicy.js","sourceRoot":"","sources":["../../../src/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC;AAC1F,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAoB7C,SAAS,cAAc;IACrB,MAAM,aAAa,GAAG;QACpB,GAAG,EAAE,WAAW;QAChB,KAAK,EAAE,SAAS,CAAC,eAAe;KACjC,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAY,GAAG,GAAG,EAClB,cAAc,GAAG,GAAG;IAEpB,OAAO,aAAa;SACjB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,OAAO,GAAG,IAAI,CAAC,GAAG,GAAG,KAAK,EAAE,CAAC;IAC/B,CAAC,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAED,MAAM,CAAC,MAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE;;;GAGG;AACH,MAAM,UAAU,wBAAwB;IACtC,MAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,MAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,MAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;IAC/E,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,aAA6B;IAC3D,MAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,KAAK,IAAI;QAC7E,CAAC,CAAC,sBAAsB,EAAE;QAC1B,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC;IACxB,MAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,KAAK,SAAS,IAAI,aAAa,CAAC,KAAK,KAAK,IAAI;QACjF,CAAC,CAAC,wBAAwB,EAAE;QAC5B,CAAC,CAAC,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,iBAAiB;IACpD,YACW,WAA0B,EAC1B,QAA8B,EAC7B,SAAiB,EACjB,WAAmB;QAE7B,KAAK,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;QALpB,gBAAW,GAAX,WAAW,CAAe;QAC1B,aAAQ,GAAR,QAAQ,CAAsB;QAC7B,cAAS,GAAT,SAAS,CAAQ;QACjB,gBAAW,GAAX,WAAW,CAAQ;IAG/B,CAAC;IAED,WAAW,CAAC,OAAwB;QAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IAED;;OAEG;IACH,kBAAkB,CAAC,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;SACrC;QAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;YAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;SACvD;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n 
RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Telemetry information. Key/value pairs to include inside the User-Agent string.\n */\nexport type TelemetryInfo = { key?: string; value?: string };\n\n/**\n * Options for adding user agent details to outgoing requests.\n */\nexport interface UserAgentOptions {\n /**\n * String prefix to add to the user agent for outgoing requests.\n * Defaults to an empty string.\n */\n userAgentPrefix?: string;\n}\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"core-http\",\n value: Constants.coreHttpVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? `${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\n/**\n * The default approach to generate user agents.\n * Uses static information from this package, plus system information available from the runtime.\n */\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\n/**\n * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n * @param userAgentData - Telemetry information.\n * @returns A new {@link UserAgentPolicy}.\n */\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key === undefined || userAgentData.key === null\n ? getDefaultUserAgentKey()\n : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value === undefined || userAgentData.value === null\n ? 
getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\n/**\n * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n */\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptions,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n /**\n * Adds the user agent header to the outgoing request.\n */\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js new file mode 100644 index 00000000..47469975 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as tunnel from "tunnel"; +import { URLBuilder } from "./url"; +export function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +export function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +export function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } + else { + return tunnel.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} +//# sourceMappingURL=proxyAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map new file 
mode 100644 index 00000000..741f8ef9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.js","sourceRoot":"","sources":["../../src/proxyAgent.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAGjC,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AAGnC,MAAM,UAAU,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB;IAEzB,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY,CAAC;IACtE,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;KAClE;IACD,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,IAAI,CAAC,EAAE;QACpC,MAAM,IAAI,KAAK,CAAC,6EAA6E,CAAC,CAAC;KAChG;IACD,MAAM,aAAa,GAAiC;QAClD,KAAK,EAAE;YACL,IAAI,EAAE,IAAI;YACV,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE;SACjD;KACF,CAAC;IAEF,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;QACpD,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,GAAG,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE,CAAC;KACxF;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC;KAC9D;IAED,MAAM,cAAc,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;IAC9C,MAAM,YAAY,GAAG,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;IAEpD,MAAM,UAAU,GAAG;QACjB,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;IAEF,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IAC1D,OAAO,SAAS,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;AAC7C,CAAC;AAED,MAAM,UAAU,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAA2C;IAE3C,IAAI,cAAc,IAAI,YAAY,EAAE;QAClC,OAAO,MAAM,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;KAC7C;SAAM,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM;QACL,OAAO,MAAM,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC;KAC3C;AACH,CAAC;AAED,SAAS,WAAW,CAAC,IAAY;IAC/B,sFAAsF;IACtF,oFAAoF;IACpF,OAAO,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,KAAK,CAAC;AACpC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tunnel from \"tunnel\";\nimport { HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { URLBuilder } from \"./url\";\n\nexport type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };\nexport function createProxyAgent(\n requestUrl: string,\n proxySettings: ProxySettings,\n headers?: HttpHeadersLike\n): ProxyAgent {\n const host = URLBuilder.parse(proxySettings.host).getHost() as string;\n if (!host) {\n throw new Error(\"Expecting a non-empty host in proxy settings.\");\n }\n if (!isValidPort(proxySettings.port)) {\n throw new Error(\"Expecting a valid port number in the range of [0, 65535] in proxy settings.\");\n }\n const tunnelOptions: tunnel.HttpsOverHttpsOptions = {\n proxy: {\n host: host,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {},\n },\n };\n\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;\n } else if (proxySettings.username) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;\n }\n\n const isRequestHttps = isUrlHttps(requestUrl);\n const isProxyHttps = isUrlHttps(proxySettings.host);\n\n const proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),\n };\n\n return proxyAgent;\n}\n\nexport function 
isUrlHttps(url: string): boolean {\n const urlScheme = URLBuilder.parse(url).getScheme() || \"\";\n return urlScheme.toLowerCase() === \"https\";\n}\n\nexport function createTunnel(\n isRequestHttps: boolean,\n isProxyHttps: boolean,\n tunnelOptions: tunnel.HttpsOverHttpsOptions\n): http.Agent | https.Agent {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n } else if (isRequestHttps && !isProxyHttps) {\n return tunnel.httpsOverHttp(tunnelOptions);\n } else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n } else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\n\nfunction isValidPort(port: number): boolean {\n // any port in 0-65535 range is valid (RFC 793) even though almost all implementations\n // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports\n return 0 <= port && port <= 65535;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js new file mode 100644 index 00000000..ff08c3dc --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export var QueryCollectionFormat; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. + */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. 
`?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(QueryCollectionFormat || (QueryCollectionFormat = {})); +//# sourceMappingURL=queryCollectionFormat.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map new file mode 100644 index 00000000..2a917a84 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.js","sourceRoot":"","sources":["../../src/queryCollectionFormat.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAN,IAAY,qBAqBX;AArBD,WAAY,qBAAqB;IAC/B;;OAEG;IACH,kCAAS,CAAA;IACT;;OAEG;IACH,kCAAS,CAAA;IACT;;OAEG;IACH,mCAAU,CAAA;IACV;;OAEG;IACH,oCAAW,CAAA;IACX;;OAEG;IACH,wCAAe,CAAA;AACjB,CAAC,EArBW,qBAAqB,KAArB,qBAAqB,QAqBhC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n /**\n * CSV: Each pair of segments joined by a single comma.\n */\n Csv = \",\",\n /**\n * SSV: Each pair of segments joined by a single space character.\n */\n Ssv = \" \",\n /**\n * TSV: Each pair of segments joined by a single tab character.\n */\n Tsv = \"\\t\",\n /**\n * Pipes: Each pair of segments joined by a single pipe character.\n */\n Pipes = \"|\",\n /**\n * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2`\n */\n Multi = \"Multi\",\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/restError.js b/node_modules/@azure/core-http/dist-esm/src/restError.js new file mode 100644 index 00000000..88d4a780 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/restError.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Sanitizer } from "./util/sanitizer"; +import { custom } from "./util/inspect"; +const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ +export class RestError extends Error { + constructor(message, code, statusCode, request, response) { + super(message); + this.name = "RestError"; + this.code = code; + this.statusCode = statusCode; + this.request = request; + this.response = response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. 
+ */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/restError.js.map b/node_modules/@azure/core-http/dist-esm/src/restError.js.map new file mode 100644 index 00000000..8270ecfe --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/restError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.js","sourceRoot":"","sources":["../../src/restError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAE7C,OAAO,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAExC,MAAM,cAAc,GAAG,IAAI,SAAS,EAAE,CAAC;AAEvC;;GAEG;AACH,MAAM,OAAO,SAAU,SAAQ,KAAK;IA8BlC,YACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC;QAEhC,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QAEzB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;IACnD,CAAC;IAED;;OAEG;IACH,CAAC,MAAM,CAAC;QACN,OAAO,cAAc,IAAI,CAAC,OAAO,OAAO,cAAc,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC;IAC1E,CAAC;;AAnDD;;GAEG;AACa,4BAAkB,GAAW,oBAAoB,CAAC;AAClE;;GAEG;AACa,qBAAW,GAAW,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { Sanitizer } from \"./util/sanitizer\";\nimport { WebResourceLike } from \"./webResource\";\nimport { custom } from \"./util/inspect\";\n\nconst errorSanitizer = new Sanitizer();\n\n/**\n * An error resulting from an HTTP request to a service endpoint.\n */\nexport class RestError extends Error {\n /**\n * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.)\n */\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n /**\n * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete.\n */\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n /**\n * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT).\n */\n code?: string;\n /**\n * The HTTP status code of the response, if one was returned.\n */\n statusCode?: number;\n /**\n * Outgoing request.\n */\n request?: WebResourceLike;\n /**\n * Incoming response.\n */\n response?: HttpOperationResponse;\n /**\n * Any additional details. 
In the case of deserialization errors, can be the processed response.\n */\n details?: unknown;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ) {\n super(message);\n this.name = \"RestError\";\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n\n /**\n * Logging method for util.inspect in Node\n */\n [custom](): string {\n return `RestError: ${this.message} \\n ${errorSanitizer.sanitize(this)}`;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serializer.js b/node_modules/@azure/core-http/dist-esm/src/serializer.js new file mode 100644 index 00000000..c8b47624 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serializer.js @@ -0,0 +1,906 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* eslint-disable eqeqeq */ +import * as base64 from "./util/base64"; +import * as utils from "./util/utils"; +import { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +export class Serializer { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. 
+ */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value != undefined) { + const valueAsNumber = value; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + const valueAsArray = value; + if (MaxItems != undefined && valueAsArray.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && valueAsArray.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && valueAsArray.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && valueAsArray.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper, object, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && object == undefined) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper, responseBody, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xmlCharKey; + const castResponseBody = responseBody; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (castResponseBody[XML_ATTRKEY] != undefined && + castResponseBody[xmlCharKey] != undefined) { + responseBody = castResponseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return base64.decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = base64.encodeByteArray(value); + } + return returnValue; +} +function serializeBase64UrlType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = bufferToBase64Url(value) || ""; + } + return returnValue; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!utils.isDuration(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + const elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xmlCharKey] = serializedValue; + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by `className`. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + * @param objectName - Name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if (childObject == undefined && + (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? 
`xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName != undefined) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; + parentObject[XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... + + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? _a : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + for (const item of paths) { + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. 
The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + const element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + const tempArray = []; + for (let i = 0; 
i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + const discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + const typeName = mapper.type.uberParent || mapper.type.className; + const indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +export function serializeObject(toSerialize) { + const castToSerialize = toSerialize; + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = base64.encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + const array = []; + for (let i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + const dictionary = {}; + for (const property in toSerialize) { + dictionary[property] = serializeObject(castToSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + const result = {}; + for (const key of o) { + result[key] = key; + } + return result; +} +/** + * String enum containing the string types of property mappers. 
+ */ +// eslint-disable-next-line @typescript-eslint/no-redeclare +export const MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serializer.js.map b/node_modules/@azure/core-http/dist-esm/src/serializer.js.map new file mode 100644 index 00000000..86e9af37 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serializer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../../src/serializer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,2BAA2B;AAE3B,OAAO,KAAK,MAAM,MAAM,eAAe,CAAC;AACxC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,0BAA0B,CAAC;AAEvF,qLAAqL;AAErL;;;;GAIG;AACH,MAAM,OAAO,UAAU;IACrB;IACE;;OAEG;IACa,eAAuC,EAAE;IACzD;;OAEG;IACa,KAAe;QAJf,iBAAY,GAAZ,YAAY,CAA6B;QAIzC,UAAK,GAAL,KAAK,CAAU;IAC9B,CAAC;IAEJ;;;;;OAKG;IACH,mBAAmB,CAAC,MAAc,EAAE,KAAc,EAAE,UAAkB;QACpE,MAAM,cAAc,GAAG,CACrB,cAAuC,EACvC,eAAoB,EACb,EAAE;YACT,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,iBAAiB,KAAK,oCAAoC,cAAc,MAAM,eAAe,GAAG,CAC/G,CAAC;QACJ,CAAC,CAAC;QACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YAC5C,MAAM,aAAa,GAAG,KAAe,CAAC;YACtC,MAAM,EACJ,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,SAAS,EACT,UAAU,EACV,OAAO,EACP,WAAW,GACZ,GAAG,MAAM,CAAC,WAAW,CAAC;YACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;gBACtE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;gBACtE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EAAE;gBACrE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EAAE;gBACrE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,MAAM,YAAY,GAAG,KAAc,CAAC;YACpC,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;gBAC3D,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;gBAC7D,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;gBAC3D,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;gBAC7D,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,aAAa,GAAG,UAAU,KAAK,CAAC,EAAE;gBAC/D,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;aAC1C;YACD,IAAI,OAAO,EAAE;gBACX,MAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;gBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;oBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;iBACpC;aACF;YACD,IACE,WAAW;gBACX,YAAY,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,CAAS,EAAE,EAAc,EAAE,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EACnF;gBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;aAC5C;SACF;IACH,CAAC;IAED;;;;;;;;OAQG;IACH,SAAS,CACP,MAAc,EACd,MAAe,EACf,UAAmB,EACnB,UAA6B,EAAE;;QAE/B,MAAM,cAAc,GAAgC;YAClD,QAAQ,EAAE,MAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;YAChC,WAAW,EAAE,MAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;YACzC,UAAU,EAAE,MAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,OAAO,GAAQ,EAAE,CAAC;QACtB,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YAC5C,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,MAAM,
GAAG,MAAM,CAAC,YAAY,CAAC;SAC9B;QAED,mDAAmD;QACnD,sDAAsD;QACtD,mDAAmD;QACnD,wBAAwB;QACxB,iCAAiC;QACjC,0CAA0C;QAC1C,0CAA0C;QAC1C,qCAAqC;QACrC,0CAA0C;QAE1C,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,MAAM,CAAC;QAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,uBAAuB,CAAC,CAAC;SACvD;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,+BAA+B,CAAC,CAAC;SAC/D;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;YACtD,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,kBAAkB,CAAC,CAAC;SAClD;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;SAClB;aAAM;YACL,8BAA8B;YAC9B,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YACrD,IAAI,UAAU,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,IAAI,EAAE;gBACvC,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,+CAA+C,CAAC,KAAK,IAAI,EAAE;gBACrF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;aAC/D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBAC/C,MAAM,UAAU,GAAe,MAAoB,CAAC;gBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;aAChF;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,sDAAsD,CAAC,KAAK,IAAI,EACjF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aAC9D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;aACpE;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;aACpE;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,qBAAqB,CAC7B,IAAI,EACJ,MAAwB,EACxB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,uBAAuB,CAC/B,IAAI,EACJ,MAA0B,EAC1B,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAC9B,IAAI,EACJ,MAAyB,EACzB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;aACH;SACF;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,MAAc,EACd,YAAqB,EACrB,UAAkB,EAClB,UAA6B,EAAE;;QAE/B,MAAM,cAAc,GAAgC;YAClD,QAAQ,EAAE,MAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;YAChC,WAAW,EAAE,MAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;YACzC,UAAU,EAAE,MAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,YAAY,IAAI,SAAS,EAAE;YAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACzE,sEAAsE;gBACtE,qDAAqD;gBACrD,qEAAqE;gBACrE,YAAY,GAAG,EAAE,CAAC;aACnB;YACD,+FAA+F;YAC/F,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;gBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;aACpC;YACD,OAAO,YAAY,CAAC;SACrB;QAED,IAAI,OAAY,CAAC;QACjB,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAC7C,OAAO,GAAG,wBAAwB,CAChC,IAAI,EACJ,MAAyB,EACzB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;SACH;aAAM;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;gBAC7C,MAAM,gBAAgB,GAAG,YAAuC,CAAC;gBACjE;;;;mBAIG;gBACH,IACE,gBAAgB,CAAC,WAAW,CAAC,IAAI,SAAS;oBAC1C,gBAAgB,CAAC,UAAU,CAAC,IAAI,SAAS,EACzC;oBACA,YAAY,GAAG,gBAAgB,CAAC,UAAU,CAAC,CAAC;iBAC7C;aACF;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;gBAC1C,OAAO,GAAG,UAAU,CAAC,YAAsB,CAAC,CAAC;gBAC7C,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAClD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;iBAChB;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;iBACjB;qBAAM;oBACL,OAAO,GAAG,YAAY,CAAC
;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,kDAAkD,CAAC,KAAK,IAAI,EAAE;gBACxF,OAAO,GAAG,YAAY,CAAC;aACxB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,oCAAoC,CAAC,KAAK,IAAI,EAAE;gBAC1E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAsB,CAAC,CAAC;aAC5C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,cAAc,CAAC,YAAsB,CAAC,CAAC;aAClD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,YAAsB,CAAC,CAAC;aACvD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,oBAAoB,CAAC,YAAsB,CAAC,CAAC;aACxD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,uBAAuB,CAC/B,IAAI,EACJ,MAAwB,EACxB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;aACH;SACF;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;CACF;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;IACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;IACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QAC1C,EAAE,GAAG,CAAC;KACP;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW;IACpC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,CAAC,CAAC,MAAM,YAAY,UAAU,CAAC,EAAE;QACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;KAC5F;IACD,wBAAwB;IACxB,MAAM,GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;IAC3C,uBAAuB;IACvB,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW;IACvC,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;KACxF;IACD,uBAAuB;IACvB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;IAChD,wBAAwB;IACxB,OAAO,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB;IAClD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;IACtB,IAAI,IAAI,EAAE;QACR,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAEjC,KAAK,MAAM,IAAI,IAAI,QAAQ,EAAE;YAC3B,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;aACvD;iBAAM;gBACL,YAAY,IAAI,IAAI,CAAC;gBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;aACnB;SACF;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB;IACtC,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;KAC3B;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;IAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;YACxC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,eAAe,KAAK,0BAA0B,CAAC,CAAC;aAC9E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;YAC/C,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,gBAAgB,KAAK,2BAA2B,CAAC,CAAC;aAChF;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;YAC7C,IAAI,CAAC,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,KAAK,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,gBAAgB,KAAK,4CAA4C,CAC/E,CAAC;aACH;S
ACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,eAAe,KAAK,2BAA2B,CAAC,CAAC;aAC/E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;YAC/C,MAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;gBACvB,UAAU,KAAK,UAAU;gBACzB,CAAC,CAAC,KAAK,YAAY,WAAW,CAAC;gBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC1B,CAAC,CAAC,CAAC,OAAO,IAAI,KAAK,UAAU,IAAI,OAAO,IAAI,KAAK,QAAQ,CAAC,IAAI,KAAK,YAAY,IAAI,CAAC,EACpF;gBACA,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,uGAAuG,CACrH,CAAC;aACH;SACF;KACF;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;IAClF,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CACb,qDAAqD,UAAU,mBAAmB,CACnF,CAAC;KACH;IACD,MAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE;QAC5C,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;SACnD;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;IACxB,CAAC,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACb,GAAG,KAAK,6BAA6B,UAAU,2BAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,GAAG,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,8BAA8B,CAAC,CAAC;SAC9D;QACD,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC;KAC7C;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,8BAA8B,CAAC,CAAC;SAC9D;QACD,WAAW,GAAG,iBAAiB,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;KAC9C;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;YACtC,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,4DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK;gBACH,KAAK,YAAY,IAAI;oBACnB,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;oBACtC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;SACtD;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,4DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,oBAAoB,CAAC,KAAK,IAAI,EAAE;YACxD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,6DAA6D,CAAC,CAAC;aAC7F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,qEAAqE;oBAChF,mDAAmD,CACtD,CAAC;aACH;YACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;SAC/B;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,sDAAsD,KAAK,IAAI,CAC7E,CAA
[... remainder of node_modules/@azure/core-http/dist-esm/src/serializer.js.map (generated source-map mappings and embedded sources) omitted ...]
\ No newline at end of file
diff --git a/node_modules/@azure/core-http/dist-esm/src/serviceClient.js b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js
new file mode 100644
index 00000000..b817630b
--- /dev/null
+++ b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js
@@ -0,0 +1,622 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import * as utils from "./util/utils"; +import { MapperType } from "./serializer"; +import { DefaultDeserializationOptions, deserializationPolicy, } from "./policies/deserializationPolicy"; +import { DefaultKeepAliveOptions, keepAlivePolicy } from "./policies/keepAlivePolicy"; +import { DefaultRedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +import { DefaultRetryOptions, exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +import { logPolicy } from "./policies/logPolicy"; +import { getPathStringFromParameter, getPathStringFromParameterPath, } from "./operationParameter"; +import { getStreamResponseStatusCodes } from "./operationSpec"; +import { WebResource, isWebResourceLike, } from "./webResource"; +import { RequestPolicyOptions, } from "./policies/requestPolicy"; +import { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; +import { isNode } from "./util/utils"; +import { isTokenCredential } from "@azure/core-auth"; +import { getDefaultUserAgentHeaderName, getDefaultUserAgentValue, userAgentPolicy, } from "./policies/userAgentPolicy"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { URLBuilder } from "./url"; +import { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +import { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +import { getCachedDefaultHttpClient } from "./httpClientCache"; +import { logger } from "./log"; +import { ndJsonPolicy } from "./policies/ndJsonPolicy"; +import { proxyPolicy } from "./policies/proxyPolicy"; +import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; +import { signingPolicy } from "./policies/signingPolicy"; +import { stringifyXML } from "./util/xml"; +import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +import { tracingPolicy } from "./policies/tracingPolicy"; +/** + * ServiceClient sends service requests and receives responses. + */ +export class ServiceClient { + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(credentials, + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ + options) { + if (!options) { + options = {}; + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || getCachedDefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + let requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + logger.info("ServiceClient: using custom request policies"); + requestPolicyFactories = options.requestPolicyFactories; + } + else { + let authPolicyFactory = undefined; + if (isTokenCredential(credentials)) { + logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); + // Create a wrapped RequestPolicyFactory here so that we can provide the + // correct scope to the BearerTokenAuthenticationPolicy at the first time + // one is requested. 
This is needed because generated ServiceClient + // implementations do not set baseUri until after ServiceClient's constructor + // is finished, leaving baseUri empty at the time when it is needed to + // build the correct scope name. + const wrappedPolicyFactory = () => { + let bearerTokenPolicyFactory = undefined; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const serviceClient = this; + const serviceClientOptions = options; + return { + create(nextPolicy, createOptions) { + const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); + if (!credentialScopes) { + throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); + } + if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { + bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); + } + return bearerTokenPolicyFactory.create(nextPolicy, createOptions); + }, + }; + }; + authPolicyFactory = wrappedPolicyFactory(); + } + else if (credentials && typeof credentials.signRequest === "function") { + logger.info("ServiceClient: creating signing policy from provided credentials"); + authPolicyFactory = signingPolicy(credentials); + } + else if (credentials !== undefined && credentials !== null) { + throw new Error("The credentials argument must implement the TokenCredential interface"); + } + logger.info("ServiceClient: using default request policies"); + requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); + if (options.requestPolicyFactories) { + // options.requestPolicyFactories can also be a function that manipulates + // the default requestPolicyFactories array + const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + sendRequest(options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + let httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + let httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. 
+ */ + async sendOperationRequest(operationArguments, operationSpec, callback) { + var _a; + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + const httpRequest = new WebResource(); + let result; + try { + const baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + const requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (const queryParameter of operationSpec.queryParameters) { + let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue !== undefined && queryParameterValue !== null) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null) { + if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + // The collection is empty, no need to try serializing the current queryParam + continue; + } + else { + for (const index in queryParameterValue) { + const item = queryParameterValue[index]; + queryParameterValue[index] = + item === undefined || item === null ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (const index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null && + queryParameter.collectionFormat !== QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + const contentType = operationSpec.contentType || this.requestContentType; + if (contentType && operationSpec.requestBody) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue !== undefined && headerValue !== null) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + const options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (const customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + if (options.spanOptions) { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
+ httpRequest.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + httpRequest.tracingContext = options.tracingContext; + } + if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { + httpRequest.shouldDeserialize = options.shouldDeserialize; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseStatusCodes === undefined) { + httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); + } + let rawResponse; + let sendRequestError; + try { + rawResponse = await this.sendRequest(httpRequest); + } + catch (error) { + sendRequestError = error; + } + if (sendRequestError) { + if (sendRequestError.response) { + sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || + operationSpec.responses["default"]); + } + result = Promise.reject(sendRequestError); + } + else { + result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); + } + } + catch (error) { + result = Promise.reject(error); + } + const cb = callback; + if (cb) { + result + .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) + .catch((err) => cb(err)); + } + return result; + } +} +export function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + var _a, _b, _c, _d, _e, _f; + const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; + const updatedOptions = { + rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", + includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, + }; + const xmlCharKey = serializerOptions.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { + const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(utils.prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === MapperType.String && + (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); + } + } + } +} +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + let result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(authPolicyFactory, options) { + const factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (authPolicyFactory) { + factories.push(authPolicyFactory); + } + const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + factories.push(redirectPolicy()); + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + if (isNode) { + factories.push(proxyPolicy(options.proxySettings)); + } + factories.push(logPolicy({ logger: logger.info })); + return factories; +} +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. 
+ * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +export function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { + const requestPolicyFactories = []; + if (pipelineOptions.sendStreamingJson) { + requestPolicyFactories.push(ndJsonPolicy()); + } + let userAgentValue = undefined; + if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { + const userAgentInfo = []; + userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); + // Add the default user agent value if it isn't already specified + // by the userAgentPrefix option. + const defaultUserAgentInfo = getDefaultUserAgentValue(); + if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { + userAgentInfo.push(defaultUserAgentInfo); + } + userAgentValue = userAgentInfo.join(" "); + } + const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); + const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); + const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); + if (isNode) { + requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); + } + const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); + const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); + requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); + if (redirectOptions.handleRedirects) { + requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); + } + if (authPolicyFactory) { + requestPolicyFactories.push(authPolicyFactory); + } + requestPolicyFactories.push(logPolicy(loggingOptions)); + if (isNode && pipelineOptions.decompressResponse === false) { + requestPolicyFactories.push(disableResponseDecompressionPolicy()); + } + return { + httpClient: pipelineOptions.httpClient, + requestPolicyFactories, + }; +} +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. 
+ */ +export function getPropertyParent(parent, propertyPath) { + if (parent && propertyPath) { + const propertyPathLength = propertyPath.length; + for (let i = 0; i < propertyPathLength - 1; ++i) { + const propertyName = propertyPath[i]; + if (!parent[propertyName]) { + parent[propertyName] = {}; + } + parent = parent[propertyName]; + } + } + return parent; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +export function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var _a; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); + if (propertyValue !== undefined && propertyValue !== null) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent !== undefined && parent !== null && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. 
+ */ +export function flattenResponse(_response, responseSpec) { + const parsedHeaders = _response.parsedHeaders; + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const addOperationResponse = (obj) => { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + const typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (typeName === "Sequence" || isPageableResponse) { + const arrayResponse = [...(_response.parsedBody || [])]; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + utils.isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); +} +function getCredentialScopes(options, baseUri) { + if (options === null || options === void 0 ? void 0 : options.credentialScopes) { + const scopes = options.credentialScopes; + return Array.isArray(scopes) + ? 
scopes.map((scope) => new URL(scope).toString()) + : new URL(scopes).toString(); + } + if (baseUri) { + return `${baseUri}/.default`; + } + return undefined; +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map new file mode 100644 index 00000000..4c669b17 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.js","sourceRoot":"","sources":["../../src/serviceClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAA6C,UAAU,EAAc,MAAM,cAAc,CAAC;AACjG,OAAO,EACL,6BAA6B,EAE7B,qBAAqB,GACtB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,uBAAuB,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AACtF,OAAO,EAAE,sBAAsB,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AACnF,OAAO,EAAE,mBAAmB,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAEhG,OAAO,EAAoB,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EAGL,0BAA0B,EAC1B,8BAA8B,GAC/B,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAiB,4BAA4B,EAAE,MAAM,iBAAiB,CAAC;AAC9E,OAAO,EAGL,WAAW,EAEX,iBAAiB,GAClB,MAAM,eAAe,CAAC;AACvB,OAAO,EAGL,oBAAoB,GACrB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,0BAA0B,CAAC;AACvF,OAAO,EAAmB,MAAM,EAAE,MAAM,cAAc,CAAC;AACvD,OAAO,EAAmB,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACtE,OAAO,EACL,6BAA6B,EAC7B,wBAAwB,EACxB,eAAe,GAChB,MAAM,4BAA4B,CAAC;AAMpC,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAEhE,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AACnC,OAAO,EAAE,+BAA+B,EAAE,MAAM,4CAA4C,CAAC;AAC7F,OAAO,EAAE,kCAAkC,EAAE,MAAM,+CAA+C,CAAC;AACnG,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAC/D,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AACvD,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AAC1C,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAmGzD;;GAEG;AACH,MAAM,OAAO,aAAa;IAsBxB;;;;OAIG;IACH,YACE,WAAwD;IACxD,iEAAiE;IACjE,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,CAAC;QACtE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElF,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;YACjD,MAAM,CAAC,IAAI,CAAC,8CAA8C,CAAC,CAAC;YAC5D,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;SACzD;aAAM;YACL,IAAI,iBAAiB,GAAqC,SAAS,CAAC;YACpE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;gBAClC,MAAM,CAAC,IAAI,CACT,sFAAsF,CACvF,CAAC;gBACF,wEAAwE;gBACxE,yEAAyE;gBACzE,oEAAoE;gBACpE,6EAA6E;gBAC7E,sEAAsE;gBACtE,gCAAgC;gBAChC,MAAM,oBAAoB,GAA+B,GAAG,EAAE;oBAC5D,IAAI,wBAAwB,GAAqC,SAAS,CAAC;oBAC3E,4DAA4D;oBAC5D,MAAM,aAAa,GAAG,IAAI,CAAC;oBAC3B,MAAM,oBAAoB,GAAG,OAAO,CAAC;oBACrC,OAAO;wBACL,MAAM,CAAC,UAAyB,EAAE,aAAmC;4BACnE,MAAM,gBAAgB,GAAG,mBAAmB,CAC1C,oBAAoB,EACpB,aAAa,CAAC,OAAO,CACtB,CAAC;4BAEF,IAAI,CAAC,gBAAgB,EAAE;gCACrB,MAAM,IAAI,KAAK,CACb,mKAAmK,CACpK,CAAC;6BACH;4BAED,IAAI,wBAAwB,KAAK,SAAS,IAAI,wBAAwB,KAAK,IAAI,EAAE;gCAC/E,wBAAwB,GAAG,+BAA+B,CACxD,WAAW,EACX,gBAAgB,CACjB,CAAC;6BACH;4BAED,OAAO,wBAAwB,CAAC,MAAM,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;wBACpE,CAAC;qBACF,CAAC;gBACJ,CAAC,CAAC;gBAEF,iBAAiB,GAAG,oBAAoB,EAAE,CAAC;aAC5C;iBAAM,IAAI,WAAW,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;gBACvE,MAAM,CAAC,IAAI,CAAC,kEAAkE,CAAC,CAAC;gBAChF,iBAAiB,GAAG,aAAa,CAAC,WAAW,CAAC,CAAC;aAChD;i
BAAM,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;gBAC5D,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;aAC1F;YAED,MAAM,CAAC,IAAI,CAAC,+CAA+C,CAAC,CAAC;YAC7D,sBAAsB,GAAG,mCAAmC,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;YACzF,IAAI,OAAO,CAAC,sBAAsB,EAAE;gBAClC,yEAAyE;gBACzE,2CAA2C;gBAC3C,MAAM,yBAAyB,GAC7B,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;gBACzD,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;iBACpD;aACF;SACF;QACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;IACxD,CAAC;IAED;;OAEG;IACH,WAAW,CAAC,OAAgD;QAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,IAAI,WAA4B,CAAC;QACjC,IAAI;YACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;aACvB;iBAAM;gBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;gBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC5C;SACF;QAAC,OAAO,KAAU,EAAE;YACnB,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC9B;QAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;gBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;aACH;SACF;QACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,oBAAoB,CACxB,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;;QAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;YACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;YACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;SACxC;QAED,MAAM,iBAAiB,GAAG,MAAA,kBAAkB,CAAC,OAAO,0CAAE,iBAAiB,CAAC;QACxE,MAAM,WAAW,GAAoB,IAAI,WAAW,EAAE,CAAC;QAEvD,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,MAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;aACH;YAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;YAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,MAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;gBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;aAC3C;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBACzE,KAAK,MAAM,YAAY,IAAI,aAAa,CAAC,aAAa,EAAE;oBACtD,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,EACxC,iBAAiB,CAClB,CAAC;oBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;wBAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;qBAC3D;oBACD,UAAU,CAAC,UAAU,CACnB,IAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,GAAG,EACrF,iBAAiB,CAClB,CAAC;iBACH;aACF;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC7E,KAAK,MAAM,cAAc,IAAI,aAAa,CAAC,eAAe,EAAE;oBAC1D,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,mBAAmB,KAAK,SAAS,IAAI,mBAAmB,KAAK,IAAI,EAAE;wBACrE,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,EAC1C,iBAAiB,CAClB,CAAC;wBACF,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;4BAC7C,cAAc,CAAC,gBAAgB,KAAK,IAAI,EACxC;4BACA,IAAI,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK,EAAE;gCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;oCACpC,6EAA6E;oCAC7E,SAAS;iCACV;qCAAM;oCACL,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,MAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC;4CACxB,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;qCAC9D;iCACF;6BACF;iCAAM,IACL,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC
,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;yBACF;wBACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;4BAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;gCACtC,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;oCACvC,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;wCACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;qCAC7E;iCACF;6BACF;iCAAM;gCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;6BAC/D;yBACF;wBACD,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;4BAC7C,cAAc,CAAC,gBAAgB,KAAK,IAAI;4BACxC,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK;4BAC/D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;4BAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;yBACjF;wBACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;qBACH;iBACF;aACF;YACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,MAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;YACzE,IAAI,WAAW,IAAI,aAAa,CAAC,WAAW,EAAE;gBAC5C,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;aACtD;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;gBAClC,KAAK,MAAM,eAAe,IAAI,aAAa,CAAC,gBAAgB,EAAE;oBAC5D,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;wBACrD,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,EAC3C,iBAAiB,CAClB,CAAC;wBACF,MAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;6BACxE,sBAAsB,CAAC;wBAC1B,IAAI,sBAAsB,EAAE;4BAC1B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;gCAC1C,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;6BACzE;yBACF;6BAAM;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;gCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;yBACH;qBACF;iBACF;aACF;YAED,MAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;YAC3E,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;oBACzB,KAAK,MAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;wBACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;qBACpF;iBACF;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBAC/C;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;oBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;iBACvC;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;oBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;iBACzD;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;oBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;iBAC7D;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,+HAA+H;oBAC9H,WAAmB,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBACxD;gBAED,IAAI,OAAO,CAAC,cAAc,EAAE;oBAC1B,WAAW,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;iBACrD;gBAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,SAAS,IAAI,OAAO,CAAC,iBAAiB,KAAK,IAAI,EAAE;oBACjF,WAAW,CAAC,iBAAiB,GAAG,OAAO,CAAC,iBAAiB,CAAC;iBAC3D;aACF;YAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;YAE3E,IAAI,WAAW,CAAC,yBAAyB,KAAK,SAAS,EAAE;gBACvD,WAAW,CAAC,yBAAyB,GAAG,4BAA4B,CAAC,aAAa,CAAC,CAAC;aACrF;YAED,IAAI,WAAkC,CAAC;YACvC,IAAI,gBAAgB,CAAC;YACrB,IAAI;gBACF,WAAW,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;aACnD;YAAC,OAAO,KAAU,EAAE;gBACnB,gBAAgB,GAAG,KAAK,CAAC;aAC1B;YACD,IAAI,gBAAgB,EAAE;gBACpB,IAAI,gBAAgB,CAAC,QAAQ,EAAE;oBAC7B,gBAAgB,CAAC,OAAO,GAAG,eAAe,CACxC,gBAAgB,CAAC,QAAQ,EACzB,aAAa,CAAC,SAAS,CAAC,gBAAgB,CAAC,UAAU,CAAC;wBAClD,aAAa,CAAC,SAAS,CAAC,SAAS,CAAC,CACrC,CAAC;iBACH;gBACD,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC;aAC3C;iBAAM;gBACL,MAAM,GAAG,OAAO,CAAC,OAAO,CACtB,eAAe,CAAC,WAAY,EAAE,aAAa,CAAC,SAAS,CAAC,WAAY,CAAC,MAAM,CAAC,CAAC,CAC5E,CAAC;aACH;SACF;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,GAAG,OAAO,CAAC,MAAM,C
AAC,KAAK,CAAC,CAAC;SAChC;QAED,MAAM,EAAE,GAAG,QAAQ,CAAC;QACpB,IAAI,EAAE,EAAE;YACN,MAAM;iBACH,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC;iBACvF,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;SAC5B;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAED,MAAM,UAAU,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;;IAE5B,MAAM,iBAAiB,GAAG,MAAA,MAAA,kBAAkB,CAAC,OAAO,0CAAE,iBAAiB,mCAAI,EAAE,CAAC;IAC9E,MAAM,cAAc,GAAgC;QAClD,QAAQ,EAAE,MAAA,iBAAiB,CAAC,QAAQ,mCAAI,EAAE;QAC1C,WAAW,EAAE,MAAA,iBAAiB,CAAC,WAAW,mCAAI,KAAK;QACnD,UAAU,EAAE,MAAA,iBAAiB,CAAC,UAAU,mCAAI,WAAW;KACxD,CAAC;IAEF,MAAM,UAAU,GAAG,iBAAiB,CAAC,UAAU,CAAC;IAChD,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;QACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;QAEF,MAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;QACpD,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,cAAc,EAAE,cAAc,EAAE,YAAY,EAAE,kBAAkB,EAAE,GAC3F,UAAU,CAAC;QACb,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QAEtC,IAAI;YACF,IAAI,CAAC,WAAW,CAAC,IAAI,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,QAAQ,EAAE;gBAC7E,MAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;gBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,EAC9B,cAAc,CACf,CAAC;gBAEF,MAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAEhD,IAAI,aAAa,CAAC,KAAK,EAAE;oBACvB,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,SAAS,kBAAkB,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;oBAC9E,MAAM,KAAK,GAAG,wBAAwB,CACpC,YAAY,EACZ,QAAQ,EACR,QAAQ,EACR,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;oBACF,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7B,KAAK,CAAC,kBAAkB,CACtB,KAAK,EACL,cAAc,IAAI,OAAO,IAAI,cAAe,EAC5C,QAAQ,EACR,YAAY,CACb,EACD;4BACE,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;yBACX,CACF,CAAC;qBACH;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,KAAK,EAAE;4BACrC,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;yBACX,CAAC,CAAC;qBACJ;iBACF;qBAAM,IACL,QAAQ,KAAK,UAAU,CAAC,MAAM;oBAC9B,CAAC,CAAA,MAAA,aAAa,CAAC,WAAW,0CAAE,KAAK,CAAC,YAAY,CAAC,KAAI,aAAa,CAAC,SAAS,KAAK,MAAM,CAAC,EACtF;oBACA,oEAAoE;oBACpE,2BAA2B;oBAC3B,OAAO;iBACR;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;iBACrD;aACF;SACF;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CACb,UAAU,KAAK,CAAC,OAAO,2CAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,GAAG,CACL,CAAC;SACH;KACF;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;QAC1B,KAAK,MAAM,iBAAiB,IAAI,aAAa,CAAC,kBAAkB,EAAE;YAChE,MAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;YACF,IAAI,sBAAsB,KAAK,SAAS,IAAI,sBAAsB,KAAK,IAAI,EAAE;gBAC3E,MAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,EAC7C,cAAc,CACf,CAAC;aACH;SACF;KACF;AACH,CAAC;AAED;;GAEG;AACH,SAAS,wBAAwB,CAC/B,YAAgC,EAChC,QAAgB,EAChB,QAAgB,EAChB,eAAoB,EACpB,OAAoC;IAEpC,2FAA2F;IAC3F,sDAAsD;IACtD,IAAI,YAAY,IAAI,CAAC,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;QAC/E,MAAM,MAAM,GAAQ,EAAE,CAAC;QACvB,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,YAAY,EAAE,CAAC;QACnD,OAAO,MAAM,CAAC;KACf;IAED,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;IAEjC,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QA
C7B,MAAM,GAAG,KAAK,CAAC;KAChB;SAAM;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;QAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;YAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;SACxB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,iBAAmD,EACnD,OAA6B;IAE7B,MAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;KAClF;IAED,IAAI,iBAAiB,EAAE;QACrB,SAAS,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;KACnC;IAED,MAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,MAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;QAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;KAC5F;IACD,SAAS,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC;IACjC,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;QAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;KACzC;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;IAE3E,IAAI,MAAM,EAAE;QACV,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;KACpD;IAED,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;IAEnD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,yBAAyB,CACvC,eAAwC,EACxC,iBAAwC;IAExC,MAAM,sBAAsB,GAA2B,EAAE,CAAC;IAE1D,IAAI,eAAe,CAAC,iBAAiB,EAAE;QACrC,sBAAsB,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC;KAC7C;IAED,IAAI,cAAc,GAAG,SAAS,CAAC;IAC/B,IAAI,eAAe,CAAC,gBAAgB,IAAI,eAAe,CAAC,gBAAgB,CAAC,eAAe,EAAE;QACxF,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAAC;QAErE,iEAAiE;QACjE,iCAAiC;QACjC,MAAM,oBAAoB,GAAG,wBAAwB,EAAE,CAAC;QACxD,IAAI,aAAa,CAAC,OAAO,CAAC,oBAAoB,CAAC,KAAK,CAAC,CAAC,EAAE;YACtD,aAAa,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;SAC1C;QAED,cAAc,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC1C;IAED,MAAM,gBAAgB,mCACjB,uBAAuB,GACvB,eAAe,CAAC,gBAAgB,CACpC,CAAC;IAEF,MAAM,YAAY,mCACb,mBAAmB,GACnB,eAAe,CAAC,YAAY,CAChC,CAAC;IAEF,MAAM,eAAe,mCAChB,sBAAsB,GACtB,eAAe,CAAC,eAAe,CACnC,CAAC;IAEF,IAAI,MAAM,EAAE;QACV,sBAAsB,CAAC,IAAI,CAAC,WAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;KACxE;IAED,MAAM,sBAAsB,mCACvB,6BAA6B,GAC7B,eAAe,CAAC,sBAAsB,CAC1C,CAAC;IAEF,MAAM,cAAc,qBACf,eAAe,CAAC,cAAc,CAClC,CAAC;IAEF,sBAAsB,CAAC,IAAI,CACzB,aAAa,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,EAC5C,eAAe,CAAC,gBAAgB,CAAC,EACjC,eAAe,CAAC,EAAE,KAAK,EAAE,cAAc,EAAE,CAAC,EAC1C,6BAA6B,EAAE,EAC/B,qBAAqB,CAAC,sBAAsB,CAAC,oBAAoB,CAAC,EAClE,qBAAqB,EAAE,EACvB,sBAAsB,EAAE,EACxB,sBAAsB,CACpB,YAAY,CAAC,UAAU,EACvB,YAAY,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB,CAC/B,CACF,CAAC;IAEF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,sBAAsB,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;KACzE;IAED,IAAI,iBAAiB,EAAE;QACrB,sBAAsB,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;KAChD;IAED,sBAAsB,CAAC,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,CAAC;IAEvD,IAAI,MAAM,IAAI,eAAe,CAAC,kBAAkB,KAAK,KAAK,EAAE;QAC1D,sBAAsB,CAAC,IAAI,CAAC,kCAAkC,EAAE,CAAC,CAAC;KACnE;IAED,OAAO;QACL,UAAU,EAAE,eAAe,CAAC,UAAU;QACtC,sBAAsB;KACvB,CAAC;AACJ,CAAC;AAID;;;GAGG;AACH,MAAM,UAAU,iBAAiB,CAAC,MAAsB,EAAE,YAAsB;IAC9E,IAAI,MAAM,IAAI,YAAY,EAAE;QAC1B,MAAM,kBAAkB,GAAW,YAAY,CAAC,MAAM,CAAC;QACvD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE;YAC/C,MAAM,YAAY,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;YAC7C,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,EAAE;gBACzB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;aAC3B;YACD,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC,CAAC;SAC/B;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;IAEtB
,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;;IAEtB,IAAI,KAAU,CAAC;IACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;KACjC;IACD,MAAM,iBAAiB,GAAG,MAAA,kBAAkB,CAAC,OAAO,0CAAE,iBAAiB,CAAC;IACxE,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;gBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;aACtC;iBAAM;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;gBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;iBACnF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;gBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;wBACb,eAAe,CAAC,QAAQ;4BACxB,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;iBAClE;gBACD,KAAK,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC;aAC7F;YAED,0CAA0C;YAC1C,MAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,EAAE,iBAAiB,CAAC,CAAC;SACtF;KACF;SAAM;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;SACZ;QAED,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;YACxC,MAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;YACF,MAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;YAChE,MAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;YACF,0CAA0C;YAC1C,MAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,EAAE,iBAAiB,CAAC,CAAC;YAC3F,IAAI,aAAa,KAAK,SAAS,IAAI,aAAa,KAAK,IAAI,EAAE;gBACzD,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;iBACZ;gBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;aACrC;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;IAEvB,MAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACpC,MAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;QACnD,8EAA8E;QAC9E,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,IAAI,IAAI,iBAAiB,IAAI,MAAM,EAAE;YAC1E,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;SACpC;aAAM;YACL,MAAM;SACP;KACF;IACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;QAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;QAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC7B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAC7B,SAAgC,EAChC,YAA2C;IAE3C,MAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;IAC9C,MAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;IAE3D,MAAM,oBAAoB,GAAG,CAC3B,GAAM,EAGN,EAAE;QACF,OAAO,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;YAC7C,KAAK,EAAE,SAAS;SACjB,CAEA,CAAC;IACJ,CAAC,CAAC;IAEF,IAAI,UAAU,EAAE;QACd,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,OAAO,oBAAoB,iCACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;SACJ;QAED,MAAM,eAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC;QAC3F,MAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,IAAI,CAC1D,CAAC,CAAC,EAAE,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,CAChD,CAAC;QACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;YACjD,MAAM,aAAa,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,UAAU,IAAI,EAAE,CAAC,CAAyB,CAAC;YAEhF,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE;gBAC9C,IAAI,eAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;iBAChD;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE;oBAC5C,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;iBACzC;aACF;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;Y
ACpC,OAAO,aAAa,CAAC;SACtB;QAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,iCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;SACJ;KACF;IAED,IACE,UAAU;QACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;QACnC,KAAK,CAAC,eAAe,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;QACA,yCAAyC;QACzC,OAAO,oBAAoB,iCACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;KACJ;IAED,OAAO,oBAAoB,iCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC;AAED,SAAS,mBAAmB,CAC1B,OAA8B,EAC9B,OAAgB;IAEhB,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;QAC7B,MAAM,MAAM,GAAG,OAAO,CAAC,gBAAgB,CAAC;QACxC,OAAO,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC;YAC1B,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE,CAAC;YAClD,CAAC,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC;KAChC;IAED,IAAI,OAAO,EAAE;QACX,OAAO,GAAG,OAAO,WAAW,CAAC;KAC9B;IACD,OAAO,SAAS,CAAC;AACnB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"./util/utils\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport {\n DefaultDeserializationOptions,\n DeserializationContentTypes,\n deserializationPolicy,\n} from \"./policies/deserializationPolicy\";\nimport { DefaultKeepAliveOptions, keepAlivePolicy } from \"./policies/keepAlivePolicy\";\nimport { DefaultRedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport { DefaultRetryOptions, exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { LogPolicyOptions, logPolicy } from \"./policies/logPolicy\";\nimport {\n OperationParameter,\n ParameterPath,\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n} from \"./operationParameter\";\nimport { OperationSpec, getStreamResponseStatusCodes } from \"./operationSpec\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResource,\n WebResourceLike,\n isWebResourceLike,\n} from \"./webResource\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./policies/requestPolicy\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\nimport { ServiceCallback, isNode } from \"./util/utils\";\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport {\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n userAgentPolicy,\n} from \"./policies/userAgentPolicy\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { InternalPipelineOptions } from \"./pipelineOptions\";\nimport { OperationArguments } from \"./operationArguments\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { URLBuilder } from \"./url\";\nimport { bearerTokenAuthenticationPolicy } from \"./policies/bearerTokenAuthenticationPolicy\";\nimport { disableResponseDecompressionPolicy } from \"./policies/disableResponseDecompressionPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport { getCachedDefaultHttpClient } from \"./httpClientCache\";\nimport { logger } from \"./log\";\nimport { ndJsonPolicy } from \"./policies/ndJsonPolicy\";\nimport { proxyPolicy } from \"./policies/proxyPolicy\";\nimport { rpRegistrationPolicy } from 
\"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { stringifyXML } from \"./util/xml\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { tracingPolicy } from \"./policies/tracingPolicy\";\n\n/**\n * Options to configure a proxy for outgoing requests (Node.js only).\n */\nexport interface ProxySettings {\n /**\n * The proxy's host address.\n */\n host: string;\n\n /**\n * The proxy host's port.\n */\n port: number;\n\n /**\n * The user name to authenticate with the proxy, if required.\n */\n username?: string;\n\n /**\n * The password to authenticate with the proxy, if required.\n */\n password?: string;\n}\n\n/**\n * An alias of {@link ProxySettings} for future use.\n */\nexport type ProxyOptions = ProxySettings;\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. 
If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-useragent\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * If specified, will be used to build the BearerTokenAuthenticationPolicy.\n */\n credentialScopes?: string | string[];\n}\n\n/**\n * ServiceClient sends service requests and receives responses.\n */\nexport class ServiceClient {\n /**\n * If specified, this is the base URI that requests will be made against for this ServiceClient.\n * If it is not specified, then all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptions;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @param credentials - The credentials used for authentication with the service.\n * @param options - The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: TokenCredential | ServiceClientCredentials,\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || getCachedDefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n logger.info(\"ServiceClient: using custom request policies\");\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n let authPolicyFactory: RequestPolicyFactory | undefined = undefined;\n if (isTokenCredential(credentials)) {\n logger.info(\n \"ServiceClient: creating bearer token authentication policy from provided credentials\"\n );\n // Create a wrapped RequestPolicyFactory here so that we can provide the\n // correct scope to the BearerTokenAuthenticationPolicy at the first time\n // one is requested. 
This is needed because generated ServiceClient\n // implementations do not set baseUri until after ServiceClient's constructor\n // is finished, leaving baseUri empty at the time when it is needed to\n // build the correct scope name.\n const wrappedPolicyFactory: () => RequestPolicyFactory = () => {\n let bearerTokenPolicyFactory: RequestPolicyFactory | undefined = undefined;\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const serviceClient = this;\n const serviceClientOptions = options;\n return {\n create(nextPolicy: RequestPolicy, createOptions: RequestPolicyOptions): RequestPolicy {\n const credentialScopes = getCredentialScopes(\n serviceClientOptions,\n serviceClient.baseUri\n );\n\n if (!credentialScopes) {\n throw new Error(\n `When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`\n );\n }\n\n if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) {\n bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(\n credentials,\n credentialScopes\n );\n }\n\n return bearerTokenPolicyFactory.create(nextPolicy, createOptions);\n },\n };\n };\n\n authPolicyFactory = wrappedPolicyFactory();\n } else if (credentials && typeof credentials.signRequest === \"function\") {\n logger.info(\"ServiceClient: creating signing policy from provided credentials\");\n authPolicyFactory = signingPolicy(credentials);\n } else if (credentials !== undefined && credentials !== null) {\n throw new Error(\"The credentials argument must implement the TokenCredential interface\");\n }\n\n logger.info(\"ServiceClient: using default request policies\");\n requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options);\n if (options.requestPolicyFactories) {\n // options.requestPolicyFactories can also be a function that manipulates\n // the default requestPolicyFactories array\n const newRequestPolicyFactories: void | RequestPolicyFactory[] =\n options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error: any) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param operationArguments - The arguments that the HTTP request's templated values will be populated from.\n * @param operationSpec - The OperationSpec to use to populate the httpRequest.\n * @param callback - The callback to call 
when the response is received.\n */\n async sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const serializerOptions = operationArguments.options?.serializerOptions;\n const httpRequest: WebResourceLike = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter),\n serializerOptions\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue !== undefined && queryParameterValue !== null) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter),\n serializerOptions\n );\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null\n ) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if (queryParameterValue.length === 0) {\n // The collection is empty, no need to try serializing the current queryParam\n continue;\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] =\n item === undefined || item === null ? 
\"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType && operationSpec.requestBody) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue !== undefined && headerValue !== null) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter),\n serializerOptions\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n\n if (options.spanOptions) {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n (httpRequest as any).spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n httpRequest.tracingContext = options.tracingContext;\n }\n\n if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) {\n httpRequest.shouldDeserialize = options.shouldDeserialize;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n 
serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseStatusCodes === undefined) {\n httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec);\n }\n\n let rawResponse: HttpOperationResponse;\n let sendRequestError;\n try {\n rawResponse = await this.sendRequest(httpRequest);\n } catch (error: any) {\n sendRequestError = error;\n }\n if (sendRequestError) {\n if (sendRequestError.response) {\n sendRequestError.details = flattenResponse(\n sendRequestError.response,\n operationSpec.responses[sendRequestError.statusCode] ||\n operationSpec.responses[\"default\"]\n );\n }\n result = Promise.reject(sendRequestError);\n } else {\n result = Promise.resolve(\n flattenResponse(rawResponse!, operationSpec.responses[rawResponse!.status])\n );\n }\n } catch (error: any) {\n result = Promise.reject(error);\n }\n\n const cb = callback;\n if (cb) {\n result\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n const serializerOptions = operationArguments.options?.serializerOptions ?? {};\n const updatedOptions: Required = {\n rootName: serializerOptions.rootName ?? \"\",\n includeRoot: serializerOptions.includeRoot ?? false,\n xmlCharKey: serializerOptions.xmlCharKey ?? XML_CHARKEY,\n };\n\n const xmlCharKey = serializerOptions.xmlCharKey;\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } =\n bodyMapper;\n const typeName = bodyMapper.type.name;\n\n try {\n if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString,\n updatedOptions\n );\n\n const isStream = typeName === MapperType.Stream;\n\n if (operationSpec.isXML) {\n const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : \"xmlns\";\n const value = getXmlValueWithNamespace(\n xmlNamespace,\n xmlnsKey,\n typeName,\n httpRequest.body,\n updatedOptions\n );\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n value,\n xmlElementName || xmlName || serializedName!,\n xmlnsKey,\n xmlNamespace\n ),\n {\n rootName: xmlName || serializedName,\n xmlCharKey,\n }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(value, {\n rootName: xmlName || serializedName,\n xmlCharKey,\n });\n }\n } else if (\n typeName === MapperType.String &&\n (operationSpec.contentType?.match(\"text/plain\") || operationSpec.mediaType === \"text\")\n ) {\n // the String serializer has validated that request body is a string\n // so just send the string.\n return;\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error: any) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue !== undefined && formDataParameterValue !== null) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter),\n updatedOptions\n );\n }\n }\n }\n}\n\n/**\n * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself\n */\nfunction getXmlValueWithNamespace(\n xmlNamespace: string | undefined,\n xmlnsKey: string,\n typeName: string,\n serializedValue: any,\n options: Required\n): any {\n // Composite and Sequence schemas already got their root namespace set during serialization\n // We just need to add xmlns to the other schema types\n if (xmlNamespace && ![\"Composite\", \"Sequence\", \"Dictionary\"].includes(typeName)) {\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace };\n return result;\n }\n\n return serializedValue;\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n authPolicyFactory: RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (authPolicyFactory) {\n factories.push(authPolicyFactory);\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const 
userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n factories.push(redirectPolicy());\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n if (isNode) {\n factories.push(proxyPolicy(options.proxySettings));\n }\n\n factories.push(logPolicy({ logger: logger.info }));\n\n return factories;\n}\n\n/**\n * Creates an HTTP pipeline based on the given options.\n * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client.\n * @param authPolicyFactory - An optional authentication policy factory to use for signing requests.\n * @returns A set of options that can be passed to create a new {@link ServiceClient}.\n */\nexport function createPipelineFromOptions(\n pipelineOptions: InternalPipelineOptions,\n authPolicyFactory?: RequestPolicyFactory\n): ServiceClientOptions {\n const requestPolicyFactories: RequestPolicyFactory[] = [];\n\n if (pipelineOptions.sendStreamingJson) {\n requestPolicyFactories.push(ndJsonPolicy());\n }\n\n let userAgentValue = undefined;\n if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) {\n const userAgentInfo: string[] = [];\n userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix);\n\n // Add the default user agent value if it isn't already specified\n // by the userAgentPrefix option.\n const defaultUserAgentInfo = getDefaultUserAgentValue();\n if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) {\n userAgentInfo.push(defaultUserAgentInfo);\n }\n\n userAgentValue = userAgentInfo.join(\" \");\n }\n\n const keepAliveOptions = {\n ...DefaultKeepAliveOptions,\n ...pipelineOptions.keepAliveOptions,\n };\n\n const retryOptions = {\n ...DefaultRetryOptions,\n ...pipelineOptions.retryOptions,\n };\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...pipelineOptions.redirectOptions,\n };\n\n if (isNode) {\n requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));\n }\n\n const deserializationOptions = {\n ...DefaultDeserializationOptions,\n ...pipelineOptions.deserializationOptions,\n };\n\n const loggingOptions: LogPolicyOptions = {\n ...pipelineOptions.loggingOptions,\n };\n\n requestPolicyFactories.push(\n tracingPolicy({ userAgent: userAgentValue }),\n keepAlivePolicy(keepAliveOptions),\n userAgentPolicy({ value: userAgentValue }),\n generateClientRequestIdPolicy(),\n deserializationPolicy(deserializationOptions.expectedContentTypes),\n throttlingRetryPolicy(),\n systemErrorRetryPolicy(),\n exponentialRetryPolicy(\n retryOptions.maxRetries,\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n )\n );\n\n if (redirectOptions.handleRedirects) {\n requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n if (authPolicyFactory) {\n requestPolicyFactories.push(authPolicyFactory);\n }\n\n requestPolicyFactories.push(logPolicy(loggingOptions));\n\n if (isNode && pipelineOptions.decompressResponse === false) {\n requestPolicyFactories.push(disableResponseDecompressionPolicy());\n }\n\n return {\n httpClient: 
pipelineOptions.httpClient,\n requestPolicyFactories,\n };\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n const serializerOptions = operationArguments.options?.serializerOptions;\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions);\n if (propertyValue !== undefined && propertyValue !== null) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent !== undefined && parent !== null && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\n/**\n * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}).\n * @param _response - Wrapper object for http response.\n * @param responseSpec - Mappers for how to parse the response properties.\n * @returns - A normalized response object.\n */\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = >(\n obj: T\n ): T & {\n _response: HttpOperationResponse;\n } => {\n return Object.defineProperty(obj, \"_response\", {\n value: _response,\n }) as T & {\n _response: HttpOperationResponse;\n };\n };\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n const arrayResponse = [...(_response.parsedBody || [])] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of 
Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n\nfunction getCredentialScopes(\n options?: ServiceClientOptions,\n baseUri?: string\n): string | string[] | undefined {\n if (options?.credentialScopes) {\n const scopes = options.credentialScopes;\n return Array.isArray(scopes)\n ? scopes.map((scope) => new URL(scope).toString())\n : new URL(scopes).toString();\n }\n\n if (baseUri) {\n return `${baseUri}/.default`;\n }\n return undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/url.js b/node_modules/@azure/core-http/dist-esm/src/url.js new file mode 100644 index 00000000..35503a50 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/url.js @@ -0,0 +1,597 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { replaceAll } from "./util/utils"; +/** + * A class that handles the query portion of a URLBuilder. + */ +export class URLQuery { + constructor() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any() { + return Object.keys(this._rawQuery).length > 0; + } + /** + * Get the keys of the query string. + */ + keys() { + return Object.keys(this._rawQuery); + } + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName, parameterValue) { + const caseParameterValue = parameterValue; + if (parameterName) { + if (caseParameterValue !== undefined && caseParameterValue !== null) { + const newValue = Array.isArray(caseParameterValue) + ? caseParameterValue + : caseParameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + } + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + } + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString() { + let result = ""; + for (const parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + const parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + const parameterStrings = []; + for (const parameterValueElement of parameterValue) { + parameterStrings.push(`${parameterName}=${parameterValueElement}`); + } + result += parameterStrings.join("&"); + } + else { + result += `${parameterName}=${parameterValue}`; + } + } + return result; + } + /** + * Parse a URLQuery from the provided text. 
+ */ + static parse(text) { + const result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + let currentState = "ParameterName"; + let parameterName = ""; + let parameterValue = ""; + for (let i = 0; i < text.length; ++i) { + const currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + } +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export class URLBuilder { + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + } + /** + * Get the scheme that has been set in this URL. + */ + getScheme() { + return this._scheme; + } + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + } + /** + * Get the host that has been set in this URL. + */ + getHost() { + return this._host; + } + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port) { + if (port === undefined || port === null || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + } + /** + * Get the port that has been set in this URL. + */ + getPort() { + return this._port; + } + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path) { + if (!path) { + this._path = undefined; + } + else { + const schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + const schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + } + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path) { + if (path) { + let currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + } + /** + * Get the path that has been set in this URL. 
+ */ + getPath() { + return this._path; + } + /** + * Set the query in this URL. + */ + setQuery(query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + } + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + } + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + } + /** + * Get the query in this URL. + */ + getQuery() { + return this._query ? this._query.toString() : undefined; + } + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + set(text, startState) { + const tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + const token = tokenizer.current(); + let tokenPath; + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error(`Unrecognized URLTokenType: ${token.type}`); + } + } + } + } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString() { + let result = ""; + if (this._scheme) { + result += `${this._scheme}://`; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += `:${this._port}`; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += `?${this._query.toString()}`; + } + return result; + } + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + } + /** + * Parses a given string URL into a new {@link URLBuilder}. 
+ */ + static parse(text) { + const result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + } +} +export class URLToken { + constructor(text, type) { + this.text = text; + this.type = type; + } + static scheme(text) { + return new URLToken(text, "SCHEME"); + } + static host(text) { + return new URLToken(text, "HOST"); + } + static port(text) { + return new URLToken(text, "PORT"); + } + static path(text) { + return new URLToken(text, "PATH"); + } + static query(text) { + return new URLToken(text, "QUERY"); + } +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export function isAlphaNumericCharacter(character) { + const characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +export class URLTokenizer { + constructor(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current() { + return this._currentToken; + } + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next() { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + } + } + return !!this._currentToken; + } +} +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + let result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. 
+ */ +function peekCharacters(tokenizer, charactersToPeek) { + let endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + let result = ""; + while (hasCurrentCharacter(tokenizer)) { + const currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. + */ +function readUntilCharacter(tokenizer, ...terminatingCharacters) { + return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); +} +function nextScheme(tokenizer) { + const scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + const host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + const port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + const path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + 
tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + const query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} +//# sourceMappingURL=url.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/url.js.map b/node_modules/@azure/core-http/dist-esm/src/url.js.map new file mode 100644 index 00000000..2be3eb9c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/url.js.map @@ -0,0 +1 @@ +{"version":3,"file":"url.js","sourceRoot":"","sources":["../../src/url.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAI1C;;GAEG;AACH,MAAM,OAAO,QAAQ;IAArB;QACmB,cAAS,GAAwD,EAAE,CAAC;IAiIvF,CAAC;IA/HC;;OAEG;IACI,GAAG;QACR,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;IAChD,CAAC;IAED;;OAEG;IACI,IAAI;QACT,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACI,GAAG,CAAC,aAAqB,EAAE,cAAuB;QACvD,MAAM,kBAAkB,GAAG,cAE1B,CAAC;QACF,IAAI,aAAa,EAAE;YACjB,IAAI,kBAAkB,KAAK,SAAS,IAAI,kBAAkB,KAAK,IAAI,EAAE;gBACnE,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;oBAChD,CAAC,CAAC,kBAAkB;oBACpB,CAAC,CAAC,kBAAkB,CAAC,QAAQ,EAAE,CAAC;gBAClC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;aAC1C;iBAAM;gBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;aACtC;SACF;IACH,CAAC;IAED;;;OAGG;IACI,GAAG,CAAC,aAAqB;QAC9B,OAAO,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACnE,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,KAAK,MAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;YAC1C,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,MAAM,gBAAgB,GAAG,EAAE,CAAC;gBAC5B,KAAK,MAAM,qBAAqB,IAAI,cAAc,EAAE;oBAClD,gBAAgB,CAAC,IAAI,CAAC,GAAG,aAAa,IAAI,qBAAqB,EAAE,CAAC,CAAC;iBACpE;gBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACtC;iBAAM;gBACL,MAAM,IAAI,GAAG,aAAa,IAAI,cAAc,EAAE,CAAC;aAChD;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,MAAM,CAAC,KAAK,CAAC,IAAY;QAC9B,MAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAE9B,IAAI,IAAI,EAAE;YACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC1B;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACpC,MAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;gBACzC,QAAQ,YAAY,EAAE;oBACpB,KAAK,eAAe;wBAClB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;4BAER,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;4BAER;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;yBACT;wBACD,MAAM;oBAER,KAAK,gBAAgB;wBACnB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;4BAER;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;yBACT;wBACD,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;iBACzE;aACF;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;gBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aAC3C;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAED;;GAEG;AACH,MAAM,OAAO,UAAU;IAOrB;;;OAGG;IACI,SAAS,CAAC,MAA0B;QACzC,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;SAC1B;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SAC5B;IACH,CAAC;IAED;;OAEG;IA
CI,SAAS;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAClC;IACH,CAAC;IAED;;OAEG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,IAAiC;QAC9C,IAAI,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;YACtD,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;SACnC;IACH,CAAC;IAED;;OAEG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;gBACvD,0FAA0F;gBAC1F,2FAA2F;gBAC3F,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;aAC9E;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;IACH,CAAC;IAED;;;OAGG;IACI,UAAU,CAAC,IAAwB;QACxC,IAAI,IAAI,EAAE;YACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;YACrD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,GAAG,CAAC;iBACpB;gBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;aAC3B;YACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACxB;IACH,CAAC;IAED;;OAEG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,QAAQ,CAAC,KAAyB;QACvC,IAAI,CAAC,KAAK,EAAE;YACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;SACzB;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrC;IACH,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,kBAA0B,EAAE,mBAA4B;QAC/E,IAAI,kBAAkB,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;aAC9B;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;SAC1D;IACH,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAAC,kBAA0B;QACtD,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1D,CAAC;IAED;;OAEG;IACK,GAAG,CAAC,IAAY,EAAE,UAA6B;QACrD,MAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;YACvB,MAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;YACxD,IAAI,SAA6B,CAAC;YAClC,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI,EAAE;oBAClB,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,SAAS,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACpC,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;4BAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;yBACxB;wBACD,MAAM;oBAER,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,8BAA8B,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC;iBAC/D;aACF;SACF;IACH,CAAC;IAED;;;OAGG;IACI,QAAQ;QACb,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,KAAK,CAAC;SAChC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;SAC5B;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GA
AG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,EAAE,CAAC;SACxC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;OAGG;IACI,UAAU,CAAC,WAAmB,EAAE,YAAoB;QACzD,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;SACvE;IACH,CAAC;IAED;;OAEG;IACI,MAAM,CAAC,KAAK,CAAC,IAAY;QAC9B,MAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;QAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;QACnC,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAMD,MAAM,OAAO,QAAQ;IACnB,YAAmC,IAAY,EAAkB,IAAkB;QAAhD,SAAI,GAAJ,IAAI,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAc;IAAG,CAAC;IAEhF,MAAM,CAAC,MAAM,CAAC,IAAY;QAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IACtC,CAAC;IAEM,MAAM,CAAC,IAAI,CAAC,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEM,MAAM,CAAC,IAAI,CAAC,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEM,MAAM,CAAC,IAAI,CAAC,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,IAAY;QAC9B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IACrC,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,UAAU,uBAAuB,CAAC,SAAiB;IACvD,MAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACtD,OAAO,CACL,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,CAAC,SAAS,CAClE,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,YAAY;IAMvB,YAA4B,KAAa,EAAE,KAAyB;QAAxC,UAAK,GAAL,KAAK,CAAQ;QACvC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,gBAAgB,CAAC;QACtF,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;IACzB,CAAC;IAED;;;OAGG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;OAEG;IACI,IAAI;QACT,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;YAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;SAChC;aAAM;YACL,QAAQ,IAAI,CAAC,aAAa,EAAE;gBAC1B,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;gBAER,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBAER;oBACE,MAAM,IAAI,KAAK,CAAC,mCAAmC,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC;aAC5E;SACF;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;CACF;AAED;;GAEG;AACH,SAAS,aAAa,CAAC,SAAuB;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;QAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;KACjD;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;GAGG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;IAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;SACV;QACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAA
C;KACjC;AACH,CAAC;AAED;;;GAGG;AACH,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;IACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;IAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;QACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;KAClC;IACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACrC,MAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;SACP;aAAM;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;GAGG;AACH,SAAS,sBAAsB,CAAC,SAAuB;IACrD,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC,SAAiB,EAAE,EAAE,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACzF,CAAC;AAED;;;GAGG;AACH,SAAS,kBAAkB,CAAC,SAAuB,EAAE,GAAG,qBAA+B;IACrF,OAAO,SAAS,CACd,SAAS,EACT,CAAC,SAAiB,EAAE,EAAE,CAAC,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB;IACzC,MAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB;IAC/C,MAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;YACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;KACF;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;QAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;KAC7B;IAED,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB;IACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,MAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,C
AAC,CAAC;IAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Get the keys of the query string.\n */\n public keys(): string[] {\n return Object.keys(this._rawQuery);\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: unknown): void {\n const caseParameterValue = parameterValue as {\n toString: () => string;\n };\n if (parameterName) {\n if (caseParameterValue !== undefined && caseParameterValue !== null) {\n const newValue = Array.isArray(caseParameterValue)\n ? caseParameterValue\n : caseParameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. 
If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port === undefined || port === null || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n let tokenPath: string | undefined;\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n tokenPath = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n /**\n * Serializes the URL as a string.\n * @returns the URL as a string.\n */\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n /**\n * Parses a given string URL into a new {@link URLBuilder}.\n */\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode 
<= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state !== undefined && state !== null ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js new file mode 100644 index 00000000..1505a28c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export function encodeString(value) { + return btoa(value); +} +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export function encodeByteArray(value) { + let str = ""; + for (let i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} +/** + * Decodes a base64 string into a byte array. 
+ * @param value - The base64 string to decode + */ +export function decodeString(value) { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} +//# sourceMappingURL=base64.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map new file mode 100644 index 00000000..87ae7d68 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.js","sourceRoot":"","sources":["../../../src/util/base64.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC;AACrB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,IAAI,GAAG,GAAG,EAAE,CAAC;IACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACtC;IACD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AACnB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;IAC/B,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;KACnC;IACD,OAAO,GAAG,CAAC;AACb,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nexport function encodeString(value: string): string {\n return btoa(value);\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n let str = \"\";\n for (let i = 0; i < value.length; i++) {\n str += String.fromCharCode(value[i]);\n }\n return btoa(str);\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n const byteString = atob(value);\n const arr = new Uint8Array(byteString.length);\n for (let i = 0; i < byteString.length; i++) {\n arr[i] = byteString.charCodeAt(i);\n }\n return arr;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.js b/node_modules/@azure/core-http/dist-esm/src/util/base64.js new file mode 100644 index 00000000..f5fff422 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. 
+ * @param value - The base64 string to decode + */ +export function decodeString(value) { + return Buffer.from(value, "base64"); +} +//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.js.map b/node_modules/@azure/core-http/dist-esm/src/util/base64.js.map new file mode 100644 index 00000000..f7ecda3c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.js","sourceRoot":"","sources":["../../../src/util/base64.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,kGAAkG;IAClG,mGAAmG;IACnG,MAAM,WAAW,GAAG,KAAK,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;IAC/F,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nexport function encodeString(value: string): string {\n return Buffer.from(value).toString(\"base64\");\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer as ArrayBuffer);\n return bufferValue.toString(\"base64\");\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n return Buffer.from(value, \"base64\");\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/constants.js b/node_modules/@azure/core-http/dist-esm/src/util/constants.js new file mode 100644 index 00000000..445ed086 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/constants.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ +export const Constants = { + /** + * The core-http version + */ + coreHttpVersion: "2.2.5", + /** + * Specifies HTTP. + */ + HTTP: "http:", + /** + * Specifies HTTPS. + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + ServiceUnavailable: 503, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. 
+ */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + */ + USER_AGENT: "User-Agent", + }, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/constants.js.map b/node_modules/@azure/core-http/dist-esm/src/util/constants.js.map new file mode 100644 index 00000000..b4fa9367 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../src/util/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC;;GAEG;AACH,MAAM,CAAC,MAAM,SAAS,GAAG;IACvB;;OAEG;IACH,eAAe,EAAE,OAAO;IAExB;;OAEG;IACH,IAAI,EAAE,OAAO;IAEb;;OAEG;IACH,KAAK,EAAE,QAAQ;IAEf;;OAEG;IACH,UAAU,EAAE,YAAY;IAExB;;OAEG;IACH,WAAW,EAAE,aAAa;IAE1B;;OAEG;IACH,QAAQ,EAAE,UAAU;IAEpB;;OAEG;IACH,SAAS,EAAE,WAAW;IAEtB,aAAa,EAAE;QACb;;WAEG;QACH,SAAS,EAAE;YACT,GAAG,EAAE,KAAK;YACV,GAAG,EAAE,KAAK;YACV,MAAM,EAAE,QAAQ;YAChB,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;SACf;QAED,WAAW,EAAE;YACX,eAAe,EAAE,GAAG;YACpB,kBAAkB,EAAE,GAAG;SACxB;KACF;IAED;;OAEG;IACH,eAAe,EAAE;QACf;;WAEG;QACH,aAAa,EAAE,eAAe;QAE9B,oBAAoB,EAAE,QAAQ;QAE9B;;;;WAIG;QACH,WAAW,EAAE,aAAa;QAE1B;;WAEG;QACH,UAAU,EAAE,YAAY;KACzB;CACF,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A set of constants used internally when processing requests.\n */\nexport const Constants = {\n /**\n * The core-http version\n */\n coreHttpVersion: \"2.2.5\",\n\n /**\n * Specifies HTTP.\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n ServiceUnavailable: 503,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/delay.js b/node_modules/@azure/core-http/dist-esm/src/util/delay.js new file mode 100644 index 00000000..3e674a45 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/delay.js @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { AbortError } from "@azure/abort-controller"; +import { isDefined } from "./typeguards"; +const StandardAbortMessage = "The operation was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * @param abortSignal - The abortSignal associated with containing operation. + * @param abortErrorMsg - The abort error message associated with containing operation. + * @returns - Resolved promise + */ +export function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (isDefined(timer)) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); +} +//# sourceMappingURL=delay.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/delay.js.map b/node_modules/@azure/core-http/dist-esm/src/util/delay.js.map new file mode 100644 index 00000000..a8c70f06 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/delay.js.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.js","sourceRoot":"","sources":["../../../src/util/delay.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAmB,MAAM,yBAAyB,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAEzC,MAAM,oBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;GAQG;AACH,MAAM,UAAU,KAAK,CACnB,SAAiB,EACjB,KAAS,EACT,OAGC;IAED,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,IAAI,KAAK,GAA8C,SAAS,CAAC;QACjE,IAAI,SAAS,GAA6B,SAAS,CAAC;QAEpD,MAAM,aAAa,GAAG,GAAS,EAAE;YAC/B,OAAO,MAAM,CACX,IAAI,UAAU,CAAC,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,aAAa,EAAC,CAAC,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,aAAa,CAAC,CAAC,CAAC,oBAAoB,CAAC,CACvF,CAAC;QACJ,CAAC,CAAC;QAEF,MAAM,eAAe,GAAG,GAAS,EAAE;YACjC,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,KAAI,SAAS,EAAE;gBACrC,OAAO,CAAC,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;aAC7D;QACH,CAAC,CAAC;QAEF,SAAS,GAAG,GAAS,EAAE;YACrB,IAAI,SAAS,CAAC,KAAK,CAAC,EAAE;gBACpB,YAAY,CAAC,KAAK,CAAC,CAAC;aACrB;YACD,eAAe,EAAE,CAAC;YAClB,OAAO,aAAa,EAAE,CAAC;QACzB,CAAC,CAAC;QAEF,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,KAAI,OAAO,CAAC,WAAW,CAAC,OAAO,EAAE;YACvD,OAAO,aAAa,EAAE,CAAC;SACxB;QAED,KAAK,GAAG,UAAU,CAAC,GAAG,EAAE;YACtB,eAAe,EAAE,CAAC;YAClB,OAAO,CAAC,KAAK,CAAC,CAAC;QACjB,CAAC,EAAE,SAAS,CAAC,CAAC;QAEd,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EAAE;YACxB,OAAO,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;SAC1D;IACH,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft 
Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError, AbortSignalLike } from \"@azure/abort-controller\";\nimport { isDefined } from \"./typeguards\";\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds.\n * @param delayInMs - The number of milliseconds to be delayed.\n * @param value - The value to be resolved with after a timeout of t milliseconds.\n * @param options - The options for delay - currently abort options\n * @param abortSignal - The abortSignal associated with containing operation.\n * @param abortErrorMsg - The abort error message associated with containing operation.\n * @returns - Resolved promise\n */\nexport function delay(\n delayInMs: number,\n value?: T,\n options?: {\n abortSignal?: AbortSignalLike;\n abortErrorMsg?: string;\n }\n): Promise {\n return new Promise((resolve, reject) => {\n let timer: ReturnType | undefined = undefined;\n let onAborted: (() => void) | undefined = undefined;\n\n const rejectOnAbort = (): void => {\n return reject(\n new AbortError(options?.abortErrorMsg ? options?.abortErrorMsg : StandardAbortMessage)\n );\n };\n\n const removeListeners = (): void => {\n if (options?.abortSignal && onAborted) {\n options.abortSignal.removeEventListener(\"abort\", onAborted);\n }\n };\n\n onAborted = (): void => {\n if (isDefined(timer)) {\n clearTimeout(timer);\n }\n removeListeners();\n return rejectOnAbort();\n };\n\n if (options?.abortSignal && options.abortSignal.aborted) {\n return rejectOnAbort();\n }\n\n timer = setTimeout(() => {\n removeListeners();\n resolve(value);\n }, delayInMs);\n\n if (options?.abortSignal) {\n options.abortSignal.addEventListener(\"abort\", onAborted);\n }\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js new file mode 100644 index 00000000..e8888450 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const DEFAULT_CLIENT_RETRY_COUNT = 3; +// intervals are in ms +export const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +export const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +export const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +export function isNumber(n) { + return typeof n === "number"; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial chekck on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise. + */ +export function shouldRetry(retryLimit, predicate, retryData, response, error) { + if (!predicate(response, error)) { + return false; + } + return retryData.retryCount < retryLimit; +} +/** + * @internal + * Updates the retry data for the next attempt. + * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation"s error, if any. 
+ */ +export function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; + const boundedRandDelta = retryOptions.retryInterval * 0.8 + + Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); + return retryData; +} +//# sourceMappingURL=exponentialBackoffStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map new file mode 100644 index 00000000..944c0b47 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialBackoffStrategy.js","sourceRoot":"","sources":["../../../src/util/exponentialBackoffStrategy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,MAAM,CAAC,MAAM,0BAA0B,GAAG,CAAC,CAAC;AAC5C,sBAAsB;AACtB,MAAM,CAAC,MAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AACvD,MAAM,CAAC,MAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AAC3D,MAAM,CAAC,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAE1D,MAAM,UAAU,QAAQ,CAAC,CAAU;IACjC,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;AAC/B,CAAC;AAaD;;;;;;;;GAQG;AACH,MAAM,UAAU,WAAW,CACzB,UAAkB,EAClB,SAA4E,EAC5E,SAAoB,EACpB,QAAgC,EAChC,KAAkB;IAElB,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,KAAK,CAAC,EAAE;QAC/B,OAAO,KAAK,CAAC;KACd;IAED,OAAO,SAAS,CAAC,UAAU,GAAG,UAAU,CAAC;AAC3C,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,eAAe,CAC7B,YAA2F,EAC3F,YAAuB,EAAE,UAAU,EAAE,CAAC,EAAE,aAAa,EAAE,CAAC,EAAE,EAC1D,GAAgB;IAEhB,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;IAED,qBAAqB;IACrB,SAAS,CAAC,UAAU,EAAE,CAAC;IAEvB,wBAAwB;IACxB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,gBAAgB,GACpB,YAAY,CAAC,aAAa,GAAG,GAAG;QAChC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,YAAY,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACjE,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,YAAY,CAAC,gBAAgB,GAAG,cAAc,EAC9C,YAAY,CAAC,gBAAgB,CAC9B,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../coreHttp\";\n\nexport const DEFAULT_CLIENT_RETRY_COUNT = 3;\n// intervals are in ms\nexport const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nexport const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nexport const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\nexport function isNumber(n: unknown): n is number {\n return typeof n === \"number\";\n}\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\n/**\n * @internal\n * Determines if the operation should be retried.\n *\n * @param retryLimit - Specifies the max number of retries.\n * @param predicate - Initial chekck on whether to retry based on given responses or errors\n * @param retryData - The retry data.\n * @returns True if the operation qualifies for a retry; false 
otherwise.\n */\nexport function shouldRetry(\n retryLimit: number,\n predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean,\n retryData: RetryData,\n response?: HttpOperationResponse,\n error?: RetryError\n): boolean {\n if (!predicate(response, error)) {\n return false;\n }\n\n return retryData.retryCount < retryLimit;\n}\n\n/**\n * @internal\n * Updates the retry data for the next attempt.\n *\n * @param retryOptions - specifies retry interval, and its lower bound and upper bound.\n * @param retryData - The retry data.\n * @param err - The operation\"s error, if any.\n */\nexport function updateRetryData(\n retryOptions: { retryInterval: number; minRetryInterval: number; maxRetryInterval: number },\n retryData: RetryData = { retryCount: 0, retryInterval: 0 },\n err?: RetryError\n): RetryData {\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;\n const boundedRandDelta =\n retryOptions.retryInterval * 0.8 +\n Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n retryOptions.minRetryInterval + incrementDelta,\n retryOptions.maxRetryInterval\n );\n\n return retryData;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js new file mode 100644 index 00000000..65d8b5e2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const custom = {}; +//# sourceMappingURL=inspect.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map new file mode 100644 index 00000000..edb11685 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.browser.js","sourceRoot":"","sources":["../../../src/util/inspect.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,CAAC,MAAM,MAAM,GAAG,EAAE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const custom = {};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.js b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js new file mode 100644 index 00000000..53fba755 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { inspect } from "util"; +export const custom = inspect.custom; +//# sourceMappingURL=inspect.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map new file mode 100644 index 00000000..5ba871ce --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.js","sourceRoot":"","sources":["../../../src/util/inspect.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAE/B,MAAM,CAAC,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { inspect } from \"util\";\n\nexport const custom = inspect.custom;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js new file mode 100644 index 00000000..7683347f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { URLBuilder, URLQuery } from "../url"; +import { isObject } from "./utils"; +const RedactedString = "REDACTED"; +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +export class Sanitizer { + constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { + allowedHeaderNames = Array.isArray(allowedHeaderNames) + ? defaultAllowedHeaderNames.concat(allowedHeaderNames) + : defaultAllowedHeaderNames; + allowedQueryParameters = Array.isArray(allowedQueryParameters) + ? 
defaultAllowedQueryParameters.concat(allowedQueryParameters) + : defaultAllowedQueryParameters; + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "_headersMap") { + return this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(value) { + return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); + } + sanitizeQuery(value) { + return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); + } + sanitizeObject(value, allowedKeys, accessor) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (allowedKeys.has(k.toLowerCase())) { + sanitized[k] = accessor(value, k); + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const urlBuilder = URLBuilder.parse(value); + const queryString = urlBuilder.getQuery(); + if (!queryString) { + return value; + } + const query = URLQuery.parse(queryString); + for (const k of query.keys()) { + if (!this.allowedQueryParameters.has(k.toLowerCase())) { + query.set(k, RedactedString); + } + } + urlBuilder.setQuery(query.toString()); + return urlBuilder.toString(); + } +} +//# sourceMappingURL=sanitizer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map new file mode 100644 index 00000000..30d07b3a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"sanitizer.js","sourceRoot":"","sources":["../../../src/util/sanitizer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAC9C,OAAO,EAAiB,QAAQ,EAAE,MAAM,SAAS,CAAC;AAkBlD,MAAM,cAAc,GAAG,UAAU,CAAC;AAElC,MAAM,yBAAyB,GAAG;IAChC,wBAAwB;IACxB,+BAA+B;IAC/B,gBAAgB;IAChB,6BAA6B;IAC7B,iBAAiB;IACjB,mBAAmB;IACnB,OAAO;IACP,0BAA0B;IAC1B,aAAa;IAEb,kCAAkC;IAClC,8BAA8B;IAC9B,8BAA8B;IAC9B,6BAA6B;IAC7B,+BAA+B;IAC/B,wBAAwB;IACxB,gCAAgC;IAChC,+BAA+B;IAC/B,QAAQ;IAER,QAAQ;IACR,iBAAiB;IACjB,eAAe;IACf,YAAY;IACZ,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,MAAM;IACN,SAAS;IACT,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,eAAe;IACf,QAAQ;IACR,YAAY;IACZ,aAAa;IACb,QAAQ;IACR,mBAAmB;IACnB,YAAY;IACZ,kBAAkB;CACnB,CAAC;AAEF,MAAM,6BAA6B,GAAa,CAAC,aAAa,CAAC,CAAC;AAEhE,MAAM,OAAO,SAAS;IAIpB,YAAY,EAAE,kBAAkB,GAAG,EAAE,EAAE,sBAAsB,GAAG,EAAE,KAAuB,EAAE;QACzF,kBAAkB,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;YACpD,CAAC,CAAC,yBAAyB,CAAC,MAAM,CAAC,kBAAkB,CAAC;YACtD,CAAC,CAAC,yBAAyB,CAAC;QAE9B,sBAAsB,GAAG,KAAK,CAAC,OAAO,CAAC,sBAAsB,CAAC;YAC5D,CAAC,CAAC,6BAA6B,CAAC,MAAM,CAAC,sBAAsB,CAAC;YAC9D,CAAC,CAAC,6BAA6B,CAAC;QAElC,IAAI,CAAC,kBAAkB,GAAG,IAAI,GAAG,CAAC,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;QAClF,IAAI,CAAC,sBAAsB,GAAG,IAAI,GAAG,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;IAC5F,CAAC;IAEM,QAAQ,CAAC,GAAY;QAC1B,MAAM,IAAI,GAAG,IAAI,GAAG,EAAW,CAAC;QAChC,OAAO,IAAI,CAAC,SAAS,CACnB,GAAG,EACH,CAAC,GAAW,EAAE,KAAc,EAAE,EAAE;YAC9B,iEAAiE;YACjE,IAAI,KAAK,YAAY,KAAK,EAAE;gBAC1B,uCACK,KAAK,KACR,IAAI,EAAE,KAAK,CAAC,IAAI,EAChB,OAAO,EAAE,KAAK,CAAC,OAAO,IACtB;aACH;YAED,IAAI,GAAG,KAAK,aAAa,EAAE;gBACzB,OAAO,IAAI,CAAC,eAAe,CAAC,KAAsB,CAAC,CAAC;aACrD;iBAAM,IAAI,GAAG,KAAK,KAAK,EAAE;gBACxB,OAAO,IAAI,CAAC,WAAW,CAAC,KAAe,CAAC,CAAC;aAC1C;iBAAM,IAAI,GAAG,KAAK,OAAO,EAAE;gBAC1B,OAAO,IAAI,CAAC,aAAa,CAAC,KAAsB,CAAC,CAAC;aACnD;iBAAM,IAAI,GAAG,KAAK,MAAM,EAAE;gBACzB,6BAA6B;gBAC7B,OAAO,SAAS,CAAC;aAClB;iBAAM,IAAI,GAAG,KAAK,UAAU,EAAE;gBAC7B,2BAA2B;gBAC3B,OAAO,SAAS,CAAC;aAClB;iBAAM,IAAI,GAAG,KAAK,eAAe,EAAE;gBAClC,iEAAiE;gBACjE,mDAAmD;gBACnD,OAAO,SAAS,CAAC;aAClB;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,KAAK,CAAC,EAAE;gBAClD,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;oBACnB,OAAO,YAAY,CAAC;iBACrB;gBACD,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;aACjB;YAED,OAAO,KAAK,CAAC;QACf,CAAC,EACD,CAAC,CACF,CAAC;IACJ,CAAC;IAEO,eAAe,CAAC,KAAoB;QAC1C,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IACnF,CAAC;IAEO,aAAa,CAAC,KAAoB;QACxC,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC;IAEO,cAAc,CACpB,KAAoB,EACpB,WAAwB,EACxB,QAA0C;QAE1C,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;YAC/C,OAAO,KAAK,CAAC;SACd;QAED,MAAM,SAAS,GAAkB,EAAE,CAAC;QAEpC,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;YAClC,IAAI,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;gBACpC,SAAS,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;aACnC;iBAAM;gBACL,SAAS,CAAC,CAAC,CAAC,GAAG,cAAc,CAAC;aAC/B;SACF;QAED,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,WAAW,CAAC,KAAa;QAC/B,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;YAC/C,OAAO,KAAK,CAAC;SACd;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAC3C,MAAM,WAAW,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;QAE1C,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,KAAK,CAAC;SACd;QAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;QAC1C,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE;YAC5B,IAAI,
CAAC,IAAI,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;gBACrD,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;aAC9B;SACF;QAED,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;IAC/B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { URLBuilder, URLQuery } from \"../url\";\nimport { UnknownObject, isObject } from \"./utils\";\n\nexport interface SanitizerOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n}\n\nconst RedactedString = \"REDACTED\";\n\nconst defaultAllowedHeaderNames = [\n \"x-ms-client-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-useragent\",\n \"x-ms-correlation-request-id\",\n \"x-ms-request-id\",\n \"client-request-id\",\n \"ms-cv\",\n \"return-client-request-id\",\n \"traceparent\",\n\n \"Access-Control-Allow-Credentials\",\n \"Access-Control-Allow-Headers\",\n \"Access-Control-Allow-Methods\",\n \"Access-Control-Allow-Origin\",\n \"Access-Control-Expose-Headers\",\n \"Access-Control-Max-Age\",\n \"Access-Control-Request-Headers\",\n \"Access-Control-Request-Method\",\n \"Origin\",\n\n \"Accept\",\n \"Accept-Encoding\",\n \"Cache-Control\",\n \"Connection\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"ETag\",\n \"Expires\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"Last-Modified\",\n \"Pragma\",\n \"Request-Id\",\n \"Retry-After\",\n \"Server\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"WWW-Authenticate\",\n];\n\nconst defaultAllowedQueryParameters: string[] = [\"api-version\"];\n\nexport class Sanitizer {\n public allowedHeaderNames: Set;\n public allowedQueryParameters: Set;\n\n constructor({ allowedHeaderNames = [], allowedQueryParameters = [] }: SanitizerOptions = {}) {\n allowedHeaderNames = Array.isArray(allowedHeaderNames)\n ? defaultAllowedHeaderNames.concat(allowedHeaderNames)\n : defaultAllowedHeaderNames;\n\n allowedQueryParameters = Array.isArray(allowedQueryParameters)\n ? 
defaultAllowedQueryParameters.concat(allowedQueryParameters)\n : defaultAllowedQueryParameters;\n\n this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase()));\n this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase()));\n }\n\n public sanitize(obj: unknown): string {\n const seen = new Set();\n return JSON.stringify(\n obj,\n (key: string, value: unknown) => {\n // Ensure Errors include their interesting non-enumerable members\n if (value instanceof Error) {\n return {\n ...value,\n name: value.name,\n message: value.message,\n };\n }\n\n if (key === \"_headersMap\") {\n return this.sanitizeHeaders(value as UnknownObject);\n } else if (key === \"url\") {\n return this.sanitizeUrl(value as string);\n } else if (key === \"query\") {\n return this.sanitizeQuery(value as UnknownObject);\n } else if (key === \"body\") {\n // Don't log the request body\n return undefined;\n } else if (key === \"response\") {\n // Don't log response again\n return undefined;\n } else if (key === \"operationSpec\") {\n // When using sendOperationRequest, the request carries a massive\n // field with the autorest spec. No need to log it.\n return undefined;\n } else if (Array.isArray(value) || isObject(value)) {\n if (seen.has(value)) {\n return \"[Circular]\";\n }\n seen.add(value);\n }\n\n return value;\n },\n 2\n );\n }\n\n private sanitizeHeaders(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value);\n }\n\n private sanitizeQuery(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]);\n }\n\n private sanitizeObject(\n value: UnknownObject,\n allowedKeys: Set,\n accessor: (value: any, key: string) => any\n ): UnknownObject {\n if (typeof value !== \"object\" || value === null) {\n return value;\n }\n\n const sanitized: UnknownObject = {};\n\n for (const k of Object.keys(value)) {\n if (allowedKeys.has(k.toLowerCase())) {\n sanitized[k] = accessor(value, k);\n } else {\n sanitized[k] = RedactedString;\n }\n }\n\n return sanitized;\n }\n\n private sanitizeUrl(value: string): string {\n if (typeof value !== \"string\" || value === null) {\n return value;\n }\n\n const urlBuilder = URLBuilder.parse(value);\n const queryString = urlBuilder.getQuery();\n\n if (!queryString) {\n return value;\n }\n\n const query = URLQuery.parse(queryString);\n for (const k of query.keys()) {\n if (!this.allowedQueryParameters.has(k.toLowerCase())) {\n query.set(k, RedactedString);\n }\n }\n\n urlBuilder.setQuery(query.toString());\n return urlBuilder.toString();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js new file mode 100644 index 00000000..cc3bb616 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +export const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. 
+ */ +export const XML_CHARKEY = "_"; +//# sourceMappingURL=serializer.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map new file mode 100644 index 00000000..c4c3d96b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.common.js","sourceRoot":"","sources":["../../../src/util/serializer.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG,GAAG,CAAC;AAC/B;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG,GAAG,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Default key used to access the XML attributes.\n */\nexport const XML_ATTRKEY = \"$\";\n/**\n * Default key used to access the XML value content.\n */\nexport const XML_CHARKEY = \"_\";\n\n/**\n * Options to govern behavior of xml parser and builder.\n */\nexport interface SerializerOptions {\n /**\n * indicates the name of the root element in the resulting XML when building XML.\n */\n rootName?: string;\n /**\n * indicates whether the root element is to be included or not in the output when parsing XML.\n */\n includeRoot?: boolean;\n /**\n * key used to access the XML value content when parsing XML.\n */\n xmlCharKey?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js new file mode 100644 index 00000000..b9e5a384 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Maximum number of retries for the throttling retry policy + */ +export const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +//# sourceMappingURL=throttlingRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map new file mode 100644 index 00000000..42c8a66a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryStrategy.js","sourceRoot":"","sources":["../../../src/util/throttlingRetryStrategy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAC,MAAM,8BAA8B,GAAG,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Maximum number of retries for the throttling retry policy\n */\nexport const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js b/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js new file mode 100644 index 00000000..d686cab8 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if the value is not null or undefined. 
+ * @param thing - Anything + * @internal + */ +export function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +//# sourceMappingURL=typeguards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js.map b/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js.map new file mode 100644 index 00000000..580daf52 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js.map @@ -0,0 +1 @@ +{"version":3,"file":"typeguards.js","sourceRoot":"","sources":["../../../src/util/typeguards.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,MAAM,UAAU,SAAS,CAAI,KAA2B;IACtD,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if the value is not null or undefined.\n * @param thing - Anything\n * @internal\n */\nexport function isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/utils.js b/node_modules/@azure/core-http/dist-esm/src/util/utils.js new file mode 100644 index 00000000..407e56c3 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/utils.js @@ -0,0 +1,213 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Constants } from "./constants"; +import { XML_ATTRKEY } from "./serializer.common"; +import { v4 as uuidv4 } from "uuid"; +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export const isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Checks if a parsed URL is HTTPS + * + * @param urlToCheck - The url to check + * @returns True if the URL is HTTPS; false otherwise. + */ +export function urlIsHTTPS(urlToCheck) { + return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; +} +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +export function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +export function stripResponse(response) { + const strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +export function stripRequest(request) { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. 
+ */ +export function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +export function generateUuid() { + return uuidv4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +export function executePromisesSequentially(promiseFactories, kickstart) { + let result = Promise.resolve(kickstart); + promiseFactories.forEach((promiseFactory) => { + result = result.then(promiseFactory); + }); + return result; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +// eslint-disable-next-line @typescript-eslint/ban-types +export function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + // eslint-disable-next-line @typescript-eslint/ban-types + return (cb) => { + promise + .then((data) => { + // eslint-disable-next-line promise/no-callback-in-promise + return cb(undefined, data); + }) + .catch((err) => { + // eslint-disable-next-line promise/no-callback-in-promise + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +export function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return (cb) => { + promise + .then((data) => { + return process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }) + .catch((err) => { + process.nextTick(cb, err); + }); + }; +} +export function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +export function applyMixins(targetCtorParam, sourceCtors) { + const castTargetCtorParam = targetCtorParam; + sourceCtors.forEach((sourceCtor) => { + Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { + castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; + }); + }); +} +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. 
+ * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +export function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +export function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true is it is primitive type, false otherwise. + */ +export function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} +export function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +export function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/utils.js.map b/node_modules/@azure/core-http/dist-esm/src/util/utils.js.map new file mode 100644 index 00000000..5849d28c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/utils.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/util/utils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAIxC,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAClD,OAAO,EAAE,EAAE,IAAI,MAAM,EAAE,MAAM,MAAM,CAAC;AAEpC,MAAM,cAAc,GAClB,gFAAgF,CAAC;AAEnF;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;IAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;IACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,UAAgC;IACzD,OAAO,UAAU,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,SAAS,CAAC,KAAK,CAAC;AAC/D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CAAC,GAAW;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;SAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,aAAa,CAAC,QAA+B;IAC3D,MAAM,gBAAgB,GAAQ,EAAE,CAAC;IACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;IAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1C,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,YAAY,CAAC,OAAwB;IACnD,MAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;QAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;KACjD;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,WAAW,CAAC,IAAY;IACtC,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,OAAO,MAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,2BAA2B,CACzC,gBAA4B,EAC5B,SAAkB;IAElB,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxC,gBAAgB,CAAC,OAAO,CAAC,CAAC,cAAc,EAAE,EAAE;QAC1C,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IACvC,CAAC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAqBD;;;;;GAKG;AACH,wDAAwD;AACxD,MAAM,UAAU,iBAAiB,CAAC,OAAqB;IACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,wDAAwD;IACxD,OAAO,CAAC,EAAY,EAAQ,EAAE;QAC5B,OAAO;aACJ,IAAI,CAAC,CAAC,IAAS,EAAE,EAAE;YAClB,0DAA0D;YAC1D,OAAO,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;QAC7B,CAAC,CAAC;aACD,KAAK,CAAC,CAAC,GAAU,EAAE,EAAE;YACpB,0DAA0D;YAC1D,EAAE,CAAC,GAAG,CAAC,CAAC;QACV,CAAC,CAAC,CAAC;IACP,CAAC,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,wBAAwB,CACtC,OAAuC;IAEvC,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,CAAC,EAAsB,EAAQ,EAAE;QACtC,OAAO;aACJ,IAAI,CAAC,CAAC,IAA2B,EAAE,EAAE;YACpC,OAAO,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACnF,CAAC,CAAC;aACD,KAAK,CAAC,CAAC,GAAU,EAAE,EAAE;YACpB,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;QAC5B,CAAC,CAAC,CAAC;IACP,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,kBAAkB,CAChC,GAAY,EACZ,WAAmB,EACnB,eAAwB,EACxB,YAAqB;IAErB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;KACb;IAED,IAAI,CAAC,eAAe,IAAI,CAAC,YAAY,EAAE;QACrC,OAAO,EAAE,CAAC,WAAW,CAAC,EAAE,GAAG,EAAE,CAAC;KAC/B;IAED,MAAM,MAAM,GAAG,EAAE,CAAC,WAAW,CAAC,EAAE,GAAG,EAAE,CAAC;IACtC,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,eAAe,CAAC,EAAE,YAAY,EAAE,CAAC;IAC1D,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,WAAW,CAAC,eAAwB,EAAE,WAAkB;IACtE,MAAM,mBAAmB,GAAG,eAE3B,CAAC;IACF,WAAW,CAAC,OAAO,CAAC,CAAC,UAAU,EAAE,EAAE;QACjC,MAAM,CAAC,mBAAmB,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE;YAChE,mBAAmB,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QACnE,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,mBAAmB,GACvB,qKAAqK,CAAC;AAExK;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,KAAa;IACtC,OAA
O,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,KAAc;IAC5C,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,CAAC,IAAI,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC;AAED,MAAM,UAAU,mBAAmB,CAAC,IAAY;IAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACrB,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC1B;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;KACxC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAOD;;;GAGG;AACH,MAAM,UAAU,QAAQ,CAAC,KAAc;IACrC,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;QACzB,KAAK,KAAK,IAAI;QACd,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QACrB,CAAC,CAAC,KAAK,YAAY,MAAM,CAAC;QAC1B,CAAC,CAAC,KAAK,YAAY,IAAI,CAAC,CACzB,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Constants } from \"./constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { XML_ATTRKEY } from \"./serializer.common\";\nimport { v4 as uuidv4 } from \"uuid\";\n\nconst validUuidRegex =\n /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param urlToCheck - The url to check\n * @returns True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param uri - The URI to be encoded.\n * @returns The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param response - The Http Response\n * @returns The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization header.\n *\n * @param request - The Http Request object\n * @returns The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param uuid - The uuid as a string that needs to be validated\n * @returns True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Generated UUID\n *\n * 
@returns RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises!\n *\n * @param promiseFactories - An array of promise factories(A function that return a promise)\n * @param kickstart - Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n * @returns A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(\n promiseFactories: Array,\n kickstart: unknown\n): Promise {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param err - The error occurred if any, while executing the request; otherwise null.\n * @param result - The deserialized response body if an error did not occur.\n * @param request - The raw/actual request sent to the server if an error did not occur.\n * @param response - The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param promise - The Promise to be converted to a callback\n * @returns A function that takes the callback `(cb: Function) => void`\n * @deprecated generated code should instead depend on responseToBody\n */\n// eslint-disable-next-line @typescript-eslint/ban-types\nexport function promiseToCallback(promise: Promise): (cb: Function) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n // eslint-disable-next-line @typescript-eslint/ban-types\n return (cb: Function): void => {\n promise\n .then((data: any) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n return cb(undefined, data);\n })\n .catch((err: Error) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n cb(err);\n });\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(\n promise: Promise\n): (cb: ServiceCallback) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise\n .then((data: HttpOperationResponse) => {\n return process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n })\n .catch((err: Error) => {\n process.nextTick(cb, err);\n });\n };\n}\n\nexport function prepareXMLRootList(\n obj: unknown,\n elementName: string,\n xmlNamespaceKey?: string,\n xmlNamespace?: string\n): { [s: string]: any } {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n\n if (!xmlNamespaceKey || !xmlNamespace) {\n return { [elementName]: obj };\n }\n\n const result = { [elementName]: obj };\n result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace };\n return result;\n}\n\n/**\n * Applies the 
properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param targetCtor - The target object on which the properties need to be applied.\n * @param sourceCtors - An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void {\n const castTargetCtorParam = targetCtorParam as {\n prototype: Record;\n };\n sourceCtors.forEach((sourceCtor) => {\n Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => {\n castTargetCtorParam.prototype[name] = sourceCtor.prototype[name];\n });\n });\n}\n\nconst validateISODuration =\n /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param value - The value to be validated for ISO 8601 duration format.\n * @returns `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param value - The value to search and replace in.\n * @param searchValue - The value to search for in the value argument.\n * @param replaceValue - The value to replace searchValue with in the value argument.\n * @returns The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given entity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value - Any entity\n * @returns true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: unknown): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n\nexport function getEnvironmentValue(name: string): string | undefined {\n if (process.env[name]) {\n return process.env[name];\n } else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\n/**\n * @internal\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * @internal\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js new file mode 100644 index 00000000..fff2bf2a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XML_ATTRKEY, XML_CHARKEY } from "./serializer.common"; +if (!self.document || !self.DOMParser || !self.Node || !self.XMLSerializer) { + throw new Error(`This library depends on the following DOM objects: ["document", "DOMParser", "Node", "XMLSerializer"] to parse XML, but some of these are undefined. 
You may provide a polyfill to make these globally available in order to support your environment. For more information, please refer to https://aka.ms/azsdk/js/web-workers. `); +} +let cachedDoc; +function getDoc() { + if (!cachedDoc) { + cachedDoc = document.implementation.createDocument(null, null, null); + } + return cachedDoc; +} +let cachedParser; +function getParser() { + if (!cachedParser) { + cachedParser = new DOMParser(); + } + return cachedParser; +} +let cachedSerializer; +function getSerializer() { + if (!cachedSerializer) { + cachedSerializer = new XMLSerializer(); + } + return cachedSerializer; +} +export function parseXML(str, opts = {}) { + var _a, _b, _c; + try { + const updatedOptions = { + rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + const dom = getParser().parseFromString(str, "application/xml"); + throwIfError(dom); + let obj; + if (updatedOptions.includeRoot) { + obj = domToObject(dom, updatedOptions); + } + else { + obj = domToObject(dom.childNodes[0], updatedOptions); + } + return Promise.resolve(obj); + } + catch (err) { + return Promise.reject(err); + } +} +let errorNS; +function getErrorNamespace() { + var _a; + if (errorNS === undefined) { + try { + errorNS = + (_a = getParser().parseFromString("INVALID", "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI) !== null && _a !== void 0 ? _a : ""; + } + catch (ignored) { + // Most browsers will return a document containing <parsererror>, but IE will throw. + errorNS = ""; + } + } + return errorNS; +} +function throwIfError(dom) { + const parserErrors = dom.getElementsByTagName("parsererror"); + if (parserErrors.length > 0 && getErrorNamespace()) { + for (let i = 0; i < parserErrors.length; i++) { + if (parserErrors[i].namespaceURI === errorNS) { + throw new Error(parserErrors[i].innerHTML); + } + } + } +} +function isElement(node) { + return !!node.attributes; +} +/** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ +function asElementWithAttributes(node) { + return isElement(node) && node.hasAttributes() ?
node : undefined; +} +function domToObject(node, options) { + let result = {}; + const childNodeCount = node.childNodes.length; + const firstChildNode = node.childNodes[0]; + const onlyChildTextValue = (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + const elementWithAttributes = asElementWithAttributes(node); + if (elementWithAttributes) { + result[XML_ATTRKEY] = {}; + for (let i = 0; i < elementWithAttributes.attributes.length; i++) { + const attr = elementWithAttributes.attributes[i]; + result[XML_ATTRKEY][attr.nodeName] = attr.nodeValue; + } + if (onlyChildTextValue) { + result[options.xmlCharKey] = onlyChildTextValue; + } + } + else if (childNodeCount === 0) { + result = ""; + } + else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + if (!onlyChildTextValue) { + for (let i = 0; i < childNodeCount; i++) { + const child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + const childObject = domToObject(child, options); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } + else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } + else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + return result; +} +export function stringifyXML(content, opts = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "root", + includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + const dom = buildNode(content, updatedOptions.rootName, updatedOptions)[0]; + return ('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' + + getSerializer().serializeToString(dom)); +} +function buildAttributes(attrs) { + const result = []; + for (const key of Object.keys(attrs)) { + const attr = getDoc().createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; +} +function buildNode(obj, elementName, options) { + if (obj === undefined || + obj === null || + typeof obj === "string" || + typeof obj === "number" || + typeof obj === "boolean") { + const elem = getDoc().createElement(elementName); + elem.textContent = obj === undefined || obj === null ?
"" : obj.toString(); + return [elem]; + } + else if (Array.isArray(obj)) { + const result = []; + for (const arrayElem of obj) { + for (const child of buildNode(arrayElem, elementName, options)) { + result.push(child); + } + } + return result; + } + else if (typeof obj === "object") { + const elem = getDoc().createElement(elementName); + for (const key of Object.keys(obj)) { + if (key === XML_ATTRKEY) { + for (const attr of buildAttributes(obj[key])) { + elem.attributes.setNamedItem(attr); + } + } + else if (key === options.xmlCharKey) { + elem.textContent = obj[key].toString(); + } + else { + for (const child of buildNode(obj[key], key, options)) { + elem.appendChild(child); + } + } + } + return [elem]; + } + else { + throw new Error(`Illegal value passed to buildObject: ${obj}`); + } +} +//# sourceMappingURL=xml.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map new file mode 100644 index 00000000..83018552 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.browser.js","sourceRoot":"","sources":["../../../src/util/xml.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAElF,IAAI,CAAC,IAAI,CAAC,QAAQ,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;IAC1E,MAAM,IAAI,KAAK,CACb,oUAAoU,CACrU,CAAC;CACH;AAED,IAAI,SAA+B,CAAC;AACpC,SAAS,MAAM;IACb,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG,QAAQ,CAAC,cAAc,CAAC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;KACtE;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,IAAI,YAAmC,CAAC;AACxC,SAAS,SAAS;IAChB,IAAI,CAAC,YAAY,EAAE;QACjB,YAAY,GAAG,IAAI,SAAS,EAAE,CAAC;KAChC;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,IAAI,gBAA2C,CAAC;AAChD,SAAS,aAAa;IACpB,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,IAAI,aAAa,EAAE,CAAC;KACxC;IACD,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED,MAAM,UAAU,QAAQ,CAAC,GAAW,EAAE,OAA0B,EAAE;;IAChE,IAAI;QACF,MAAM,cAAc,GAAgC;YAClD,QAAQ,EAAE,MAAA,IAAI,CAAC,QAAQ,mCAAI,EAAE;YAC7B,WAAW,EAAE,MAAA,IAAI,CAAC,WAAW,mCAAI,KAAK;YACtC,UAAU,EAAE,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW;SAC3C,CAAC;QACF,MAAM,GAAG,GAAG,SAAS,EAAE,CAAC,eAAe,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;QAChE,YAAY,CAAC,GAAG,CAAC,CAAC;QAElB,IAAI,GAAG,CAAC;QACR,IAAI,cAAc,CAAC,WAAW,EAAE;YAC9B,GAAG,GAAG,WAAW,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC;SACxC;aAAM;YACL,GAAG,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;SACtD;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;KAC7B;IAAC,OAAO,GAAQ,EAAE;QACjB,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;AACH,CAAC;AAED,IAAI,OAA2B,CAAC;AAEhC,SAAS,iBAAiB;;IACxB,IAAI,OAAO,KAAK,SAAS,EAAE;QACzB,IAAI;YACF,OAAO;gBACL,MAAA,SAAS,EAAE,CAAC,eAAe,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;qBACtF,YAAa,mCAAI,EAAE,CAAC;SAC1B;QAAC,OAAO,OAAY,EAAE;YACrB,oFAAoF;YACpF,OAAO,GAAG,EAAE,CAAC;SACd;KACF;IACD,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,YAAY,CAAC,GAAa;IACjC,MAAM,YAAY,GAAG,GAAG,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAC7D,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,IAAI,iBAAiB,EAAE,EAAE;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,KAAK,OAAO,EAAE;gBAC5C,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;aAC5C;SACF;KACF;AACH,CAAC;AAED,SAAS,SAAS,CAAC,IAAU;IAC3B,OAAO,CAAC,CAAE,IAAgB,CAAC,UAAU,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,SAAS,uBAAuB,CAAC,IAAU;IACzC,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,
CAAC;AACpE,CAAC;AAED,SAAS,WAAW,CAAC,IAAU,EAAE,OAAoC;IACnE,IAAI,MAAM,GAAQ,EAAE,CAAC;IAErB,MAAM,cAAc,GAAW,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAEtD,MAAM,cAAc,GAAS,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAChD,MAAM,kBAAkB,GACtB,CAAC,cAAc;QACb,cAAc,KAAK,CAAC;QACpB,cAAc,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS;QAC1C,cAAc,CAAC,SAAS,CAAC;QAC3B,SAAS,CAAC;IAEZ,MAAM,qBAAqB,GAAwB,uBAAuB,CAAC,IAAI,CAAC,CAAC;IACjF,IAAI,qBAAqB,EAAE;QACzB,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC;QAEzB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,qBAAqB,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,MAAM,IAAI,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjD,MAAM,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;SACrD;QAED,IAAI,kBAAkB,EAAE;YACtB,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,kBAAkB,CAAC;SACjD;KACF;SAAM,IAAI,cAAc,KAAK,CAAC,EAAE;QAC/B,MAAM,GAAG,EAAE,CAAC;KACb;SAAM,IAAI,kBAAkB,EAAE;QAC7B,MAAM,GAAG,kBAAkB,CAAC;KAC7B;IAED,IAAI,CAAC,kBAAkB,EAAE;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;YACvC,MAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjC,2CAA2C;YAC3C,IAAI,KAAK,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS,EAAE;gBACrC,MAAM,WAAW,GAAQ,WAAW,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;gBACrD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;oBAC3B,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,WAAW,CAAC;iBACtC;qBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;oBAChD,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;iBAC1C;qBAAM;oBACL,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,CAAC;iBAChE;aACF;SACF;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,OAAgB,EAAE,OAA0B,EAAE;;IACzE,MAAM,cAAc,GAAgC;QAClD,QAAQ,EAAE,MAAA,IAAI,CAAC,QAAQ,mCAAI,MAAM;QACjC,WAAW,EAAE,MAAA,IAAI,CAAC,WAAW,mCAAI,KAAK;QACtC,UAAU,EAAE,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW;KAC3C,CAAC;IACF,MAAM,GAAG,GAAG,SAAS,CAAC,OAAO,EAAE,cAAc,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3E,OAAO,CACL,yDAAyD;QACzD,aAAa,EAAE,CAAC,iBAAiB,CAAC,GAAG,CAAC,CACvC,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CAAC,KAAgD;IACvE,MAAM,MAAM,GAAG,EAAE,CAAC;IAClB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;QACpC,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,SAAS,CAAC,GAAQ,EAAE,WAAmB,EAAE,OAAoC;IACpF,IACE,GAAG,KAAK,SAAS;QACjB,GAAG,KAAK,IAAI;QACZ,OAAO,GAAG,KAAK,QAAQ;QACvB,OAAO,GAAG,KAAK,QAAQ;QACvB,OAAO,GAAG,KAAK,SAAS,EACxB;QACA,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QACjD,IAAI,CAAC,WAAW,GAAG,GAAG,KAAK,SAAS,IAAI,GAAG,KAAK,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC;QAC3E,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QAC7B,MAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAK,MAAM,SAAS,IAAI,GAAG,EAAE;YAC3B,KAAK,MAAM,KAAK,IAAI,SAAS,CAAC,SAAS,EAAE,WAAW,EAAE,OAAO,CAAC,EAAE;gBAC9D,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACpB;SACF;QACD,OAAO,MAAM,CAAC;KACf;SAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAClC,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QACjD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;YAClC,IAAI,GAAG,KAAK,WAAW,EAAE;gBACvB,KAAK,MAAM,IAAI,IAAI,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE;oBAC5C,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;iBACpC;aACF;iBAAM,IAAI,GAAG,KAAK,OAAO,CAAC,UAAU,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;aACxC;iBAAM;gBACL,KAAK,MAAM,KAAK,IAAI,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE;oBACrD,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;iBACzB;aACF;SACF;QACD,OAAO,CAAC,IAAI,CAAC,CAAC;K
ACf;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,wCAAwC,GAAG,EAAE,CAAC,CAAC;KAChE;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./serializer.common\";\n\nif (!self.document || !self.DOMParser || !self.Node || !self.XMLSerializer) {\n throw new Error(\n `This library depends on the following DOM objects: [\"document\", \"DOMParser\", \"Node\", \"XMLSerializer\"] to parse XML, but some of these are undefined. You may provide a polyfill to make these globally available in order to support your environment. For more information, please refer to https://aka.ms/azsdk/js/web-workers. `\n );\n}\n\nlet cachedDoc: Document | undefined;\nfunction getDoc(): Document {\n if (!cachedDoc) {\n cachedDoc = document.implementation.createDocument(null, null, null);\n }\n return cachedDoc;\n}\n\nlet cachedParser: DOMParser | undefined;\nfunction getParser(): DOMParser {\n if (!cachedParser) {\n cachedParser = new DOMParser();\n }\n return cachedParser;\n}\n\nlet cachedSerializer: XMLSerializer | undefined;\nfunction getSerializer(): XMLSerializer {\n if (!cachedSerializer) {\n cachedSerializer = new XMLSerializer();\n }\n return cachedSerializer;\n}\n\nexport function parseXML(str: string, opts: SerializerOptions = {}): Promise {\n try {\n const updatedOptions: Required = {\n rootName: opts.rootName ?? \"\",\n includeRoot: opts.includeRoot ?? false,\n xmlCharKey: opts.xmlCharKey ?? XML_CHARKEY,\n };\n const dom = getParser().parseFromString(str, \"application/xml\");\n throwIfError(dom);\n\n let obj;\n if (updatedOptions.includeRoot) {\n obj = domToObject(dom, updatedOptions);\n } else {\n obj = domToObject(dom.childNodes[0], updatedOptions);\n }\n\n return Promise.resolve(obj);\n } catch (err: any) {\n return Promise.reject(err);\n }\n}\n\nlet errorNS: string | undefined;\n\nfunction getErrorNamespace(): string {\n if (errorNS === undefined) {\n try {\n errorNS =\n getParser().parseFromString(\"INVALID\", \"text/xml\").getElementsByTagName(\"parsererror\")[0]\n .namespaceURI! ?? \"\";\n } catch (ignored: any) {\n // Most browsers will return a document containing , but IE will throw.\n errorNS = \"\";\n }\n }\n return errorNS;\n}\n\nfunction throwIfError(dom: Document): void {\n const parserErrors = dom.getElementsByTagName(\"parsererror\");\n if (parserErrors.length > 0 && getErrorNamespace()) {\n for (let i = 0; i < parserErrors.length; i++) {\n if (parserErrors[i].namespaceURI === errorNS) {\n throw new Error(parserErrors[i].innerHTML);\n }\n }\n }\n}\n\nfunction isElement(node: Node): node is Element {\n return !!(node as Element).attributes;\n}\n\n/**\n * Get the Element-typed version of the provided Node if the provided node is an element with\n * attributes. If it isn't, then undefined is returned.\n */\nfunction asElementWithAttributes(node: Node): Element | undefined {\n return isElement(node) && node.hasAttributes() ? 
node : undefined;\n}\n\nfunction domToObject(node: Node, options: Required): any {\n let result: any = {};\n\n const childNodeCount: number = node.childNodes.length;\n\n const firstChildNode: Node = node.childNodes[0];\n const onlyChildTextValue: string | undefined =\n (firstChildNode &&\n childNodeCount === 1 &&\n firstChildNode.nodeType === Node.TEXT_NODE &&\n firstChildNode.nodeValue) ||\n undefined;\n\n const elementWithAttributes: Element | undefined = asElementWithAttributes(node);\n if (elementWithAttributes) {\n result[XML_ATTRKEY] = {};\n\n for (let i = 0; i < elementWithAttributes.attributes.length; i++) {\n const attr = elementWithAttributes.attributes[i];\n result[XML_ATTRKEY][attr.nodeName] = attr.nodeValue;\n }\n\n if (onlyChildTextValue) {\n result[options.xmlCharKey] = onlyChildTextValue;\n }\n } else if (childNodeCount === 0) {\n result = \"\";\n } else if (onlyChildTextValue) {\n result = onlyChildTextValue;\n }\n\n if (!onlyChildTextValue) {\n for (let i = 0; i < childNodeCount; i++) {\n const child = node.childNodes[i];\n // Ignore leading/trailing whitespace nodes\n if (child.nodeType !== Node.TEXT_NODE) {\n const childObject: any = domToObject(child, options);\n if (!result[child.nodeName]) {\n result[child.nodeName] = childObject;\n } else if (Array.isArray(result[child.nodeName])) {\n result[child.nodeName].push(childObject);\n } else {\n result[child.nodeName] = [result[child.nodeName], childObject];\n }\n }\n }\n }\n\n return result;\n}\n\nexport function stringifyXML(content: unknown, opts: SerializerOptions = {}): string {\n const updatedOptions: Required = {\n rootName: opts.rootName ?? \"root\",\n includeRoot: opts.includeRoot ?? false,\n xmlCharKey: opts.xmlCharKey ?? XML_CHARKEY,\n };\n const dom = buildNode(content, updatedOptions.rootName, updatedOptions)[0];\n return (\n '' +\n getSerializer().serializeToString(dom)\n );\n}\n\nfunction buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {\n const result = [];\n for (const key of Object.keys(attrs)) {\n const attr = getDoc().createAttribute(key);\n attr.value = attrs[key].toString();\n result.push(attr);\n }\n return result;\n}\n\nfunction buildNode(obj: any, elementName: string, options: Required): Node[] {\n if (\n obj === undefined ||\n obj === null ||\n typeof obj === \"string\" ||\n typeof obj === \"number\" ||\n typeof obj === \"boolean\"\n ) {\n const elem = getDoc().createElement(elementName);\n elem.textContent = obj === undefined || obj === null ? 
\"\" : obj.toString();\n return [elem];\n } else if (Array.isArray(obj)) {\n const result = [];\n for (const arrayElem of obj) {\n for (const child of buildNode(arrayElem, elementName, options)) {\n result.push(child);\n }\n }\n return result;\n } else if (typeof obj === \"object\") {\n const elem = getDoc().createElement(elementName);\n for (const key of Object.keys(obj)) {\n if (key === XML_ATTRKEY) {\n for (const attr of buildAttributes(obj[key])) {\n elem.attributes.setNamedItem(attr);\n }\n } else if (key === options.xmlCharKey) {\n elem.textContent = obj[key].toString();\n } else {\n for (const child of buildNode(obj[key], key, options)) {\n elem.appendChild(child);\n }\n }\n }\n return [elem];\n } else {\n throw new Error(`Illegal value passed to buildObject: ${obj}`);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.js b/node_modules/@azure/core-http/dist-esm/src/util/xml.js new file mode 100644 index 00000000..230f0c22 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as xml2js from "xml2js"; +import { XML_ATTRKEY, XML_CHARKEY } from "./serializer.common"; +// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed +// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 +// By creating a new copy of the settings each time we instantiate the parser, +// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. +const xml2jsDefaultOptionsV2 = { + explicitCharkey: false, + trim: false, + normalize: false, + normalizeTags: false, + attrkey: XML_ATTRKEY, + explicitArray: true, + ignoreAttrs: false, + mergeAttrs: false, + explicitRoot: true, + validator: undefined, + xmlns: false, + explicitChildren: false, + preserveChildrenOrder: false, + childkey: "$$", + charsAsChildren: false, + includeWhiteChars: false, + async: false, + strict: true, + attrNameProcessors: undefined, + attrValueProcessors: undefined, + tagNameProcessors: undefined, + valueProcessors: undefined, + rootName: "root", + xmldec: { + version: "1.0", + encoding: "UTF-8", + standalone: true, + }, + doctype: undefined, + renderOpts: { + pretty: true, + indent: " ", + newline: "\n", + }, + headless: false, + chunkSize: 10000, + emptyTag: "", + cdata: false, +}; +// The xml2js settings for general XML parsing operations. +const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsParserSettings.explicitArray = false; +// The xml2js settings for general XML building operations. +const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsBuilderSettings.explicitArray = false; +xml2jsBuilderSettings.renderOpts = { + pretty: false, +}; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +export function stringifyXML(obj, opts = {}) { + var _a; + xml2jsBuilderSettings.rootName = opts.rootName; + xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; + const builder = new xml2js.Builder(xml2jsBuilderSettings); + return builder.buildObject(obj); +} +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +export function parseXML(str, opts = {}) { + var _a; + xml2jsParserSettings.explicitRoot = !!opts.includeRoot; + xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + const xmlParser = new xml2js.Parser(xml2jsParserSettings); + return new Promise((resolve, reject) => { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, (err, res) => { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} +//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.js.map b/node_modules/@azure/core-http/dist-esm/src/util/xml.js.map new file mode 100644 index 00000000..f1aff151 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.js","sourceRoot":"","sources":["../../../src/util/xml.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAElF,qIAAqI;AACrI,mGAAmG;AACnG,8EAA8E;AAC9E,+GAA+G;AAC/G,MAAM,sBAAsB,GAAqB;IAC/C,eAAe,EAAE,KAAK;IACtB,IAAI,EAAE,KAAK;IACX,SAAS,EAAE,KAAK;IAChB,aAAa,EAAE,KAAK;IACpB,OAAO,EAAE,WAAW;IACpB,aAAa,EAAE,IAAI;IACnB,WAAW,EAAE,KAAK;IAClB,UAAU,EAAE,KAAK;IACjB,YAAY,EAAE,IAAI;IAClB,SAAS,EAAE,SAAS;IACpB,KAAK,EAAE,KAAK;IACZ,gBAAgB,EAAE,KAAK;IACvB,qBAAqB,EAAE,KAAK;IAC5B,QAAQ,EAAE,IAAI;IACd,eAAe,EAAE,KAAK;IACtB,iBAAiB,EAAE,KAAK;IACxB,KAAK,EAAE,KAAK;IACZ,MAAM,EAAE,IAAI;IACZ,kBAAkB,EAAE,SAAS;IAC7B,mBAAmB,EAAE,SAAS;IAC9B,iBAAiB,EAAE,SAAS;IAC5B,eAAe,EAAE,SAAS;IAC1B,QAAQ,EAAE,MAAM;IAChB,MAAM,EAAE;QACN,OAAO,EAAE,KAAK;QACd,QAAQ,EAAE,OAAO;QACjB,UAAU,EAAE,IAAI;KACjB;IACD,OAAO,EAAE,SAAS;IAClB,UAAU,EAAE;QACV,MAAM,EAAE,IAAI;QACZ,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;KACd;IACD,QAAQ,EAAE,KAAK;IACf,SAAS,EAAE,KAAK;IAChB,QAAQ,EAAE,EAAE;IACZ,KAAK,EAAE,KAAK;CACb,CAAC;AAEF,0DAA0D;AAC1D,MAAM,oBAAoB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC5E,oBAAoB,CAAC,aAAa,GAAG,KAAK,CAAC;AAE3C,2DAA2D;AAC3D,MAAM,qBAAqB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC7E,qBAAqB,CAAC,aAAa,GAAG,KAAK,CAAC;AAC5C,qBAAqB,CAAC,UAAU,GAAG;IACjC,MAAM,EAAE,KAAK;CACd,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,GAAY,EAAE,OAA0B,EAAE;;IACrE,qBAAqB,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IAC/C,qBAAqB,CAAC,OAAO,GAAG,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW,CAAC;IAC/D,MAAM,OAAO,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,qBAAqB,CAAC,CAAC;IAC1D,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,QAAQ,CAAC,GAAW,EAAE,OAA0B,EAAE;;IAChE,oBAAoB,CAAC,YAAY,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC;IACvD,oBAAoB,CAAC,OAAO,GAAG,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW,CAAC;IAC9D,MAAM,SAAS,GAAG,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC;IAC1D,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;SACxC;aAAM;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;gBACtC,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;qBAAM;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;iBACd;YACH,CAAC,CAAC,CAAC;SACJ;IACH,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as xml2js from 
\"xml2js\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./serializer.common\";\n\n// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed\n// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536\n// By creating a new copy of the settings each time we instantiate the parser,\n// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally.\nconst xml2jsDefaultOptionsV2: xml2js.OptionsV2 = {\n explicitCharkey: false,\n trim: false,\n normalize: false,\n normalizeTags: false,\n attrkey: XML_ATTRKEY,\n explicitArray: true,\n ignoreAttrs: false,\n mergeAttrs: false,\n explicitRoot: true,\n validator: undefined,\n xmlns: false,\n explicitChildren: false,\n preserveChildrenOrder: false,\n childkey: \"$$\",\n charsAsChildren: false,\n includeWhiteChars: false,\n async: false,\n strict: true,\n attrNameProcessors: undefined,\n attrValueProcessors: undefined,\n tagNameProcessors: undefined,\n valueProcessors: undefined,\n rootName: \"root\",\n xmldec: {\n version: \"1.0\",\n encoding: \"UTF-8\",\n standalone: true,\n },\n doctype: undefined,\n renderOpts: {\n pretty: true,\n indent: \" \",\n newline: \"\\n\",\n },\n headless: false,\n chunkSize: 10000,\n emptyTag: \"\",\n cdata: false,\n};\n\n// The xml2js settings for general XML parsing operations.\nconst xml2jsParserSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsParserSettings.explicitArray = false;\n\n// The xml2js settings for general XML building operations.\nconst xml2jsBuilderSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsBuilderSettings.explicitArray = false;\nxml2jsBuilderSettings.renderOpts = {\n pretty: false,\n};\n\n/**\n * Converts given JSON object to XML string\n * @param obj - JSON object to be converted into XML string\n * @param opts - Options that govern the parsing of given JSON object\n */\nexport function stringifyXML(obj: unknown, opts: SerializerOptions = {}): string {\n xml2jsBuilderSettings.rootName = opts.rootName;\n xml2jsBuilderSettings.charkey = opts.xmlCharKey ?? XML_CHARKEY;\n const builder = new xml2js.Builder(xml2jsBuilderSettings);\n return builder.buildObject(obj);\n}\n\n/**\n * Converts given XML string into JSON\n * @param str - String containing the XML content to be parsed into JSON\n * @param opts - Options that govern the parsing of given xml string\n */\nexport function parseXML(str: string, opts: SerializerOptions = {}): Promise {\n xml2jsParserSettings.explicitRoot = !!opts.includeRoot;\n xml2jsParserSettings.charkey = opts.xmlCharKey ?? XML_CHARKEY;\n const xmlParser = new xml2js.Parser(xml2jsParserSettings);\n return new Promise((resolve, reject) => {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n } else {\n xmlParser.parseString(str, (err, res) => {\n if (err) {\n reject(err);\n } else {\n resolve(res);\n }\n });\n }\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/webResource.js b/node_modules/@azure/core-http/dist-esm/src/webResource.js new file mode 100644 index 00000000..3fc3d27e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/webResource.js @@ -0,0 +1,264 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { HttpHeaders, isHttpHeadersLike } from "./httpHeaders"; +import { Serializer } from "./serializer"; +import { generateUuid } from "./util/utils"; +export function isWebResourceLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.url === "string" && + typeof castObject.method === "string" && + typeof castObject.headers === "object" && + isHttpHeadersLike(castObject.headers) && + typeof castObject.validateRequestProperties === "function" && + typeof castObject.prepare === "function" && + typeof castObject.clone === "function") { + return true; + } + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +export class WebResource { + constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { + this.streamResponseBody = streamResponseBody; + this.streamResponseStatusCodes = streamResponseStatusCodes; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.decompressResponse = decompressResponse; + this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties() { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + } + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method === undefined || + options.method === null || + typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate === undefined || + options.pathTemplate === null || + typeof options.pathTemplate.valueOf() !== "string") && + (options.url === undefined || + options.url === null || + typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. 
+ if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + const { pathTemplate, pathParameters } = options; + if (typeof pathTemplate !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + const baseUrl = options.baseUrl; + let url = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); + const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); + if (segments && segments.length) { + if (!pathParameters) { + throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); + } + segments.forEach(function (item) { + const pathParamName = item.slice(1, -1); + const pathParam = pathParameters[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); + throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + + ` however, it is not present in parameters: ${stringifiedPathParameters}.` + + `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + + `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); + } + if (typeof pathParam.valueOf() === "string") { + url = url.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (pathParam.skipUrlEncoding) { + url = url.replace(item, pathParam.value); + } + else { + url = url.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + const queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + + `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + + `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + const queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (const queryParamName in queryParameters) { + const queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + const headers = options.headers; + for (const headerName of Object.keys(options.headers)) { + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", this.requestId); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly + this.body = options.body; + if (options.body !== undefined && options.body !== null) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + if (options.spanOptions) { + this.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + this.tracingContext = options.tracingContext; + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + return this; + } + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. 
+ */ + clone() { + const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + } +} +//# sourceMappingURL=webResource.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/webResource.js.map b/node_modules/@azure/core-http/dist-esm/src/webResource.js.map new file mode 100644 index 00000000..e5ed93ce --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/webResource.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.js","sourceRoot":"","sources":["../../src/webResource.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,WAAW,EAAmB,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAChF,OAAO,EAAU,UAAU,EAAE,MAAM,cAAc,CAAC;AAOlD,OAAO,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AAuJ5C,MAAM,UAAU,iBAAiB,CAAC,MAAe;IAC/C,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAOlB,CAAC;QACF,IACE,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;YAClC,OAAO,UAAU,CAAC,MAAM,KAAK,QAAQ;YACrC,OAAO,UAAU,CAAC,OAAO,KAAK,QAAQ;YACtC,iBAAiB,CAAC,UAAU,CAAC,OAAO,CAAC;YACrC,OAAO,UAAU,CAAC,yBAAyB,KAAK,UAAU;YAC1D,OAAO,UAAU,CAAC,OAAO,KAAK,UAAU;YACxC,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU,EACtC;YACA,OAAO,IAAI,CAAC;SACb;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;GAKG;AACH,MAAM,OAAO,WAAW;IAsGtB,YACE,GAAY,EACZ,MAAoB,EACpB,IAAc,EACd,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,kBAA4B,EAC5B,yBAAuC;QAEvC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,yBAAyB,GAAG,yBAAyB,CAAC;QAC3D,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;QAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;QAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;QAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,YAAY,EAAE,CAAC;IAChF,CAAC;IAED;;;;OAIG;IACH,yBAAyB;QACvB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;SACjD;IACH,CAAC;IAED;;;;OAIG;IACH,OAAO,CAAC,OAA8B;QACpC,IAAI,CAAC,OAAO,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QAED,IACE,OAAO,CAAC,MAAM,KAAK,SAAS;YAC5B,OAAO,CAAC,MAAM,KAAK,IAAI;YACvB,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAC5C;YACA,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;SACH;QAED,IACE,CAAC,OAAO,CAAC,YAAY,KAAK,SAAS;YACjC,OAAO,CAAC,YAAY,KAAK,IAAI;YAC7B,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC;YACrD,CAAC,OAAO,CAAC,GAAG,KAAK,SAAS
;gBACxB,OAAO,CAAC,GAAG,KAAK,IAAI;gBACpB,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EAC5C;YACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,iCAAiC;QACjC,IAAI,OAAO,CAAC,GAAG,EAAE;YACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;SACxB;QAED,iBAAiB;QACjB,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,MAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;oBACrB,OAAO,CAAC,MAAM;oBACd,4CAA4C;oBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;aACH;SACF;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;QAE1D,iDAAiD;QACjD,IAAI,OAAO,CAAC,YAAY,EAAE;YACxB,MAAM,EAAE,YAAY,EAAE,cAAc,EAAE,GAAG,OAAO,CAAC;YACjD,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;gBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;aACnE;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;aAClD;YACD,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,GAAG,GACL,OAAO;gBACP,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;gBAClC,CAAC,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC;YACxE,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAC;YACpD,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,cAAc,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,0EAA0E,CACxG,CAAC;iBACH;gBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;oBAC7B,MAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;oBACxC,MAAM,SAAS,GAAI,cAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;wBAClB,SAAS,KAAK,SAAS;wBACvB,CAAC,CAAC,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;wBACA,MAAM,yBAAyB,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC;wBAC/E,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,gCAAgC,aAAa,EAAE;4BAC1E,8CAA8C,yBAAyB,GAAG;4BAC1E,0EAA0E,aAAa,6BAA6B;4BACpH,wCAAwC,aAAa,6DAA6D,CACrH,CAAC;qBACH;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;qBACxD;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;4BACpB,MAAM,IAAI,KAAK,CACb,0BAA0B,aAAa,mEAAmE,CAC3G,CAAC;yBACH;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;yBAC1C;6BAAM;4BACL,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;yBAC9D;qBACF;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;SAChB;QAED,iHAAiH;QACjH,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,MAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;oBAC3E,qFAAqF;oBACrF,2IAA2I,CAC9I,CAAC;aACH;YACD,uDAAuD;YACvD,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;aACjB;YACD,wBAAwB;YACxB,MAAM,WAAW,GAAG,EAAE,CAAC;YACvB,4GAA4G;YAC5G,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAChB,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,MAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,UAAU,EAAE;oBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;qBAC7D;yBAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;4BACrB,MAAM,IAAI,KAAK,CACb,2BAA2B,cAAc,mEAAmE,CAC7G,CAAC;yBACH;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAA
C,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;yBAC/C;6BAAM;4BACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;4BAC9E,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;yBACnE;qBACF;iBACF;aACF,CAAC,aAAa;YACf,yBAAyB;YACzB,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnC;QAED,kDAAkD;QAClD,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAK,MAAM,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;gBACrD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;aACnD;SACF;QACD,0CAA0C;QAC1C,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;SAC9C;QACD,yCAAyC;QACzC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;SAC5D;QAED,UAAU;QACV,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;SACrE;QAED,0HAA0H;QAC1H,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,IAAI,EAAE;YACvD,6HAA6H;YAC7H,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;iBAClD;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;iBAC9D;aACF;iBAAM;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;iBACH;gBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC1C;aACF;SACF;QAED,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;SACxC;QAED,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,IAAI,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;SAC9C;QAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;QAEjD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACH,KAAK;QACH,MAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,yBAAyB,CAC/B,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;SACjC;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC3C;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;SACnD;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;YAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;SAC/D;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Context, SpanOptions } from \"@azure/core-tracing\";\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { SerializerOptions } 
from \"./util/serializer.common\";\nimport { generateUuid } from \"./util/utils\";\n\n/**\n * List of supported HTTP methods.\n */\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\n\n/**\n * Possible HTTP request body types\n */\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * A description of a HTTP request to be made to a remote server.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * A unique identifier for the request. Used for logging and tracing.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. 
It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: unknown): object is WebResourceLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n url: unknown;\n method: unknown;\n headers: unknown;\n validateRequestProperties: unknown;\n prepare: unknown;\n clone: unknown;\n };\n if (\n typeof castObject.url === \"string\" &&\n typeof castObject.method === \"string\" &&\n typeof castObject.headers === \"object\" &&\n isHttpHeadersLike(castObject.headers) &&\n typeof castObject.validateRequestProperties === \"function\" &&\n typeof castObject.prepare === \"function\" &&\n typeof castObject.clone === \"function\"\n ) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n */\nexport class WebResource implements WebResourceLike {\n /**\n * URL of the outgoing request.\n */\n url: string;\n /**\n * HTTP method to use.\n */\n method: HttpMethods;\n /**\n * Request body.\n */\n body?: any;\n /**\n * HTTP headers.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * Query added to the URL.\n */\n query?: { [key: string]: any };\n /**\n * Specification of the HTTP request.\n */\n operationSpec?: OperationSpec;\n /**\n * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination.\n */\n withCredentials: boolean;\n /**\n * How long to wait in milliseconds before aborting the request.\n */\n timeout: number;\n /**\n * What proxy to use, if necessary.\n */\n proxySettings?: ProxySettings;\n /**\n * Whether to keep the HTTP connections alive throughout requests.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * Unique identifier of the outgoing request.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. 
Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * Tracing: Context used when creating Spans.\n */\n tracingContext?: Context;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: unknown,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n decompressResponse?: boolean,\n streamResponseStatusCodes?: Set\n ) {\n this.streamResponseBody = streamResponseBody;\n this.streamResponseStatusCodes = streamResponseStatusCodes;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.decompressResponse = decompressResponse;\n this.requestId = this.headers.get(\"x-ms-client-request-id\") || generateUuid();\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param options - Options to provide for preparing the request.\n * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (\n options.method === undefined ||\n options.method === null ||\n typeof options.method.valueOf() !== \"string\"\n ) {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate === undefined ||\n options.pathTemplate === null ||\n typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url === undefined ||\n options.url === null ||\n typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({[\\w-]*\\s*[\\w-]*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2);\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in parameters: ${stringifiedPathParameters}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. 
It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. ` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", this.requestId);\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly\n this.body = options.body;\n if (options.body !== undefined && options.body !== null) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n if (options.spanOptions) {\n this.spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n this.tracingContext = options.tracingContext;\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.decompressResponse,\n this.streamResponseStatusCodes\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\n/**\n * Options to prepare an outgoing HTTP request.\n */\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: `{ \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }`\n * - query-parameter-value in \"string\" format: `{ \"query-parameter-name\": \"query-parameter-value\"}`.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}`\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: `{ \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }`\n * - path-parameter-value in \"string\" format: `{ \"path-parameter-name\": \"path-parameter-value\" }`.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n /**\n * Form data, used to build the request body.\n */\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: Record;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n /**\n * Signal of an abort controller. 
Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Allows keeping track of the progress of uploading the outgoing request.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Allows keeping track of the progress of downloading the incoming response.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n /**\n * Value of the parameter.\n */\n value: any;\n /**\n * Disables URL encoding if set to true.\n */\n skipUrlEncoding: boolean;\n /**\n * Parameter values may contain any other property.\n */\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * May contain other properties.\n */\n [key: string]: any;\n\n /**\n * Options to override XML parsing/building behavior.\n */\n serializerOptions?: SerializerOptions;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js new file mode 100644 index 00000000..2e3b259f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders } from "./httpHeaders"; +import { AbortError } from "@azure/abort-controller"; +import { RestError } from "./restError"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ +export class XhrHttpClient { + sendRequest(request) { + var _a; + const xhr = new XMLHttpRequest(); + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + const abortSignal = request.abortSignal; + if (abortSignal) { + if (abortSignal.aborted) { + return Promise.reject(new AbortError("The operation was aborted.")); + } + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + if (request.formData) { + const formData = request.formData; + const requestForm = new FormData(); + const appendFormValue = (key, value) => { + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm.append(key, value.value, value.options); + } + else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + request.body = requestForm; + request.formData = undefined; + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (const header of request.headers.headersArray()) { + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.size) || request.streamResponseBody ? "blob" : "text"; + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? null : request.body); + if (xhr.responseType === "blob") { + return new Promise((resolve, reject) => { + handleBlobResponse(xhr, request, resolve, reject); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + })); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} +function handleBlobResponse(xhr, request, res, rej) { + xhr.addEventListener("readystatechange", () => { + var _a; + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + if (request.streamResponseBody || ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(xhr.status))) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + else { + xhr.addEventListener("load", () => { + // xhr.response is of Blob type if the request is sent with xhr.responseType === "blob" + // but the status code is not one of the stream response status codes, + // so treat it as text and convert from Blob to text + if (xhr.response) { + // Blob.text() is not supported in IE so using FileReader instead + const reader = new FileReader(); + reader.onload = function (e) { + var _a; + const text = (_a = e.target) === null || _a === void 0 ? void 0 : _a.result; + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: text, + }); + }; + reader.onerror = function (_e) { + rej(reader.error); + }; + reader.readAsText(xhr.response, "UTF-8"); + } + else { + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + }); + } + }); + } + } + }); +} +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", (rawEvent) => listener({ + loadedBytes: rawEvent.loaded, + })); + } +} +// exported locally for testing +export function parseHeaders(xhr) { + const responseHeaders = new HttpHeaders(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", () => reject(new RestError(`Failed to send request to ${request.url}`, RestError.REQUEST_SEND_ERROR, undefined, request))); + const abortError = new AbortError("The operation was aborted."); + xhr.addEventListener("abort", () => reject(abortError)); + xhr.addEventListener("timeout", () => reject(abortError)); +} +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map new file mode 100644 index 00000000..3d020177 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"xhrHttpClient.js","sourceRoot":"","sources":["../../src/xhrHttpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC;AAE7D,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAGrD,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH,MAAM,OAAO,aAAa;IACjB,WAAW,CAAC,OAAwB;;QACzC,MAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;QAEjC,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,MAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACxC,IAAI,WAAW,EAAE;YACf,IAAI,WAAW,CAAC,OAAO,EAAE;gBACvB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC,CAAC;aACrE;YAED,MAAM,QAAQ,GAAG,GAAS,EAAE;gBAC1B,GAAG,CAAC,KAAK,EAAE,CAAC;YACd,CAAC,CAAC;YACF,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;YAChD,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE,GAAG,EAAE;gBAC5C,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,IAAI,EAAE;oBAC1C,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;iBACpD;YACH,CAAC,CAAC,CAAC;SACJ;QAED,mBAAmB,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,gBAAgB,CAAC,CAAC;QAC1D,mBAAmB,CAAC,GAAG,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAErD,IAAI,OAAO,CAAC,QAAQ,EAAE;YACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;YAClC,MAAM,WAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;YACnC,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,KAAU,EAAQ,EAAE;gBACxD,IACE,KAAK;oBACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC;oBACpD,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,EACtD;oBACA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iBACrD;qBAAM;oBACL,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBAChC;YACH,CAAC,CAAC;YACF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;gBAC3C,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qBACxC;iBACF;qBAAM;oBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iBACrC;aACF;YAED,OAAO,CAAC,IAAI,GAAG,WAAW,CAAC;YAC3B,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC7B,MAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YACxD,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACpE,kEAAkE;gBAClE,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;aACxC;SACF;QAED,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;QAC9B,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;QAC9C,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,GAAG,CAAC,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;SACjD;QAED,GAAG,CAAC,YAAY;YACd,CAAA,MAAA,OAAO,CAAC,yBAAyB,0CAAE,IAAI,KAAI,OAAO,CAAC,kBAAkB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;QAE1F,2CAA2C;QAC3C,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAE3D,IAAI,GAAG,CAAC,YAAY,KAAK,MAAM,EAAE;YAC/B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACrC,kBAAkB,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;gBAClD,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;aAAM;YACL,OAAO,IAAI,OAAO,CAAC,UAAU,OAAO,EAAE,MAAM;gBAC1C,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE,CAChC,OAAO,CAAC;oBACN,OAAO;oBACP,MAAM,EAAE,GAAG,CAAC,MAAM;oBAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;oBAC1B,UAAU,EAAE,GAAG,CAAC,YAAY;iBAC7B,CAAC,CACH,CAAC;gBACF,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;CACF;AAED,SAAS,kBAAkB,CACzB,GAAmB,EACnB,OAAwB,EACxB,GAAgF,EAChF,GAA2B;IAE3B,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE,GAAG,EAAE;;QAC5C,wCAAwC;QACxC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,gBAAgB,EAAE;YACtD,IAAI,OAAO,CAAC,kBAAkB,KAAI,MAAA,OAAO,CAA
C,yBAAyB,0CAAE,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA,EAAE;gBACpF,MAAM,QAAQ,GAAG,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;oBACrD,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE;wBAChC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;oBACxB,CAAC,CAAC,CAAC;oBACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;gBAC9C,CAAC,CAAC,CAAC;gBACH,GAAG,CAAC;oBACF,OAAO;oBACP,MAAM,EAAE,GAAG,CAAC,MAAM;oBAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;oBAC1B,QAAQ;iBACT,CAAC,CAAC;aACJ;iBAAM;gBACL,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE;oBAChC,uFAAuF;oBACvF,sEAAsE;oBACtE,oDAAoD;oBACpD,IAAI,GAAG,CAAC,QAAQ,EAAE;wBAChB,iEAAiE;wBACjE,MAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;wBAChC,MAAM,CAAC,MAAM,GAAG,UAAU,CAAC;;4BACzB,MAAM,IAAI,GAAG,MAAA,CAAC,CAAC,MAAM,0CAAE,MAAgB,CAAC;4BACxC,GAAG,CAAC;gCACF,OAAO;gCACP,MAAM,EAAE,GAAG,CAAC,MAAM;gCAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;gCAC1B,UAAU,EAAE,IAAI;6BACjB,CAAC,CAAC;wBACL,CAAC,CAAC;wBACF,MAAM,CAAC,OAAO,GAAG,UAAU,EAAE;4BAC3B,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;wBACpB,CAAC,CAAC;wBACF,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;qBAC1C;yBAAM;wBACL,GAAG,CAAC;4BACF,OAAO;4BACP,MAAM,EAAE,GAAG,CAAC,MAAM;4BAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;yBAC3B,CAAC,CAAC;qBACJ;gBACH,CAAC,CAAC,CAAC;aACJ;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,mBAAmB,CAC1B,GAA8B,EAC9B,QAAoD;IAEpD,IAAI,QAAQ,EAAE;QACZ,GAAG,CAAC,gBAAgB,CAAC,UAAU,EAAE,CAAC,QAAQ,EAAE,EAAE,CAC5C,QAAQ,CAAC;YACP,WAAW,EAAE,QAAQ,CAAC,MAAM;SAC7B,CAAC,CACH,CAAC;KACH;AACH,CAAC;AAED,+BAA+B;AAC/B,MAAM,UAAU,YAAY,CAAC,GAAmB;IAC9C,MAAM,eAAe,GAAG,IAAI,WAAW,EAAE,CAAC;IAC1C,MAAM,WAAW,GAAG,GAAG;SACpB,qBAAqB,EAAE;SACvB,IAAI,EAAE;SACN,KAAK,CAAC,SAAS,CAAC,CAAC;IACpB,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;QAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;QACxC,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC1C,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,CAAC;KAC9C;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,qBAAqB,CAC5B,OAAwB,EACxB,GAAmB,EACnB,MAA0B;IAE1B,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CACjC,MAAM,CACJ,IAAI,SAAS,CACX,6BAA6B,OAAO,CAAC,GAAG,EAAE,EAC1C,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF,CACF,CAAC;IACF,MAAM,UAAU,GAAG,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;IAChE,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;IACxD,GAAG,CAAC,gBAAgB,CAAC,SAAS,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AAC5D,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { TransferProgressEvent, WebResourceLike } from \"./webResource\";\nimport { AbortError } from \"@azure/abort-controller\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\n\n/**\n * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests.\n */\nexport class XhrHttpClient implements HttpClient {\n public sendRequest(request: WebResourceLike): Promise {\n const xhr = new XMLHttpRequest();\n\n if (request.proxySettings) {\n throw new Error(\"HTTP proxy is not supported in browser environment\");\n }\n\n const abortSignal = request.abortSignal;\n if (abortSignal) {\n if (abortSignal.aborted) {\n return Promise.reject(new AbortError(\"The operation was aborted.\"));\n }\n\n const listener = (): void => {\n xhr.abort();\n };\n abortSignal.addEventListener(\"abort\", listener);\n xhr.addEventListener(\"readystatechange\", () => {\n if (xhr.readyState === XMLHttpRequest.DONE) {\n 
abortSignal.removeEventListener(\"abort\", listener);\n }\n });\n }\n\n addProgressListener(xhr.upload, request.onUploadProgress);\n addProgressListener(xhr, request.onDownloadProgress);\n\n if (request.formData) {\n const formData = request.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any): void => {\n if (\n value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")\n ) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n request.body = requestForm;\n request.formData = undefined;\n const contentType = request.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n // browser will automatically apply a suitable content-type header\n request.headers.remove(\"Content-Type\");\n }\n }\n\n xhr.open(request.method, request.url);\n xhr.timeout = request.timeout;\n xhr.withCredentials = request.withCredentials;\n for (const header of request.headers.headersArray()) {\n xhr.setRequestHeader(header.name, header.value);\n }\n\n xhr.responseType =\n request.streamResponseStatusCodes?.size || request.streamResponseBody ? \"blob\" : \"text\";\n\n // tslint:disable-next-line:no-null-keyword\n xhr.send(request.body === undefined ? null : request.body);\n\n if (xhr.responseType === \"blob\") {\n return new Promise((resolve, reject) => {\n handleBlobResponse(xhr, request, resolve, reject);\n rejectOnTerminalEvent(request, xhr, reject);\n });\n } else {\n return new Promise(function (resolve, reject) {\n xhr.addEventListener(\"load\", () =>\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: xhr.responseText,\n })\n );\n rejectOnTerminalEvent(request, xhr, reject);\n });\n }\n }\n}\n\nfunction handleBlobResponse(\n xhr: XMLHttpRequest,\n request: WebResourceLike,\n res: (value: HttpOperationResponse | PromiseLike) => void,\n rej: (reason?: any) => void\n): void {\n xhr.addEventListener(\"readystatechange\", () => {\n // Resolve as soon as headers are loaded\n if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) {\n if (request.streamResponseBody || request.streamResponseStatusCodes?.has(xhr.status)) {\n const blobBody = new Promise((resolve, reject) => {\n xhr.addEventListener(\"load\", () => {\n resolve(xhr.response);\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n res({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n blobBody,\n });\n } else {\n xhr.addEventListener(\"load\", () => {\n // xhr.response is of Blob type if the request is sent with xhr.responseType === \"blob\"\n // but the status code is not one of the stream response status codes,\n // so treat it as text and convert from Blob to text\n if (xhr.response) {\n // Blob.text() is not supported in IE so using FileReader instead\n const reader = new FileReader();\n reader.onload = function (e) {\n const text = e.target?.result as string;\n res({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: text,\n });\n };\n reader.onerror = function (_e) {\n rej(reader.error);\n };\n reader.readAsText(xhr.response, \"UTF-8\");\n } else {\n res({\n request,\n 
status: xhr.status,\n headers: parseHeaders(xhr),\n });\n }\n });\n }\n }\n });\n}\n\nfunction addProgressListener(\n xhr: XMLHttpRequestEventTarget,\n listener?: (progress: TransferProgressEvent) => void\n): void {\n if (listener) {\n xhr.addEventListener(\"progress\", (rawEvent) =>\n listener({\n loadedBytes: rawEvent.loaded,\n })\n );\n }\n}\n\n// exported locally for testing\nexport function parseHeaders(xhr: XMLHttpRequest): HttpHeadersLike {\n const responseHeaders = new HttpHeaders();\n const headerLines = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n for (const line of headerLines) {\n const index = line.indexOf(\":\");\n const headerName = line.slice(0, index);\n const headerValue = line.slice(index + 2);\n responseHeaders.set(headerName, headerValue);\n }\n return responseHeaders;\n}\n\nfunction rejectOnTerminalEvent(\n request: WebResourceLike,\n xhr: XMLHttpRequest,\n reject: (err: any) => void\n): void {\n xhr.addEventListener(\"error\", () =>\n reject(\n new RestError(\n `Failed to send request to ${request.url}`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n const abortError = new AbortError(\"The operation was aborted.\");\n xhr.addEventListener(\"abort\", () => reject(abortError));\n xhr.addEventListener(\"timeout\", () => reject(abortError));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist/index.js b/node_modules/@azure/core-http/dist/index.js new file mode 100644 index 00000000..d2670e36 --- /dev/null +++ b/node_modules/@azure/core-http/dist/index.js @@ -0,0 +1,5536 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var uuid = require('uuid'); +var util = require('util'); +var tslib = require('tslib'); +var xml2js = require('xml2js'); +var abortController = require('@azure/abort-controller'); +var logger$1 = require('@azure/logger'); +var coreAuth = require('@azure/core-auth'); +var os = require('os'); +var http = require('http'); +var https = require('https'); +var tough = require('tough-cookie'); +var tunnel = require('tunnel'); +var stream = require('stream'); +var FormData = require('form-data'); +var node_fetch = require('node-fetch'); +var coreTracing = require('@azure/core-tracing'); + +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var http__namespace = /*#__PURE__*/_interopNamespace(http); +var https__namespace = /*#__PURE__*/_interopNamespace(https); +var tough__namespace = /*#__PURE__*/_interopNamespace(tough); +var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); +var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); +var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. 
+ */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +function isHttpHeadersLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.rawHeaders === "function" && + typeof castObject.clone === "function" && + typeof castObject.get === "function" && + typeof castObject.set === "function" && + typeof castObject.contains === "function" && + typeof castObject.remove === "function" && + typeof castObject.headersArray === "function" && + typeof castObject.headerValues === "function" && + typeof castObject.headerNames === "function" && + typeof castObject.toJson === "function") { + return true; + } + } + return false; +} +/** + * A collection of HTTP header key/value pairs. + */ +class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +function decodeString(value) { + return Buffer.from(value, "base64"); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + /** + * The core-http version + */ + coreHttpVersion: "2.2.5", + /** + * Specifies HTTP. + */ + HTTP: "http:", + /** + * Specifies HTTPS. + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + ServiceUnavailable: 503, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + */ + USER_AGENT: "User-Agent", + }, +}; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +const XML_CHARKEY = "_"; + +// Copyright (c) Microsoft Corporation. 
+const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +const isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +function stripResponse(response) { + const strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +function stripRequest(request) { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +function generateUuid() { + return uuid.v4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +function executePromisesSequentially(promiseFactories, kickstart) { + let result = Promise.resolve(kickstart); + promiseFactories.forEach((promiseFactory) => { + result = result.then(promiseFactory); + }); + return result; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +// eslint-disable-next-line @typescript-eslint/ban-types +function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + // eslint-disable-next-line @typescript-eslint/ban-types + return (cb) => { + promise + .then((data) => { + // eslint-disable-next-line promise/no-callback-in-promise + return cb(undefined, data); + }) + .catch((err) => { + // eslint-disable-next-line promise/no-callback-in-promise + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. 
+ * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return (cb) => { + promise + .then((data) => { + return process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }) + .catch((err) => { + process.nextTick(cb, err); + }); + }; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +function applyMixins(targetCtorParam, sourceCtors) { + const castTargetCtorParam = targetCtorParam; + sourceCtors.forEach((sourceCtor) => { + Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { + castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; + }); + }); +} +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true is it is primitive type, false otherwise. + */ +function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} + +// Copyright (c) Microsoft Corporation. +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. 
+/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +class Serializer { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. + */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value != undefined) { + const valueAsNumber = value; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + const valueAsArray = value; + if (MaxItems != undefined && valueAsArray.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && valueAsArray.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && valueAsArray.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && valueAsArray.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper, object, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". + // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && object == undefined) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper, responseBody, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. 
xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xmlCharKey; + const castResponseBody = responseBody; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (castResponseBody[XML_ATTRKEY] != undefined && + castResponseBody[xmlCharKey] != undefined) { + responseBody = castResponseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = encodeByteArray(value); + } + return returnValue; +} +function serializeBase64UrlType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = bufferToBase64Url(value) || ""; + } + return returnValue; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!isDuration(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + const elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xmlCharKey] = serializedValue; + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by `className`. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + * @param objectName - Name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if (childObject == undefined && + (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? 
`xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName != undefined) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; + parentObject[XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... + + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? _a : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + for (const item of paths) { + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. 
The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + const element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + const tempArray = []; + for (let i = 0; 
i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + const discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + const typeName = mapper.type.uberParent || mapper.type.className; + const indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +function serializeObject(toSerialize) { + const castToSerialize = toSerialize; + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + const array = []; + for (let i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + const dictionary = {}; + for (const property in toSerialize) { + dictionary[property] = serializeObject(castToSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + const result = {}; + for (const key of o) { + result[key] = key; + } + return result; +} +/** + * String enum containing the string types of property mappers. + */ +// eslint-disable-next-line @typescript-eslint/no-redeclare +const MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); + +// Copyright (c) Microsoft Corporation. +function isWebResourceLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.url === "string" && + typeof castObject.method === "string" && + typeof castObject.headers === "object" && + isHttpHeadersLike(castObject.headers) && + typeof castObject.validateRequestProperties === "function" && + typeof castObject.prepare === "function" && + typeof castObject.clone === "function") { + return true; + } + } + return false; +} +/** + * Creates a new WebResource object. 
+ * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +class WebResource { + constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { + this.streamResponseBody = streamResponseBody; + this.streamResponseStatusCodes = streamResponseStatusCodes; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.decompressResponse = decompressResponse; + this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties() { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + } + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method === undefined || + options.method === null || + typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate === undefined || + options.pathTemplate === null || + typeof options.pathTemplate.valueOf() !== "string") && + (options.url === undefined || + options.url === null || + typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. 
Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + const { pathTemplate, pathParameters } = options; + if (typeof pathTemplate !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + const baseUrl = options.baseUrl; + let url = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); + const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); + if (segments && segments.length) { + if (!pathParameters) { + throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); + } + segments.forEach(function (item) { + const pathParamName = item.slice(1, -1); + const pathParam = pathParameters[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); + throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + + ` however, it is not present in parameters: ${stringifiedPathParameters}.` + + `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + + `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); + } + if (typeof pathParam.valueOf() === "string") { + url = url.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (pathParam.skipUrlEncoding) { + url = url.replace(item, pathParam.value); + } + else { + url = url.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + const queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + + `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + + `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + const queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (const queryParamName in queryParameters) { + const queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + const headers = options.headers; + for (const headerName of Object.keys(options.headers)) { + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", this.requestId); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly + this.body = options.body; + if (options.body !== undefined && options.body !== null) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + if (options.spanOptions) { + this.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + this.tracingContext = options.tracingContext; + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + return this; + } + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. 
+ */ + clone() { + const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A class that handles the query portion of a URLBuilder. + */ +class URLQuery { + constructor() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any() { + return Object.keys(this._rawQuery).length > 0; + } + /** + * Get the keys of the query string. + */ + keys() { + return Object.keys(this._rawQuery); + } + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName, parameterValue) { + const caseParameterValue = parameterValue; + if (parameterName) { + if (caseParameterValue !== undefined && caseParameterValue !== null) { + const newValue = Array.isArray(caseParameterValue) + ? caseParameterValue + : caseParameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + } + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + } + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString() { + let result = ""; + for (const parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + const parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + const parameterStrings = []; + for (const parameterValueElement of parameterValue) { + parameterStrings.push(`${parameterName}=${parameterValueElement}`); + } + result += parameterStrings.join("&"); + } + else { + result += `${parameterName}=${parameterValue}`; + } + } + return result; + } + /** + * Parse a URLQuery from the provided text. 
+ */ + static parse(text) { + const result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + let currentState = "ParameterName"; + let parameterName = ""; + let parameterValue = ""; + for (let i = 0; i < text.length; ++i) { + const currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + } +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +class URLBuilder { + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + } + /** + * Get the scheme that has been set in this URL. + */ + getScheme() { + return this._scheme; + } + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + } + /** + * Get the host that has been set in this URL. + */ + getHost() { + return this._host; + } + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port) { + if (port === undefined || port === null || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + } + /** + * Get the port that has been set in this URL. + */ + getPort() { + return this._port; + } + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path) { + if (!path) { + this._path = undefined; + } + else { + const schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + const schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + } + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path) { + if (path) { + let currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + } + /** + * Get the path that has been set in this URL. 
+ */ + getPath() { + return this._path; + } + /** + * Set the query in this URL. + */ + setQuery(query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + } + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + } + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + } + /** + * Get the query in this URL. + */ + getQuery() { + return this._query ? this._query.toString() : undefined; + } + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + set(text, startState) { + const tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + const token = tokenizer.current(); + let tokenPath; + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error(`Unrecognized URLTokenType: ${token.type}`); + } + } + } + } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString() { + let result = ""; + if (this._scheme) { + result += `${this._scheme}://`; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += `:${this._port}`; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += `?${this._query.toString()}`; + } + return result; + } + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + } + /** + * Parses a given string URL into a new {@link URLBuilder}. 
+ */ + static parse(text) { + const result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + } +} +class URLToken { + constructor(text, type) { + this.text = text; + this.type = type; + } + static scheme(text) { + return new URLToken(text, "SCHEME"); + } + static host(text) { + return new URLToken(text, "HOST"); + } + static port(text) { + return new URLToken(text, "PORT"); + } + static path(text) { + return new URLToken(text, "PATH"); + } + static query(text) { + return new URLToken(text, "QUERY"); + } +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +function isAlphaNumericCharacter(character) { + const characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +class URLTokenizer { + constructor(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current() { + return this._currentToken; + } + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next() { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + } + } + return !!this._currentToken; + } +} +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + let result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. 
+ */ +function peekCharacters(tokenizer, charactersToPeek) { + let endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + let result = ""; + while (hasCurrentCharacter(tokenizer)) { + const currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. + */ +function readUntilCharacter(tokenizer, ...terminatingCharacters) { + return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); +} +function nextScheme(tokenizer) { + const scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + const host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + const port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + const path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + 
tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + const query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} + +// Copyright (c) Microsoft Corporation. +function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel__namespace.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpOverHttps(tunnelOptions); + } + else { + return tunnel__namespace.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} + +// Copyright (c) Microsoft Corporation. +const RedactedString = "REDACTED"; +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +class Sanitizer { + constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { + allowedHeaderNames = Array.isArray(allowedHeaderNames) + ? 
defaultAllowedHeaderNames.concat(allowedHeaderNames) + : defaultAllowedHeaderNames; + allowedQueryParameters = Array.isArray(allowedQueryParameters) + ? defaultAllowedQueryParameters.concat(allowedQueryParameters) + : defaultAllowedQueryParameters; + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "_headersMap") { + return this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(value) { + return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); + } + sanitizeQuery(value) { + return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); + } + sanitizeObject(value, allowedKeys, accessor) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (allowedKeys.has(k.toLowerCase())) { + sanitized[k] = accessor(value, k); + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const urlBuilder = URLBuilder.parse(value); + const queryString = urlBuilder.getQuery(); + if (!queryString) { + return value; + } + const query = URLQuery.parse(queryString); + for (const k of query.keys()) { + if (!this.allowedQueryParameters.has(k.toLowerCase())) { + query.set(k, RedactedString); + } + } + urlBuilder.setQuery(query.toString()); + return urlBuilder.toString(); + } +} + +// Copyright (c) Microsoft Corporation. +const custom = util.inspect.custom; + +// Copyright (c) Microsoft Corporation. +const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ +class RestError extends Error { + constructor(message, code, statusCode, request, response) { + super(message); + this.name = "RestError"; + this.code = code; + this.statusCode = statusCode; + this.request = request; + this.response = response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. 
Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; + +// Copyright (c) Microsoft Corporation. +const logger = logger$1.createClientLogger("core-http"); + +// Copyright (c) Microsoft Corporation. +function getCachedAgent(isHttps, agentCache) { + return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; +} +class ReportTransform extends stream.Transform { + constructor(progressCallback) { + super(); + this.progressCallback = progressCallback; + this.loadedBytes = 0; + } + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(undefined); + } +} +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + this.cookieJar = new tough__namespace.CookieJar(undefined, { looseMode: true }); + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. 
+ */ + async sendRequest(httpRequest) { + var _a; + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); + } + const abortController$1 = new abortController.AbortController(); + let abortListener; + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new abortController.AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController$1.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(() => { + abortController$1.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + const formData = httpRequest.formData; + const requestForm = new FormData__default["default"](); + const appendFormValue = (key, value) => { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm.append(key, value.value, value.options); + } + else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm; + httpRequest.formData = undefined; + const contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm.getBoundary === "function") { + httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + let body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + const onUploadProgress = httpRequest.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const platformSpecificRequestInit = await this.prepareRequest(httpRequest); + const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); + let operationResponse; + try { + const response = await this.fetch(httpRequest.url, requestInit); + const headers = parseHeaders(response.headers); + const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || + httpRequest.streamResponseBody; + operationResponse = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: streaming + ? response.body + : undefined, + bodyAsText: !streaming ? 
await response.text() : undefined, + }; + const onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + const responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + const length = parseInt(headers.get("Content-Length")) || undefined; + if (length) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length }); + } + } + } + await this.processRequest(operationResponse); + return operationResponse; + } + catch (error) { + const fetchError = error; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new abortController.AbortError("The operation was aborted."); + } + throw fetchError; + } + finally { + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch((e) => { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + getOrCreateAgent(httpRequest) { + var _a; + const isHttps = isUrlHttps(httpRequest.url); + // At the moment, proxy settings and keepAlive are mutually + // exclusive because the 'tunnel' library currently lacks the + // ability to create a proxy with keepAlive turned on. + if (httpRequest.proxySettings) { + const { host, port, username, password } = httpRequest.proxySettings; + const key = `${host}:${port}:${username}:${password}`; + const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? _a : {}; + let agent = getCachedAgent(isHttps, proxyAgents); + if (agent) { + return agent; + } + const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + agent = tunnel.agent; + if (tunnel.isHttps) { + proxyAgents.httpsAgent = tunnel.agent; + } + else { + proxyAgents.httpAgent = tunnel.agent; + } + this.proxyAgentMap.set(key, proxyAgents); + return agent; + } + else if (httpRequest.keepAlive) { + let agent = getCachedAgent(isHttps, this.keepAliveAgents); + if (agent) { + return agent; + } + const agentOptions = { + keepAlive: httpRequest.keepAlive, + }; + if (isHttps) { + agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); + } + else { + agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); + } + return agent; + } + else { + return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; + } + } + /** + * Uses `node-fetch` to perform the request. 
+ */ + // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs + async fetch(input, init) { + return node_fetch__default["default"](input, init); + } + /** + * Prepares a request based on the provided web resource. + */ + async prepareRequest(httpRequest) { + const requestInit = {}; + if (this.cookieJar && !httpRequest.headers.get("Cookie")) { + const cookieString = await new Promise((resolve, reject) => { + this.cookieJar.getCookieString(httpRequest.url, (err, cookie) => { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + }); + httpRequest.headers.set("Cookie", cookieString); + } + // Set the http(s) agent + requestInit.agent = this.getOrCreateAgent(httpRequest); + requestInit.compress = httpRequest.decompressResponse; + return requestInit; + } + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ + async processRequest(operationResponse) { + if (this.cookieJar) { + const setCookieHeader = operationResponse.headers.get("Set-Cookie"); + if (setCookieHeader !== undefined) { + await new Promise((resolve, reject) => { + this.cookieJar.setCookie(setCookieHeader, operationResponse.request.url, { ignoreError: true }, (err) => { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + }); + } + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +exports.HttpPipelineLogLevel = void 0; +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); + +// Copyright (c) Microsoft Corporation. +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +function operationOptionsToRequestOptionsBase(opts) { + const { requestOptions, tracingOptions } = opts, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); + let result = additionalOptions; + if (requestOptions) { + result = Object.assign(Object.assign({}, result), requestOptions); + } + if (tracingOptions) { + result.tracingContext = tracingOptions.tracingContext; + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + result.spanOptions = tracingOptions === null || tracingOptions === void 0 ? void 0 : tracingOptions.spanOptions; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * The base class from which all request policies derive. + */ +class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. 
+ */
+ _options) {
+ this._nextPolicy = _nextPolicy;
+ this._options = _options;
+ }
+ /**
+ * Get whether or not a log with the provided log level should be logged.
+ * @param logLevel - The log level of the log that will be logged.
+ * @returns Whether or not a log with the provided log level should be logged.
+ */
+ shouldLog(logLevel) {
+ return this._options.shouldLog(logLevel);
+ }
+ /**
+ * Attempt to log the provided message to the provided logger. If no logger was provided or if
+ * the log level does not meet the logger's threshold, then nothing will be logged.
+ * @param logLevel - The log level of this log.
+ * @param message - The message of this log.
+ */
+ log(logLevel, message) {
+ this._options.log(logLevel, message);
+ }
+}
+/**
+ * Optional properties that can be used when creating a RequestPolicy.
+ */
+class RequestPolicyOptions {
+ constructor(_logger) {
+ this._logger = _logger;
+ }
+ /**
+ * Get whether or not a log with the provided log level should be logged.
+ * @param logLevel - The log level of the log that will be logged.
+ * @returns Whether or not a log with the provided log level should be logged.
+ */
+ shouldLog(logLevel) {
+ return (!!this._logger &&
+ logLevel !== exports.HttpPipelineLogLevel.OFF &&
+ logLevel <= this._logger.minimumLogLevel);
+ }
+ /**
+ * Attempt to log the provided message to the provided logger. If no logger was provided or if
+ * the log level does not meet the logger's threshold, then nothing will be logged.
+ * @param logLevel - The log level of this log.
+ * @param message - The message of this log.
+ */
+ log(logLevel, message) {
+ if (this._logger && this.shouldLog(logLevel)) {
+ this._logger.log(logLevel, message);
+ }
+ }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed
+// by the xml2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536
+// By creating a new copy of the settings each time we instantiate the parser,
+// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally.
+const xml2jsDefaultOptionsV2 = {
+ explicitCharkey: false,
+ trim: false,
+ normalize: false,
+ normalizeTags: false,
+ attrkey: XML_ATTRKEY,
+ explicitArray: true,
+ ignoreAttrs: false,
+ mergeAttrs: false,
+ explicitRoot: true,
+ validator: undefined,
+ xmlns: false,
+ explicitChildren: false,
+ preserveChildrenOrder: false,
+ childkey: "$$",
+ charsAsChildren: false,
+ includeWhiteChars: false,
+ async: false,
+ strict: true,
+ attrNameProcessors: undefined,
+ attrValueProcessors: undefined,
+ tagNameProcessors: undefined,
+ valueProcessors: undefined,
+ rootName: "root",
+ xmldec: {
+ version: "1.0",
+ encoding: "UTF-8",
+ standalone: true,
+ },
+ doctype: undefined,
+ renderOpts: {
+ pretty: true,
+ indent: " ",
+ newline: "\n",
+ },
+ headless: false,
+ chunkSize: 10000,
+ emptyTag: "",
+ cdata: false,
+};
+// The xml2js settings for general XML parsing operations.
+const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2);
+xml2jsParserSettings.explicitArray = false;
+// The xml2js settings for general XML building operations.
+const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2);
+xml2jsBuilderSettings.explicitArray = false;
+xml2jsBuilderSettings.renderOpts = {
+ pretty: false,
+};
+/**
+ * Converts given JSON object to XML string
+ * @param obj - JSON object to be converted into XML string
+ * @param opts - Options that govern the parsing of given JSON object
+ */
+function stringifyXML(obj, opts = {}) {
+ var _a;
+ xml2jsBuilderSettings.rootName = opts.rootName;
+ xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY;
+ const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings);
+ return builder.buildObject(obj);
+}
+/**
+ * Converts given XML string into JSON
+ * @param str - String containing the XML content to be parsed into JSON
+ * @param opts - Options that govern the parsing of given xml string
+ */
+function parseXML(str, opts = {}) {
+ var _a;
+ xml2jsParserSettings.explicitRoot = !!opts.includeRoot;
+ xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY;
+ const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings);
+ return new Promise((resolve, reject) => {
+ if (!str) {
+ reject(new Error("Document is empty"));
+ }
+ else {
+ xmlParser.parseString(str, (err, res) => {
+ if (err) {
+ reject(err);
+ }
+ else {
+ resolve(res);
+ }
+ });
+ }
+ });
+}
+
+// Copyright (c) Microsoft Corporation.
+/**
+ * Create a new deserialization RequestPolicyCreator that will deserialize HTTP response bodies as they
+ * pass through the HTTP pipeline.
+ */
+function deserializationPolicy(deserializationContentTypes, parsingOptions) {
+ return {
+ create: (nextPolicy, options) => {
+ return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions);
+ },
+ };
+}
+const defaultJsonContentTypes = ["application/json", "text/json"];
+const defaultXmlContentTypes = ["application/xml", "application/atom+xml"];
+const DefaultDeserializationOptions = {
+ expectedContentTypes: {
+ json: defaultJsonContentTypes,
+ xml: defaultXmlContentTypes,
+ },
+};
+/**
+ * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the
+ * HTTP pipeline.
+ */
+class DeserializationPolicy extends BaseRequestPolicy {
+ constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) {
+ var _a;
+ super(nextPolicy, requestPolicyOptions);
+ this.jsonContentTypes =
+ (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;
+ this.xmlContentTypes =
+ (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;
+ this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ?
_a : XML_CHARKEY; + } + async sendRequest(request) { + return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { + xmlCharKey: this.xmlCharKey, + })); + } +} +function getOperationResponse(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationSpec = request.operationSpec; + if (operationSpec) { + const operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const shouldDeserialize = parsedResponse.request.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationSpec = parsedResponse.request.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponse(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (innerError) { + const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders", options); + } + } + return parsedResponse; + }); +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; + const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) || + parsedResponse.request.streamResponseBody; + const initialErrorMessage = streaming + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. + // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let parsedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedBody === "object" ? 
parsedBody[defaultBodyMapper.xmlElementName] : []; + } + parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); + } + const internalError = parsedBody.error || parsedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = parsedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { + var _a; + const errorHandler = (err) => { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); + return Promise.reject(e); + }; + const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || + operationResponse.request.streamResponseBody; + if (!streaming && operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + return new Promise((resolve) => { + operationResponse.parsedBody = JSON.parse(text); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + return parseXML(text, opts) + .then((body) => { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} + +// Copyright (c) Microsoft Corporation. +/** + * By default, HTTP connections are maintained for future requests. + */ +const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. 
+ *
+ * @param nextPolicy -
+ * @param options -
+ * @param keepAliveOptions -
+ */
+ constructor(nextPolicy, options, keepAliveOptions) {
+ super(nextPolicy, options);
+ this.keepAliveOptions = keepAliveOptions;
+ }
+ /**
+ * Sends out request.
+ *
+ * @param request -
+ * @returns
+ */
+ async sendRequest(request) {
+ request.keepAlive = this.keepAliveOptions.enable;
+ return this._nextPolicy.sendRequest(request);
+ }
+}
+
+// Copyright (c) Microsoft Corporation.
+/**
+ * Methods that are allowed to follow redirects 301 and 302
+ */
+const allowedRedirect = ["GET", "HEAD"];
+const DefaultRedirectOptions = {
+ handleRedirects: true,
+ maxRetries: 20,
+};
+/**
+ * Creates a redirect policy, which resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307.
+ * @param maximumRetries - Maximum number of redirects to follow.
+ * @returns An instance of the {@link RedirectPolicy}
+ */
+function redirectPolicy(maximumRetries = 20) {
+ return {
+ create: (nextPolicy, options) => {
+ return new RedirectPolicy(nextPolicy, options, maximumRetries);
+ },
+ };
+}
+/**
+ * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307.
+ */
+class RedirectPolicy extends BaseRequestPolicy {
+ constructor(nextPolicy, options, maxRetries = 20) {
+ super(nextPolicy, options);
+ this.maxRetries = maxRetries;
+ }
+ sendRequest(request) {
+ return this._nextPolicy
+ .sendRequest(request)
+ .then((response) => handleRedirect(this, response, 0));
+ }
+}
+function handleRedirect(policy, response, currentRetries) {
+ const { request, status } = response;
+ const locationHeader = response.headers.get("location");
+ if (locationHeader &&
+ (status === 300 ||
+ (status === 301 && allowedRedirect.includes(request.method)) ||
+ (status === 302 && allowedRedirect.includes(request.method)) ||
+ (status === 303 && request.method === "POST") ||
+ status === 307) &&
+ (!policy.maxRetries || currentRetries < policy.maxRetries)) {
+ const builder = URLBuilder.parse(request.url);
+ builder.setPath(locationHeader);
+ request.url = builder.toString();
+ // POST request with Status code 303 should be converted into a
+ // redirected GET request if the redirect url is present in the location header
+ if (status === 303) {
+ request.method = "GET";
+ delete request.body;
+ }
+ return policy._nextPolicy
+ .sendRequest(request)
+ .then((res) => handleRedirect(policy, res, currentRetries + 1));
+ }
+ return Promise.resolve(response);
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+const DEFAULT_CLIENT_RETRY_COUNT = 3;
+// intervals are in ms
+const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;
+const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;
+const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;
+function isNumber(n) {
+ return typeof n === "number";
+}
+/**
+ * @internal
+ * Determines if the operation should be retried.
+ *
+ * @param retryLimit - Specifies the max number of retries.
+ * @param predicate - Initial check on whether to retry based on given responses or errors
+ * @param retryData - The retry data.
+ * @returns True if the operation qualifies for a retry; false otherwise.
+ */
+function shouldRetry(retryLimit, predicate, retryData, response, error) {
+ if (!predicate(response, error)) {
+ return false;
+ }
+ return retryData.retryCount < retryLimit;
+}
+/**
+ * @internal
+ * Updates the retry data for the next attempt.
+ *
+ * @param retryOptions - specifies retry interval, and its lower bound and upper bound.
+ * @param retryData - The retry data.
+ * @param err - The operation's error, if any.
+ */
+function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) {
+ if (err) {
+ if (retryData.error) {
+ err.innerError = retryData.error;
+ }
+ retryData.error = err;
+ }
+ // Adjust retry count
+ retryData.retryCount++;
+ // Adjust retry interval
+ let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;
+ const boundedRandDelta = retryOptions.retryInterval * 0.8 +
+ Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));
+ incrementDelta *= boundedRandDelta;
+ retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval);
+ return retryData;
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+/**
+ * Helper TypeGuard that checks if the value is not null or undefined.
+ * @param thing - Anything
+ * @internal
+ */
+function isDefined(thing) {
+ return typeof thing !== "undefined" && thing !== null;
+}
+
+// Copyright (c) Microsoft Corporation.
+const StandardAbortMessage$1 = "The operation was aborted.";
+/**
+ * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds.
+ * @param delayInMs - The number of milliseconds to be delayed.
+ * @param value - The value to be resolved with after a timeout of delayInMs milliseconds.
+ * @param options - The options for delay - currently abort options
+ * @param abortSignal - The abortSignal associated with containing operation.
+ * @param abortErrorMsg - The abort error message associated with containing operation.
+ * @returns - Resolved promise
+ */
+function delay(delayInMs, value, options) {
+ return new Promise((resolve, reject) => {
+ let timer = undefined;
+ let onAborted = undefined;
+ const rejectOnAbort = () => {
+ return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage$1));
+ };
+ const removeListeners = () => {
+ if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) {
+ options.abortSignal.removeEventListener("abort", onAborted);
+ }
+ };
+ onAborted = () => {
+ if (isDefined(timer)) {
+ clearTimeout(timer);
+ }
+ removeListeners();
+ return rejectOnAbort();
+ };
+ if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) {
+ return rejectOnAbort();
+ }
+ timer = setTimeout(() => {
+ removeListeners();
+ resolve(value);
+ }, delayInMs);
+ if (options === null || options === void 0 ? void 0 : options.abortSignal) {
+ options.abortSignal.addEventListener("abort", onAborted);
+ }
+ });
+}
+
+// Copyright (c) Microsoft Corporation.
+/**
+ * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time.
+ * @param retryCount - Maximum number of retries.
+ * @param retryInterval - Base time between retries.
+ * @param maxRetryInterval - Maximum time to wait between retries.
+ */
+function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) {
+ return {
+ create: (nextPolicy, options) => {
+ return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval);
+ },
+ };
+}
+/**
+ * Describes the Retry Mode type.
Currently supporting only Exponential. + */ +exports.RetryMode = void 0; +(function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; +})(exports.RetryMode || (exports.RetryMode = {})); +const DefaultRetryOptions = { + maxRetries: DEFAULT_CLIENT_RETRY_COUNT, + retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, +}; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ + constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => retry$1(this, request, response)) + .catch((error) => retry$1(this, request, error.response, undefined, error)); + } +} +async function retry$1(policy, request, response, retryData, requestError) { + function shouldPolicyRetry(responseParam) { + const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; + if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { + return false; + } + if (statusCode === undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + return true; + } + retryData = updateRetryData({ + retryInterval: policy.retryInterval, + minRetryInterval: 0, + maxRetryInterval: policy.maxRetryInterval, + }, retryData, requestError); + const isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { + logger.info(`Retrying request in ${retryData.retryInterval}`); + try { + await delay(retryData.retryInterval); + const res = await policy._nextPolicy.sendRequest(request.clone()); + return retry$1(policy, request, res, retryData); + } + catch (err) { + return retry$1(policy, request, response, retryData, err); + } + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + const err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + throw err; + } + else { + return response; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. 
+ * @returns An instance of the {@link LogPolicy} + */ +function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger$1; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * Gets the list of status codes for streaming responses. 
+ * @internal + */ +function getStreamResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. +function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +exports.QueryCollectionFormat = void 0; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. 
+ */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. 
+ */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. 
+ */ +function bearerTokenAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + const getToken = createTokenCycler(credential, scopes /* , options */); + class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const { token } = await getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + }); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return { + create: (nextPolicy, options) => { + return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ +function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + create: (nextPolicy, options) => { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _requestIdHeaderName) { + super(nextPolicy, options); + this._requestIdHeaderName = _requestIdHeaderName; + } + sendRequest(request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, request.requestId); + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new NodeFetchHttpClient(); + } + return cachedHttpClient; +} + +// Copyright (c) Microsoft Corporation. 
+function ndJsonPolicy() { + return { + create: (nextPolicy, options) => { + return new NdJsonPolicy(nextPolicy, options); + }, + }; +} +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends a request. + */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, + }; +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { + super(nextPolicy, options); + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; + } + sendRequest(request) { + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +function rpRegistrationPolicy(retryTimeout = 30) { + return { + create: (nextPolicy, options) => { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +class RPRegistrationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _retryTimeout = 30) { + super(nextPolicy, options); + this._retryTimeout = _retryTimeout; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => registerIfNeeded(this, request, response)); + } +} +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + const rpName = checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + const urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(() => false) + .then((registrationStatus) => { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. 
+ request.headers.set("x-ms-client-request-id", generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param originalRequest - The original request + * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. + * @returns A new request object with desired headers. + */ +function getRequestEssentials(originalRequest, reuseUrlToo = false) { + const reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param body - The response body received after making the original request. + * @returns The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + let result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param url - The original request url + * @returns The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + let result; + const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + } + return result; +} +/** + * Registers the given provider. + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param provider - The provider name to be registered. + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + */ +async function registerRP(policy, urlPrefix, provider, originalRequest) { + const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; + const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + const response = await policy._nextPolicy.sendRequest(reqOptions); + if (response.status !== 200) { + throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); + } + return getRegistrationStatus(policy, getUrl, originalRequest); +} +/** + * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param url - The request url for polling + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns True if RP Registration is successful. + */ +async function getRegistrationStatus(policy, url, originalRequest) { + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + const res = await policy._nextPolicy.sendRequest(reqOptions); + const obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + await delay(policy._retryTimeout * 1000); + return getRegistrationStatus(policy, url, originalRequest); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +function signingPolicy(authenticationProvider) { + return { + create: (nextPolicy, options) => { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + */ +class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +class SystemErrorRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? 
retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .catch((error) => retry(this, request, error.response, error)); + } +} +async function retry(policy, request, operationResponse, err, retryData) { + retryData = updateRetryData(policy, retryData, err); + function shouldPolicyRetry(_response, error) { + if (error && + error.code && + (error.code === "ETIMEDOUT" || + error.code === "ESOCKETTIMEDOUT" || + error.code === "ECONNREFUSED" || + error.code === "ECONNRESET" || + error.code === "ENOENT")) { + return true; + } + return false; + } + if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { + // If previous operation ended with an error and the policy allows a retry, do that + try { + await delay(retryData.retryInterval); + return policy._nextPolicy.sendRequest(request.clone()); + } + catch (nestedErr) { + return retry(policy, request, operationResponse, nestedErr, retryData); + } + } + else { + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return Promise.reject(retryData.error); + } + return operationResponse; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Maximum number of retries for the throttling retry policy + */ +const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; + +// Copyright (c) Microsoft Corporation. +const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +function throttlingRetryPolicy() { + return { + create: (nextPolicy, options) => { + return new ThrottlingRetryPolicy(nextPolicy, options); + }, + }; +} +const StandardAbortMessage = "The operation was aborted."; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +class ThrottlingRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _handleResponse) { + super(nextPolicy, options); + this.numberOfRetries = 0; + this._handleResponse = _handleResponse || this._defaultResponseHandler; + } + async sendRequest(httpRequest) { + const response = await this._nextPolicy.sendRequest(httpRequest.clone()); + if (response.status !== StatusCodes.TooManyRequests && + response.status !== StatusCodes.ServiceUnavailable) { + return response; + } + else { + return this._handleResponse(httpRequest, response); + } + } + async _defaultResponseHandler(httpRequest, httpResponse) { + var _a; + const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (retryAfterHeader) { + const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (delayInMs) { + this.numberOfRetries += 1; + await delay(delayInMs, undefined, { + abortSignal: httpRequest.abortSignal, + abortErrorMsg: StandardAbortMessage, + }); + if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw new abortController.AbortError(StandardAbortMessage); + } + if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { + return this.sendRequest(httpRequest); + } + else { + return this._nextPolicy.sendRequest(httpRequest); + } + } + } + return httpResponse; + } + static parseRetryAfterHeader(headerValue) { + const retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + } + static parseDateRetryAfterHeader(headerValue) { + try { + const now = Date.now(); + const date = Date.parse(headerValue); + const diff = date - now; + return Number.isNaN(diff) ? undefined : diff; + } + catch (error) { + return undefined; + } + } +} + +// Copyright (c) Microsoft Corporation. +const createSpan = coreTracing.createSpanFunction({ + packagePrefix: "", + namespace: "", +}); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +function tracingPolicy(tracingOptions = {}) { + return { + create(nextPolicy, options) { + return new TracingPolicy(nextPolicy, options, tracingOptions); + }, + }; +} +/** + * A policy that wraps outgoing requests with a tracing span. 
+ */ +class TracingPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, tracingOptions) { + super(nextPolicy, options); + this.userAgent = tracingOptions.userAgent; + } + async sendRequest(request) { + if (!request.tracingContext) { + return this._nextPolicy.sendRequest(request); + } + const span = this.tryCreateSpan(request); + if (!span) { + return this._nextPolicy.sendRequest(request); + } + try { + const response = await this._nextPolicy.sendRequest(request); + this.tryProcessResponse(span, response); + return response; + } + catch (err) { + this.tryProcessError(span, err); + throw err; + } + } + tryCreateSpan(request) { + var _a; + try { + // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. + // We can pass this as a separate parameter once we upgrade to the latest core-tracing. + const { span } = createSpan(`HTTP ${request.method}`, { + tracingOptions: { + spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), + tracingContext: request.tracingContext, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? void 0 : _a.getValue(Symbol.for("az.namespace")); + if (typeof namespaceFromContext === "string") { + span.setAttribute("az.namespace", namespaceFromContext); + } + span.setAttributes({ + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }); + if (this.userAgent) { + span.setAttribute("http.user_agent", this.userAgent); + } + // set headers + const spanContext = span.spanContext(); + const traceParentHeader = coreTracing.getTraceParentHeader(spanContext); + if (traceParentHeader && coreTracing.isSpanContextValid(spanContext)) { + request.headers.set("traceparent", traceParentHeader); + const traceState = spanContext.traceState && spanContext.traceState.serialize(); + // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent + if (traceState) { + request.headers.set("tracestate", traceState); + } + } + return span; + } + catch (error) { + logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); + return undefined; + } + } + tryProcessError(span, err) { + try { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: err.message, + }); + if (err.statusCode) { + span.setAttribute("http.status_code", err.statusCode); + } + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } + tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.OK, + }); + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ServiceClient sends service requests and receives responses. + */ +class ServiceClient { + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. 
+ */ + constructor(credentials, + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ + options) { + if (!options) { + options = {}; + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || getCachedDefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + let requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + logger.info("ServiceClient: using custom request policies"); + requestPolicyFactories = options.requestPolicyFactories; + } + else { + let authPolicyFactory = undefined; + if (coreAuth.isTokenCredential(credentials)) { + logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); + // Create a wrapped RequestPolicyFactory here so that we can provide the + // correct scope to the BearerTokenAuthenticationPolicy at the first time + // one is requested. This is needed because generated ServiceClient + // implementations do not set baseUri until after ServiceClient's constructor + // is finished, leaving baseUri empty at the time when it is needed to + // build the correct scope name. + const wrappedPolicyFactory = () => { + let bearerTokenPolicyFactory = undefined; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const serviceClient = this; + const serviceClientOptions = options; + return { + create(nextPolicy, createOptions) { + const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); + if (!credentialScopes) { + throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); + } + if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { + bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); + } + return bearerTokenPolicyFactory.create(nextPolicy, createOptions); + }, + }; + }; + authPolicyFactory = wrappedPolicyFactory(); + } + else if (credentials && typeof credentials.signRequest === "function") { + logger.info("ServiceClient: creating signing policy from provided credentials"); + authPolicyFactory = signingPolicy(credentials); + } + else if (credentials !== undefined && credentials !== null) { + throw new Error("The credentials argument must implement the TokenCredential interface"); + } + logger.info("ServiceClient: using default request policies"); + requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); + if (options.requestPolicyFactories) { + // options.requestPolicyFactories can also be a function that manipulates + // the default requestPolicyFactories array + const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. 
+ */ + sendRequest(options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + let httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + let httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. + */ + async sendOperationRequest(operationArguments, operationSpec, callback) { + var _a; + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + const httpRequest = new WebResource(); + let result; + try { + const baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + const requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (const queryParameter of operationSpec.queryParameters) { + let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue !== undefined && queryParameterValue !== null) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + // The collection is empty, no need to try serializing the current 
queryParam + continue; + } + else { + for (const index in queryParameterValue) { + const item = queryParameterValue[index]; + queryParameterValue[index] = + item === undefined || item === null ? "" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (const index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + const contentType = operationSpec.contentType || this.requestContentType; + if (contentType && operationSpec.requestBody) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue !== undefined && headerValue !== null) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + const options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (const customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + if (options.spanOptions) { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
+ httpRequest.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + httpRequest.tracingContext = options.tracingContext; + } + if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { + httpRequest.shouldDeserialize = options.shouldDeserialize; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseStatusCodes === undefined) { + httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); + } + let rawResponse; + let sendRequestError; + try { + rawResponse = await this.sendRequest(httpRequest); + } + catch (error) { + sendRequestError = error; + } + if (sendRequestError) { + if (sendRequestError.response) { + sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || + operationSpec.responses["default"]); + } + result = Promise.reject(sendRequestError); + } + else { + result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); + } + } + catch (error) { + result = Promise.reject(error); + } + const cb = callback; + if (cb) { + result + .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) + .catch((err) => cb(err)); + } + return result; + } +} +function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + var _a, _b, _c, _d, _e, _f; + const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; + const updatedOptions = { + rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", + includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, + }; + const xmlCharKey = serializerOptions.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { + const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === MapperType.String && + (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); + } + } + } +} +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + let result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(authPolicyFactory, options) { + const factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (authPolicyFactory) { + factories.push(authPolicyFactory); + } + const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + factories.push(redirectPolicy()); + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + if (isNode) { + factories.push(proxyPolicy(options.proxySettings)); + } + factories.push(logPolicy({ logger: logger.info })); + return factories; +} +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. 
+ * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { + const requestPolicyFactories = []; + if (pipelineOptions.sendStreamingJson) { + requestPolicyFactories.push(ndJsonPolicy()); + } + let userAgentValue = undefined; + if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { + const userAgentInfo = []; + userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); + // Add the default user agent value if it isn't already specified + // by the userAgentPrefix option. + const defaultUserAgentInfo = getDefaultUserAgentValue(); + if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { + userAgentInfo.push(defaultUserAgentInfo); + } + userAgentValue = userAgentInfo.join(" "); + } + const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); + const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); + const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); + if (isNode) { + requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); + } + const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); + const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); + requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); + if (redirectOptions.handleRedirects) { + requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); + } + if (authPolicyFactory) { + requestPolicyFactories.push(authPolicyFactory); + } + requestPolicyFactories.push(logPolicy(loggingOptions)); + if (isNode && pipelineOptions.decompressResponse === false) { + requestPolicyFactories.push(disableResponseDecompressionPolicy()); + } + return { + httpClient: pipelineOptions.httpClient, + requestPolicyFactories, + }; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var _a; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); + if (propertyValue !== undefined && propertyValue !== null) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent !== undefined && parent !== null && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. 
+ */ +function flattenResponse(_response, responseSpec) { + const parsedHeaders = _response.parsedHeaders; + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const addOperationResponse = (obj) => { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + const typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (typeName === "Sequence" || isPageableResponse) { + const arrayResponse = [...(_response.parsedBody || [])]; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); +} +function getCredentialScopes(options, baseUri) { + if (options === null || options === void 0 ? void 0 : options.credentialScopes) { + const scopes = options.credentialScopes; + return Array.isArray(scopes) + ? scopes.map((scope) => new URL(scope).toString()) + : new URL(scopes).toString(); + } + if (baseUri) { + return `${baseUri}/.default`; + } + return undefined; +} + +// Copyright (c) Microsoft Corporation. +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +function createSpanFunction(args) { + return coreTracing.createSpanFunction(args); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Defines the default token refresh buffer duration. + */ +const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +class ExpiringAccessTokenCache { + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { + this.cachedToken = undefined; + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. 
+ */ + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. + */ + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +class AccessTokenRefresher { + constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { + this.credential = credential; + this.scopes = scopes; + this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; + this.lastCalled = 0; + } + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady() { + // We're only ready for a new refresh if the required milliseconds have passed. + return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); + } + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + async getToken(options) { + this.lastCalled = Date.now(); + const token = await this.credential.getToken(this.scopes, options); + this.promise = undefined; + return token || undefined; + } + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. + */ + refresh(options) { + if (!this.promise) { + this.promise = this.getToken(options); + } + return this.promise; + } +} + +// Copyright (c) Microsoft Corporation. +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +class BasicAuthenticationCredentials { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource) { + const credentials = `${this.userName}:${this.password}`; + const encodedCredentials = `${this.authorizationScheme} ${encodeString(credentials)}`; + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Authenticates to a service using an API key. + */ +class ApiKeyCredentials { + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. + */ + signRequest(webResource) { + if (!webResource) { + return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (const headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error(`url cannot be null in the request object.`)); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (const key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += `${key}=${this.inQuery[key]}`; + } + } + return Promise.resolve(webResource); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey) { + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + const options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + super(options); + } +} + +Object.defineProperty(exports, 'isTokenCredential', { + enumerable: true, + get: function () { return coreAuth.isTokenCredential; } +}); +exports.AccessTokenRefresher = AccessTokenRefresher; +exports.ApiKeyCredentials = ApiKeyCredentials; +exports.BaseRequestPolicy = BaseRequestPolicy; +exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; +exports.Constants = Constants; +exports.DefaultHttpClient = NodeFetchHttpClient; +exports.ExpiringAccessTokenCache = ExpiringAccessTokenCache; +exports.HttpHeaders = HttpHeaders; +exports.MapperType = MapperType; +exports.RequestPolicyOptions = RequestPolicyOptions; +exports.RestError = RestError; +exports.Serializer = Serializer; +exports.ServiceClient = ServiceClient; +exports.TopicCredentials = TopicCredentials; +exports.URLBuilder = URLBuilder; +exports.URLQuery = URLQuery; +exports.WebResource = WebResource; +exports.XML_ATTRKEY = XML_ATTRKEY; +exports.XML_CHARKEY = XML_CHARKEY; +exports.applyMixins = applyMixins; +exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; +exports.createPipelineFromOptions = createPipelineFromOptions; +exports.createSpanFunction = createSpanFunction; +exports.delay = delay; +exports.deserializationPolicy = deserializationPolicy; +exports.deserializeResponseBody = deserializeResponseBody; +exports.disableResponseDecompressionPolicy = disableResponseDecompressionPolicy; +exports.encodeUri = encodeUri; +exports.executePromisesSequentially = executePromisesSequentially; +exports.exponentialRetryPolicy = exponentialRetryPolicy; +exports.flattenResponse = flattenResponse; +exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; +exports.generateUuid = generateUuid; +exports.getDefaultProxySettings = getDefaultProxySettings; +exports.getDefaultUserAgentValue = getDefaultUserAgentValue; +exports.isDuration = isDuration; +exports.isNode = isNode; +exports.isValidUuid = isValidUuid; +exports.keepAlivePolicy = keepAlivePolicy; +exports.logPolicy = logPolicy; +exports.operationOptionsToRequestOptionsBase = operationOptionsToRequestOptionsBase; +exports.parseXML = parseXML; +exports.promiseToCallback = promiseToCallback; +exports.promiseToServiceCallback = promiseToServiceCallback; +exports.proxyPolicy = proxyPolicy; +exports.redirectPolicy = redirectPolicy; +exports.serializeObject = serializeObject; +exports.signingPolicy = signingPolicy; +exports.stringifyXML = stringifyXML; +exports.stripRequest = stripRequest; +exports.stripResponse = stripResponse; +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +exports.throttlingRetryPolicy = throttlingRetryPolicy; +exports.tracingPolicy = tracingPolicy; +exports.userAgentPolicy = userAgentPolicy; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-http/dist/index.js.map b/node_modules/@azure/core-http/dist/index.js.map new file mode 100644 index 00000000..18cee14c --- /dev/null +++ b/node_modules/@azure/core-http/dist/index.js.map @@ -0,0 +1 @@ 
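For orientation, a minimal usage sketch of the credential classes vendored above (`BasicAuthenticationCredentials`, `ApiKeyCredentials`, `TopicCredentials`). It relies only on the constructors and `signRequest` implementations visible in this bundle; the `WebResource(url, method)` constructor arguments and the header/query names used are assumptions for illustration, not part of the committed files:

```ts
import {
  ApiKeyCredentials,
  BasicAuthenticationCredentials,
  TopicCredentials,
  WebResource,
} from "@azure/core-http";

async function signingDemo(): Promise<void> {
  // Basic auth: signRequest sets "authorization: Basic <base64(user:password)>".
  const basic = new BasicAuthenticationCredentials("user", "secret");
  const basicRequest = await basic.signRequest(
    new WebResource("https://example.com", "GET") // assumed (url, method) constructor
  );
  console.log(basicRequest.headers.get("authorization"));

  // API key auth: values can be injected as headers and/or query parameters.
  const apiKey = new ApiKeyCredentials({
    inHeader: { "x-api-key": "my-key" },          // hypothetical header name
    inQuery: { "api-version": "2022-05-01" },     // hypothetical query parameter
  });
  const apiKeyRequest = await apiKey.signRequest(
    new WebResource("https://example.com", "GET")
  );
  console.log(apiKeyRequest.url); // ...?api-version=2022-05-01

  // Event Grid topic key: ApiKeyCredentials preconfigured with the "aeg-sas-key" header.
  const topic = new TopicCredentials("topic-key");
  const topicRequest = await topic.signRequest(
    new WebResource("https://example.com", "POST")
  );
  console.log(topicRequest.headers.get("aeg-sas-key"));
}

signingDemo().catch(console.error);
```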
+{"version":3,"file":"index.js","sources":["../src/httpHeaders.ts","../src/util/base64.ts","../src/util/constants.ts","../src/util/serializer.common.ts","../src/util/utils.ts","../src/serializer.ts","../src/webResource.ts","../src/url.ts","../src/proxyAgent.ts","../src/util/sanitizer.ts","../src/util/inspect.ts","../src/restError.ts","../src/log.ts","../src/nodeFetchHttpClient.ts","../src/httpPipelineLogLevel.ts","../src/operationOptions.ts","../src/policies/requestPolicy.ts","../src/util/xml.ts","../src/policies/deserializationPolicy.ts","../src/policies/keepAlivePolicy.ts","../src/policies/redirectPolicy.ts","../src/util/exponentialBackoffStrategy.ts","../src/util/typeguards.ts","../src/util/delay.ts","../src/policies/exponentialRetryPolicy.ts","../src/policies/logPolicy.ts","../src/operationParameter.ts","../src/operationSpec.ts","../src/policies/msRestUserAgentPolicy.ts","../src/policies/userAgentPolicy.ts","../src/queryCollectionFormat.ts","../src/policies/bearerTokenAuthenticationPolicy.ts","../src/policies/disableResponseDecompressionPolicy.ts","../src/policies/generateClientRequestIdPolicy.ts","../src/httpClientCache.ts","../src/policies/ndJsonPolicy.ts","../src/policies/proxyPolicy.ts","../src/policies/rpRegistrationPolicy.ts","../src/policies/signingPolicy.ts","../src/policies/systemErrorRetryPolicy.ts","../src/util/throttlingRetryStrategy.ts","../src/policies/throttlingRetryPolicy.ts","../src/policies/tracingPolicy.ts","../src/serviceClient.ts","../src/createSpanLegacy.ts","../src/credentials/accessTokenCache.ts","../src/credentials/accessTokenRefresher.ts","../src/credentials/basicAuthenticationCredentials.ts","../src/credentials/apiKeyCredentials.ts","../src/credentials/topicCredentials.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string): string {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(options?: { preserveCase?: boolean }): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n rawHeaders: unknown;\n clone: unknown;\n get: unknown;\n set: unknown;\n contains: unknown;\n remove: unknown;\n headersArray: unknown;\n headerValues: unknown;\n headerNames: unknown;\n toJson: unknown;\n };\n if (\n typeof castObject.rawHeaders === \"function\" &&\n typeof castObject.clone === \"function\" &&\n typeof castObject.get === \"function\" &&\n typeof castObject.set === \"function\" &&\n typeof castObject.contains === \"function\" &&\n typeof castObject.remove === \"function\" &&\n typeof castObject.headersArray === \"function\" &&\n typeof castObject.headerValues === \"function\" &&\n typeof castObject.headerNames === \"function\" &&\n typeof castObject.toJson === \"function\"\n ) {\n return true;\n }\n }\n\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders implements HttpHeadersLike {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n return this.toJson({ preserveCase: true });\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header values that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(options: { preserveCase?: boolean } = {}): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n if (options.preserveCase) {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name] = header.value;\n }\n } else {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[getHeaderKey(header.name)] = header.value;\n }\n }\n return result;\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson({ preserveCase: true }));\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n const resultPreservingCasing: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n resultPreservingCasing[header.name] = header.value;\n }\n return new HttpHeaders(resultPreservingCasing);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nexport function encodeString(value: string): string {\n return Buffer.from(value).toString(\"base64\");\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n const bufferValue = value instanceof Buffer ? 
value : Buffer.from(value.buffer as ArrayBuffer);\n return bufferValue.toString(\"base64\");\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n return Buffer.from(value, \"base64\");\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A set of constants used internally when processing requests.\n */\nexport const Constants = {\n /**\n * The core-http version\n */\n coreHttpVersion: \"2.2.5\",\n\n /**\n * Specifies HTTP.\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n ServiceUnavailable: 503,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Default key used to access the XML attributes.\n */\nexport const XML_ATTRKEY = \"$\";\n/**\n * Default key used to access the XML value content.\n */\nexport const XML_CHARKEY = \"_\";\n\n/**\n * Options to govern behavior of xml parser and builder.\n */\nexport interface SerializerOptions {\n /**\n * indicates the name of the root element in the resulting XML when building XML.\n */\n rootName?: string;\n /**\n * indicates whether the root element is to be included or not in the output when parsing XML.\n */\n includeRoot?: boolean;\n /**\n * key used to access the XML value content when parsing XML.\n */\n xmlCharKey?: string;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Constants } from \"./constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { XML_ATTRKEY } from \"./serializer.common\";\nimport { v4 as uuidv4 } from \"uuid\";\n\nconst validUuidRegex =\n /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param urlToCheck - The url to check\n * @returns True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param uri - The URI 
to be encoded.\n * @returns The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param response - The Http Response\n * @returns The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization header.\n *\n * @param request - The Http Request object\n * @returns The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param uuid - The uuid as a string that needs to be validated\n * @returns True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Generated UUID\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param promiseFactories - An array of promise factories(A function that return a promise)\n * @param kickstart - Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n * @returns A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(\n promiseFactories: Array,\n kickstart: unknown\n): Promise {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param err - The error occurred if any, while executing the request; otherwise null.\n * @param result - The deserialized response body if an error did not occur.\n * @param request - The raw/actual request sent to the server if an error did not occur.\n * @param response - The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param promise - The Promise to be converted to a callback\n * @returns A function that takes the callback `(cb: Function) => void`\n * @deprecated generated code should instead depend on responseToBody\n */\n// eslint-disable-next-line @typescript-eslint/ban-types\nexport function promiseToCallback(promise: Promise): (cb: Function) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n // eslint-disable-next-line @typescript-eslint/ban-types\n return (cb: Function): void => {\n promise\n .then((data: any) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n return cb(undefined, data);\n })\n .catch((err: Error) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n cb(err);\n });\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(\n promise: Promise\n): (cb: ServiceCallback) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise\n .then((data: HttpOperationResponse) => {\n return process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n })\n .catch((err: Error) => {\n process.nextTick(cb, err);\n });\n };\n}\n\nexport function prepareXMLRootList(\n obj: unknown,\n elementName: string,\n xmlNamespaceKey?: string,\n xmlNamespace?: string\n): { [s: string]: any } {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n\n if (!xmlNamespaceKey || !xmlNamespace) {\n return { [elementName]: obj };\n }\n\n const result = { [elementName]: obj };\n result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace };\n return result;\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param targetCtor - The target object on which the properties need to be applied.\n * @param sourceCtors - An array of source objects from which the properties need to be taken.\n 
*/\nexport function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void {\n const castTargetCtorParam = targetCtorParam as {\n prototype: Record;\n };\n sourceCtors.forEach((sourceCtor) => {\n Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => {\n castTargetCtorParam.prototype[name] = sourceCtor.prototype[name];\n });\n });\n}\n\nconst validateISODuration =\n /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param value - The value to be validated for ISO 8601 duration format.\n * @returns `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param value - The value to search and replace in.\n * @param searchValue - The value to search for in the value argument.\n * @param replaceValue - The value to replace searchValue with in the value argument.\n * @returns The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given entity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value - Any entity\n * @returns true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: unknown): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n\nexport function getEnvironmentValue(name: string): string | undefined {\n if (process.env[name]) {\n return process.env[name];\n } else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\n/**\n * @internal\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * @internal\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/* eslint-disable eqeqeq */\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\n\n// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications.\n\n/**\n * Used to map raw response objects to final shapes.\n * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON.\n * Also allows pulling values from headers, as well as inserting default values and constants.\n */\nexport class Serializer {\n constructor(\n /**\n * The provided model mapper.\n */\n public readonly modelMappers: { [key: string]: any } = {},\n /**\n * Whether the contents are XML or not.\n */\n public readonly isXML?: boolean\n ) {}\n\n /**\n * Validates constraints, if any. 
This function will throw if the provided value does not respect those constraints.\n * @param mapper - The definition of data models.\n * @param value - The value.\n * @param objectName - Name of the object. Used in the error messages.\n */\n validateConstraints(mapper: Mapper, value: unknown, objectName: string): void {\n const failValidation = (\n constraintName: keyof MapperConstraints,\n constraintValue: any\n ): Error => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const valueAsNumber = value as number;\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n const valueAsArray = value as any[];\n if (MaxItems != undefined && valueAsArray.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && valueAsArray.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && valueAsArray.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && valueAsArray.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n valueAsArray.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param object - A valid Javascript object to be serialized.\n * @param objectName - Name of the serialized object.\n * @param options - additional options to deserialization.\n * @returns A valid serialized Javascript object.\n */\n serialize(\n mapper: Mapper,\n object: unknown,\n objectName?: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? 
XML_CHARKEY,\n };\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/i) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/i) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/i) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = serializeByteArrayType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = serializeBase64UrlType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = serializeSequenceType(\n this,\n mapper as SequenceMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = serializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Composite$/i) !== null) {\n payload = serializeCompositeType(\n this,\n mapper as CompositeMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param responseBody - A valid Javascript entity to be deserialized.\n * @param objectName - Name of the deserialized object.\n * @param options - Controls behavior of XML parser and builder.\n * @returns A valid deserialized Javascript object.\n */\n deserialize(\n mapper: Mapper,\n responseBody: unknown,\n objectName: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. 
xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/i) !== null) {\n payload = deserializeCompositeType(\n this,\n mapper as CompositeMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else {\n if (this.isXML) {\n const xmlCharKey = updatedOptions.xmlCharKey;\n const castResponseBody = responseBody as Record;\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\" i.e., XML_ATTRKEY) and body (\"#\" i.e., XML_CHARKEY) properties,\n * then just reduce the responseBody value to the body (\"#\" i.e., XML_CHARKEY) property.\n */\n if (\n castResponseBody[XML_ATTRKEY] != undefined &&\n castResponseBody[xmlCharKey] != undefined\n ) {\n responseBody = castResponseBody[xmlCharKey];\n }\n }\n\n if (mapperType.match(/^Number$/i) !== null) {\n payload = parseFloat(responseBody as string);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/i) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) {\n payload = new Date(responseBody as string);\n } else if (mapperType.match(/^UnixTime$/i) !== null) {\n payload = unixTimeToDate(responseBody as number);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = base64.decodeString(responseBody as string);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = base64UrlToByteArray(responseBody as string);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = deserializeSequenceType(\n this,\n mapper as SequenceMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string): string {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n }\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // 
Base64Url to Base64.\n str = str.replace(/-/g, \"+\").replace(/_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/i) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/i) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/i) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/i) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/i) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !((typeof Blob === \"function\" || typeof Blob === \"object\") && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = base64.encodeByteArray(value);\n }\n return returnValue;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = bufferToBase64Url(value) || \"\";\n }\n return returnValue;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string): any {\n if (value != undefined) {\n if (typeName.match(/^Date$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/i) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. Instead was \"${value}\".`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any[] {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n const serializedValue = serializer.serialize(elementType, object[i], objectName, options);\n\n if (isXml && elementType.xmlNamespace) {\n const xmlnsKey = elementType.xmlNamespacePrefix\n ? 
`xmlns:${elementType.xmlNamespacePrefix}`\n : \"xmlns\";\n if (elementType.type.name === \"Composite\") {\n tempArray[i] = { ...serializedValue };\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n } else {\n tempArray[i] = {};\n tempArray[i][options.xmlCharKey] = serializedValue;\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n }\n } else {\n tempArray[i] = serializedValue;\n }\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): { [key: string]: any } {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n const serializedValue = serializer.serialize(valueType, object[key], objectName, options);\n // If the element needs an XML namespace we need to add it within the $ property\n tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options);\n }\n\n // Add the namespace to the root element if needed\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : \"xmlns\";\n\n const result = tempDictionary;\n result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace };\n return result;\n }\n\n return tempDictionary;\n}\n\n/**\n * Resolves the additionalProperties property from a referenced mapper.\n * @param serializer - The serializer containing the entire set of mappers.\n * @param mapper - The composite mapper to resolve.\n * @param objectName - Name of the object being serialized.\n */\nfunction resolveAdditionalProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): SequenceMapper | BaseMapper | CompositeMapper | DictionaryMapper | EnumMapper | undefined {\n const additionalProperties = mapper.type.additionalProperties;\n\n if (!additionalProperties && mapper.type.className) {\n const modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n return modelMapper?.type.additionalProperties;\n }\n\n return additionalProperties;\n}\n\n/**\n * Finds the mapper referenced by `className`.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n * @param objectName - Name of the object being serialized\n */\nfunction resolveReferencedMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): CompositeMapper | undefined {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n return serializer.modelMappers[className];\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const 
modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${mapper.type.className}\".`);\n }\n modelProps = modelMapper?.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(modelMapper)}\" of type \"${\n mapper.type.className\n }\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (\n childObject == undefined &&\n (object[key] != undefined || propertyMapper.defaultValue !== undefined)\n ) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix\n ? `xmlns:${mapper.xmlNamespacePrefix}`\n : \"xmlns\";\n parentObject[XML_ATTRKEY] = {\n ...parentObject[XML_ATTRKEY],\n [xmlnsKey]: mapper.xmlNamespace,\n };\n }\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? 
objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName,\n options\n );\n\n if (serializedValue !== undefined && propName != undefined) {\n const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options);\n if (isXml && propertyMapper.xmlIsAttribute) {\n // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {};\n parentObject[XML_ATTRKEY][propName] = serializedValue;\n } else if (isXml && propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: value };\n } else {\n parentObject[propName] = value;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName);\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]',\n options\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction getXmlObjectValue(\n propertyMapper: Mapper,\n serializedValue: any,\n isXml: boolean,\n options: Required\n): any {\n if (!isXml || !propertyMapper.xmlNamespace) {\n return serializedValue;\n }\n\n const xmlnsKey = propertyMapper.xmlNamespacePrefix\n ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}`\n : \"xmlns\";\n const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace };\n\n if ([\"Composite\"].includes(propertyMapper.type.name)) {\n if (serializedValue[XML_ATTRKEY]) {\n return serializedValue;\n } else {\n const result: any = { ...serializedValue };\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n }\n }\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n}\n\nfunction isSpecialXmlProperty(propertyName: string, options: Required): boolean {\n return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName,\n options\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody[XML_ATTRKEY][xmlName!],\n propertyObjectName,\n options\n );\n } else {\n const propertyName = xmlElementName || xmlName || serializedName;\n if (propertyMapper.xmlIsWrapped) {\n /* a list of wrapped by \n For the xml example below\n \n ...\n ...\n \n the responseBody has\n {\n Cors: {\n CorsRule: [{...}, {...}]\n }\n }\n xmlName is \"Cors\" and xmlElementName is\"CorsRule\".\n */\n const wrapped = responseBody[xmlName!];\n const elementList = wrapped?.[xmlElementName!] ?? 
[];\n instance[key] = serializer.deserialize(\n propertyMapper,\n elementList,\n propertyObjectName,\n options\n );\n } else {\n const property = responseBody[propertyName!];\n instance[key] = serializer.deserialize(\n propertyMapper,\n property,\n propertyObjectName,\n options\n );\n }\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [k, v] of Object.entries(instance)) {\n if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) {\n arrayInstance[k] = v;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string): boolean => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]',\n options\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key, options)\n ) {\n instance[key] = responseBody[key];\n }\n }\n }\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n 
objectName: string,\n options: Required\n): { [key: string]: any } {\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any[] {\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(\n element,\n responseBody[i],\n `${objectName}[${i}]`,\n options\n );\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string): any {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\n/**\n * Description of various value constraints such as integer ranges and string regex.\n */\nexport interface MapperConstraints {\n /**\n * The value should be less than or equal to the `InclusiveMaximum` value.\n */\n InclusiveMaximum?: number;\n /**\n * The value should be less than the `ExclusiveMaximum` value.\n */\n ExclusiveMaximum?: number;\n /**\n * The value should be greater than or equal to the `InclusiveMinimum` value.\n */\n InclusiveMinimum?: number;\n /**\n * The value should be greater than the `InclusiveMinimum` value.\n */\n ExclusiveMinimum?: number;\n /**\n * The length should be smaller than the `MaxLength`.\n */\n MaxLength?: number;\n /**\n * The length should be bigger than the `MinLength`.\n */\n MinLength?: number;\n /**\n * The value must match the pattern.\n */\n Pattern?: RegExp;\n /**\n * The value must contain fewer items than the MaxItems value.\n */\n MaxItems?: number;\n /**\n * The value must contain more items than the `MinItems` value.\n */\n MinItems?: number;\n /**\n * The value must contain only unique items.\n */\n UniqueItems?: true;\n /**\n * The value should be exactly divisible by the `MultipleOf` value.\n */\n MultipleOf?: number;\n}\n\n/**\n * Type of the mapper. Includes known mappers.\n */\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\n/**\n * The type of a simple mapper.\n */\nexport interface SimpleMapperType {\n /**\n * Name of the type of the property.\n */\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\n/**\n * Helps build a mapper that describes how to map a set of properties of an object based on other mappers.\n *\n * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`.\n */\nexport interface CompositeMapperType {\n /**\n * Name of the composite mapper type.\n */\n name: \"Composite\";\n\n /**\n * Use `className` to reference another type definition.\n */\n className?: string;\n\n /**\n * Use `modelProperties` when the reference to the other type has been resolved.\n */\n modelProperties?: { [propertyName: string]: Mapper };\n\n /**\n * Used when a model has `additionalProperties: true`. 
Allows the generic processing of unnamed model properties on the response object.\n */\n additionalProperties?: Mapper;\n\n /**\n * The name of the top-most parent scheme, the one that has no parents.\n */\n uberParent?: string;\n\n /**\n * A polymorphic discriminator.\n */\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\n/**\n * Helps build a mapper that describes how to parse a sequence of mapped values.\n */\nexport interface SequenceMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Sequence\";\n /**\n * The mapper to use to map each one of the properties of the sequence.\n */\n element: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse a dictionary of mapped values.\n */\nexport interface DictionaryMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Dictionary\";\n /**\n * The mapper to use to map the value of each property in the dictionary.\n */\n value: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse an enum value.\n */\nexport interface EnumMapperType {\n /**\n * Name of the enum type mapper.\n */\n name: \"Enum\";\n /**\n * Values allowed by this mapper.\n */\n allowedValues: any[];\n}\n\n/**\n * The base definition of a mapper. Can be used for XML and plain JavaScript objects.\n */\nexport interface BaseMapper {\n /**\n * Name for the xml element\n */\n xmlName?: string;\n /**\n * Xml element namespace\n */\n xmlNamespace?: string;\n /**\n * Xml element namespace prefix\n */\n xmlNamespacePrefix?: string;\n /**\n * Determines if the current property should be serialized as an attribute of the parent xml element\n */\n xmlIsAttribute?: boolean;\n /**\n * Name for the xml elements when serializing an array\n */\n xmlElementName?: string;\n /**\n * Whether or not the current property should have a wrapping XML element\n */\n xmlIsWrapped?: boolean;\n /**\n * Whether or not the current property is readonly\n */\n readOnly?: boolean;\n /**\n * Whether or not the current property is a constant\n */\n isConstant?: boolean;\n /**\n * Whether or not the current property is required\n */\n required?: boolean;\n /**\n * Whether or not the current property allows mull as a value\n */\n nullable?: boolean;\n /**\n * The name to use when serializing\n */\n serializedName?: string;\n /**\n * Type of the mapper\n */\n type: MapperType;\n /**\n * Default value when one is not explicitly provided\n */\n defaultValue?: any;\n /**\n * Constraints to test the current value against\n */\n constraints?: MapperConstraints;\n}\n\n/**\n * Mappers are definitions of the data models used in the library.\n * These data models are part of the Operation or Client definitions in the responses or parameters.\n */\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\n/**\n * Used to disambiguate discriminated type unions.\n * For example, if response can have many shapes but also includes a 'kind' field (or similar),\n * that field can be used to determine how to deserialize the response to the correct type.\n */\nexport interface PolymorphicDiscriminator {\n /**\n * Name of the discriminant property in the original JSON payload, e.g. 
`@odata.kind`.\n */\n serializedName: string;\n /**\n * Name to use on the resulting object instead of the original property name.\n * Useful since the JSON property could be difficult to work with.\n * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`.\n */\n clientName: string;\n /**\n * It may contain any other property.\n */\n [key: string]: string;\n}\n\n/**\n * A mapper composed of other mappers.\n */\nexport interface CompositeMapper extends BaseMapper {\n /**\n * The type descriptor of the `CompositeMapper`.\n */\n type: CompositeMapperType;\n}\n\n/**\n * A mapper describing arrays.\n */\nexport interface SequenceMapper extends BaseMapper {\n /**\n * The type descriptor of the `SequenceMapper`.\n */\n type: SequenceMapperType;\n}\n\n/**\n * A mapper describing plain JavaScript objects used as key/value pairs.\n */\nexport interface DictionaryMapper extends BaseMapper {\n /**\n * The type descriptor of the `DictionaryMapper`.\n */\n type: DictionaryMapperType;\n /**\n * Optionally, a prefix to add to the header collection.\n */\n headerCollectionPrefix?: string;\n}\n\n/**\n * A mapper describing an enum value.\n */\nexport interface EnumMapper extends BaseMapper {\n /**\n * The type descriptor of the `EnumMapper`.\n */\n type: EnumMapperType;\n}\n\n/**\n * An interface representing an URL parameter value.\n */\nexport interface UrlParameterValue {\n /**\n * The URL value.\n */\n value: string;\n /**\n * Whether to keep or skip URL encoding.\n */\n skipUrlEncoding: boolean;\n}\n\n/**\n * Utility function that serializes an object that might contain binary information into a plain object, array or a string.\n */\nexport function serializeObject(toSerialize: unknown): any {\n const castToSerialize = toSerialize as Record;\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(castToSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\n/**\n * String enum containing the string types of property mappers.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Context, SpanOptions } from \"@azure/core-tracing\";\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from 
\"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { SerializerOptions } from \"./util/serializer.common\";\nimport { generateUuid } from \"./util/utils\";\n\n/**\n * List of supported HTTP methods.\n */\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\n\n/**\n * Possible HTTP request body types\n */\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * A description of a HTTP request to be made to a remote server.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * A unique identifier for the request. Used for logging and tracing.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. 
*/\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: unknown): object is WebResourceLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n url: unknown;\n method: unknown;\n headers: unknown;\n validateRequestProperties: unknown;\n prepare: unknown;\n clone: unknown;\n };\n if (\n typeof castObject.url === \"string\" &&\n typeof castObject.method === \"string\" &&\n typeof castObject.headers === \"object\" &&\n isHttpHeadersLike(castObject.headers) &&\n typeof castObject.validateRequestProperties === \"function\" &&\n typeof castObject.prepare === \"function\" &&\n typeof castObject.clone === \"function\"\n ) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n */\nexport class WebResource implements WebResourceLike {\n /**\n * URL of the outgoing request.\n */\n url: string;\n /**\n * HTTP method to use.\n */\n method: HttpMethods;\n /**\n * Request body.\n */\n body?: any;\n /**\n * HTTP headers.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. 
If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * Query added to the URL.\n */\n query?: { [key: string]: any };\n /**\n * Specification of the HTTP request.\n */\n operationSpec?: OperationSpec;\n /**\n * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination.\n */\n withCredentials: boolean;\n /**\n * How long to wait in milliseconds before aborting the request.\n */\n timeout: number;\n /**\n * What proxy to use, if necessary.\n */\n proxySettings?: ProxySettings;\n /**\n * Whether to keep the HTTP connections alive throughout requests.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * Unique identifier of the outgoing request.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * Tracing: Context used when creating Spans.\n */\n tracingContext?: Context;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: unknown,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n decompressResponse?: boolean,\n streamResponseStatusCodes?: Set\n ) {\n this.streamResponseBody = streamResponseBody;\n this.streamResponseStatusCodes = streamResponseStatusCodes;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.decompressResponse = decompressResponse;\n this.requestId = this.headers.get(\"x-ms-client-request-id\") || generateUuid();\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. 
It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param options - Options to provide for preparing the request.\n * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (\n options.method === undefined ||\n options.method === null ||\n typeof options.method.valueOf() !== \"string\"\n ) {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate === undefined ||\n options.pathTemplate === null ||\n typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url === undefined ||\n options.url === null ||\n typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({[\\w-]*\\s*[\\w-]*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. 
Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2);\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in parameters: ${stringifiedPathParameters}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", this.requestId);\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly\n this.body = options.body;\n if (options.body !== undefined && options.body !== null) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n if (options.spanOptions) {\n this.spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n this.tracingContext = options.tracingContext;\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.decompressResponse,\n this.streamResponseStatusCodes\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\n/**\n * Options to prepare an outgoing HTTP request.\n */\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: `{ \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }`\n * - query-parameter-value in \"string\" format: `{ \"query-parameter-name\": \"query-parameter-value\"}`.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}`\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: `{ \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }`\n * - path-parameter-value in \"string\" format: `{ \"path-parameter-name\": \"path-parameter-value\" }`.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n /**\n * Form data, used to build the request body.\n */\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: Record;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n /**\n * Signal of an abort controller. 
Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Allows keeping track of the progress of uploading the outgoing request.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Allows keeping track of the progress of downloading the incoming response.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n /**\n * Value of the parameter.\n */\n value: any;\n /**\n * Disables URL encoding if set to true.\n */\n skipUrlEncoding: boolean;\n /**\n * Parameter values may contain any other property.\n */\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * May contain other properties.\n */\n [key: string]: any;\n\n /**\n * Options to override XML parsing/building behavior.\n */\n serializerOptions?: SerializerOptions;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Get the keys of the query string.\n */\n public keys(): string[] {\n return Object.keys(this._rawQuery);\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: unknown): void {\n const caseParameterValue = parameterValue as {\n toString: () => string;\n };\n if (parameterName) {\n if (caseParameterValue !== undefined && caseParameterValue !== null) {\n const newValue = Array.isArray(caseParameterValue)\n ? 
caseParameterValue\n : caseParameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. 
If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port === undefined || port === null || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n let tokenPath: string | undefined;\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n tokenPath = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n /**\n * Serializes the URL as a string.\n * @returns the URL as a string.\n */\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n /**\n * Parses a given string URL into a new {@link URLBuilder}.\n */\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode 
<= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state !== undefined && state !== null ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tunnel from \"tunnel\";\nimport { HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { URLBuilder } from \"./url\";\n\nexport type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };\nexport function createProxyAgent(\n requestUrl: string,\n proxySettings: ProxySettings,\n headers?: HttpHeadersLike\n): ProxyAgent {\n const host = URLBuilder.parse(proxySettings.host).getHost() as string;\n if (!host) {\n throw new Error(\"Expecting a non-empty host in proxy settings.\");\n }\n if (!isValidPort(proxySettings.port)) {\n throw new Error(\"Expecting a valid port number in the range of [0, 65535] in proxy settings.\");\n }\n const tunnelOptions: tunnel.HttpsOverHttpsOptions = {\n proxy: {\n host: host,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {},\n },\n };\n\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;\n } else if (proxySettings.username) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;\n }\n\n const isRequestHttps = isUrlHttps(requestUrl);\n const isProxyHttps = isUrlHttps(proxySettings.host);\n\n const proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),\n };\n\n return proxyAgent;\n}\n\nexport function isUrlHttps(url: string): boolean {\n const urlScheme = URLBuilder.parse(url).getScheme() || \"\";\n return urlScheme.toLowerCase() === \"https\";\n}\n\nexport function createTunnel(\n isRequestHttps: boolean,\n isProxyHttps: boolean,\n tunnelOptions: tunnel.HttpsOverHttpsOptions\n): http.Agent | https.Agent {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n } else if (isRequestHttps && !isProxyHttps) {\n return tunnel.httpsOverHttp(tunnelOptions);\n } else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n } else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\n\nfunction isValidPort(port: number): boolean {\n // any port in 0-65535 range is valid (RFC 793) even though almost all implementations\n // will reserve 0 for a specific purpose, 
and a range of numbers for ephemeral ports\n return 0 <= port && port <= 65535;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { URLBuilder, URLQuery } from \"../url\";\nimport { UnknownObject, isObject } from \"./utils\";\n\nexport interface SanitizerOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n}\n\nconst RedactedString = \"REDACTED\";\n\nconst defaultAllowedHeaderNames = [\n \"x-ms-client-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-useragent\",\n \"x-ms-correlation-request-id\",\n \"x-ms-request-id\",\n \"client-request-id\",\n \"ms-cv\",\n \"return-client-request-id\",\n \"traceparent\",\n\n \"Access-Control-Allow-Credentials\",\n \"Access-Control-Allow-Headers\",\n \"Access-Control-Allow-Methods\",\n \"Access-Control-Allow-Origin\",\n \"Access-Control-Expose-Headers\",\n \"Access-Control-Max-Age\",\n \"Access-Control-Request-Headers\",\n \"Access-Control-Request-Method\",\n \"Origin\",\n\n \"Accept\",\n \"Accept-Encoding\",\n \"Cache-Control\",\n \"Connection\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"ETag\",\n \"Expires\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"Last-Modified\",\n \"Pragma\",\n \"Request-Id\",\n \"Retry-After\",\n \"Server\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"WWW-Authenticate\",\n];\n\nconst defaultAllowedQueryParameters: string[] = [\"api-version\"];\n\nexport class Sanitizer {\n public allowedHeaderNames: Set;\n public allowedQueryParameters: Set;\n\n constructor({ allowedHeaderNames = [], allowedQueryParameters = [] }: SanitizerOptions = {}) {\n allowedHeaderNames = Array.isArray(allowedHeaderNames)\n ? defaultAllowedHeaderNames.concat(allowedHeaderNames)\n : defaultAllowedHeaderNames;\n\n allowedQueryParameters = Array.isArray(allowedQueryParameters)\n ? defaultAllowedQueryParameters.concat(allowedQueryParameters)\n : defaultAllowedQueryParameters;\n\n this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase()));\n this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase()));\n }\n\n public sanitize(obj: unknown): string {\n const seen = new Set();\n return JSON.stringify(\n obj,\n (key: string, value: unknown) => {\n // Ensure Errors include their interesting non-enumerable members\n if (value instanceof Error) {\n return {\n ...value,\n name: value.name,\n message: value.message,\n };\n }\n\n if (key === \"_headersMap\") {\n return this.sanitizeHeaders(value as UnknownObject);\n } else if (key === \"url\") {\n return this.sanitizeUrl(value as string);\n } else if (key === \"query\") {\n return this.sanitizeQuery(value as UnknownObject);\n } else if (key === \"body\") {\n // Don't log the request body\n return undefined;\n } else if (key === \"response\") {\n // Don't log response again\n return undefined;\n } else if (key === \"operationSpec\") {\n // When using sendOperationRequest, the request carries a massive\n // field with the autorest spec. 
No need to log it.\n return undefined;\n } else if (Array.isArray(value) || isObject(value)) {\n if (seen.has(value)) {\n return \"[Circular]\";\n }\n seen.add(value);\n }\n\n return value;\n },\n 2\n );\n }\n\n private sanitizeHeaders(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value);\n }\n\n private sanitizeQuery(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]);\n }\n\n private sanitizeObject(\n value: UnknownObject,\n allowedKeys: Set,\n accessor: (value: any, key: string) => any\n ): UnknownObject {\n if (typeof value !== \"object\" || value === null) {\n return value;\n }\n\n const sanitized: UnknownObject = {};\n\n for (const k of Object.keys(value)) {\n if (allowedKeys.has(k.toLowerCase())) {\n sanitized[k] = accessor(value, k);\n } else {\n sanitized[k] = RedactedString;\n }\n }\n\n return sanitized;\n }\n\n private sanitizeUrl(value: string): string {\n if (typeof value !== \"string\" || value === null) {\n return value;\n }\n\n const urlBuilder = URLBuilder.parse(value);\n const queryString = urlBuilder.getQuery();\n\n if (!queryString) {\n return value;\n }\n\n const query = URLQuery.parse(queryString);\n for (const k of query.keys()) {\n if (!this.allowedQueryParameters.has(k.toLowerCase())) {\n query.set(k, RedactedString);\n }\n }\n\n urlBuilder.setQuery(query.toString());\n return urlBuilder.toString();\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { inspect } from \"util\";\n\nexport const custom = inspect.custom;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { Sanitizer } from \"./util/sanitizer\";\nimport { WebResourceLike } from \"./webResource\";\nimport { custom } from \"./util/inspect\";\n\nconst errorSanitizer = new Sanitizer();\n\n/**\n * An error resulting from an HTTP request to a service endpoint.\n */\nexport class RestError extends Error {\n /**\n * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.)\n */\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n /**\n * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete.\n */\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n /**\n * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT).\n */\n code?: string;\n /**\n * The HTTP status code of the response, if one was returned.\n */\n statusCode?: number;\n /**\n * Outgoing request.\n */\n request?: WebResourceLike;\n /**\n * Incoming response.\n */\n response?: HttpOperationResponse;\n /**\n * Any additional details. 
In the case of deserialization errors, can be the processed response.\n */\n details?: unknown;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ) {\n super(message);\n this.name = \"RestError\";\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n\n /**\n * Logging method for util.inspect in Node\n */\n [custom](): string {\n return `RestError: ${this.message} \\n ${errorSanitizer.sanitize(this)}`;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { createClientLogger } from \"@azure/logger\";\nexport const logger = createClientLogger(\"core-http\");\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tough from \"tough-cookie\";\nimport { AbortController, AbortError } from \"@azure/abort-controller\";\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxyAgent, createProxyAgent, isUrlHttps } from \"./proxyAgent\";\nimport { Readable, Transform } from \"stream\";\nimport { TransferProgressEvent, WebResourceLike } from \"./webResource\";\nimport FormData from \"form-data\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\nimport { logger } from \"./log\";\nimport node_fetch from \"node-fetch\";\n\ninterface AgentCache {\n httpAgent?: http.Agent;\n httpsAgent?: https.Agent;\n}\n\nfunction getCachedAgent(\n isHttps: boolean,\n agentCache: AgentCache\n): http.Agent | https.Agent | undefined {\n return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent;\n}\n\ninterface FetchError extends Error {\n code?: string;\n errno?: string;\n type?: string;\n}\n\n/**\n * String URLs used when calling to `fetch()`.\n */\nexport type CommonRequestInfo = string;\n\n/**\n * An object containing information about the outgoing HTTP request.\n */\nexport type CommonRequestInit = Omit & {\n body?: any;\n headers?: any;\n signal?: any;\n};\n\n/**\n * An object containing information about the incoming HTTP response.\n */\nexport type CommonResponse = Omit & {\n body: any;\n trailer: any;\n formData: any;\n};\n\nexport class ReportTransform extends Transform {\n private loadedBytes: number = 0;\n _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void {\n this.push(chunk);\n this.loadedBytes += chunk.length;\n this.progressCallback!({ loadedBytes: this.loadedBytes });\n callback(undefined);\n }\n\n constructor(private progressCallback: (progress: TransferProgressEvent) => void) {\n super();\n }\n}\n\nfunction isReadableStream(body: any): body is Readable {\n return body && typeof body.pipe === \"function\";\n}\n\nfunction isStreamComplete(stream: Readable, aborter?: AbortController): Promise {\n return new Promise((resolve) => {\n stream.once(\"close\", () => {\n aborter?.abort();\n resolve();\n });\n stream.once(\"end\", resolve);\n stream.once(\"error\", resolve);\n });\n}\n\n/**\n * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike}\n */\nexport function parseHeaders(headers: Headers): HttpHeadersLike {\n const httpHeaders = new HttpHeaders();\n\n headers.forEach((value, key) => {\n httpHeaders.set(key, value);\n });\n\n return httpHeaders;\n}\n\n/**\n * An HTTP client that uses `node-fetch`.\n */\nexport class NodeFetchHttpClient implements HttpClient {\n /**\n * Provides minimum viable error handling and the logic that executes the abstract methods.\n * @param httpRequest - Object representing the outgoing HTTP request.\n * @returns An object representing the incoming HTTP response.\n */\n async sendRequest(httpRequest: WebResourceLike): Promise {\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\n \"'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object.\"\n );\n }\n\n const abortController = new AbortController();\n let abortListener: ((event: any) => void) | undefined;\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n abortListener = (event: Event) => {\n if (event.type === \"abort\") {\n abortController.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n\n if (httpRequest.timeout) {\n setTimeout(() => {\n abortController.abort();\n }, httpRequest.timeout);\n }\n\n if (httpRequest.formData) {\n const formData: any = httpRequest.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any): void => {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (\n value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")\n ) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if 
(Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n httpRequest.body = requestForm;\n httpRequest.formData = undefined;\n const contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm.getBoundary === \"function\") {\n httpRequest.headers.set(\n \"Content-Type\",\n `multipart/form-data; boundary=${requestForm.getBoundary()}`\n );\n } else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n\n let body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n const onUploadProgress = httpRequest.onUploadProgress;\n const uploadReportStream = new ReportTransform(onUploadProgress);\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n } else {\n uploadReportStream.end(body);\n }\n\n body = uploadReportStream;\n }\n\n const platformSpecificRequestInit: Partial = await this.prepareRequest(\n httpRequest\n );\n\n const requestInit: RequestInit = {\n body: body,\n headers: httpRequest.headers.rawHeaders(),\n method: httpRequest.method,\n signal: abortController.signal,\n redirect: \"manual\",\n ...platformSpecificRequestInit,\n };\n\n let operationResponse: HttpOperationResponse | undefined;\n try {\n const response: CommonResponse = await this.fetch(httpRequest.url, requestInit);\n\n const headers = parseHeaders(response.headers);\n\n const streaming =\n httpRequest.streamResponseStatusCodes?.has(response.status) ||\n httpRequest.streamResponseBody;\n\n operationResponse = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: streaming\n ? (response.body as unknown as NodeJS.ReadableStream)\n : undefined,\n bodyAsText: !streaming ? await response.text() : undefined,\n };\n\n const onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n const responseBody: ReadableStream | undefined = response.body || undefined;\n\n if (isReadableStream(responseBody)) {\n const downloadReportStream = new ReportTransform(onDownloadProgress);\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n } else {\n const length = parseInt(headers.get(\"Content-Length\")!) 
|| undefined;\n if (length) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length });\n }\n }\n }\n\n await this.processRequest(operationResponse);\n\n return operationResponse;\n } catch (error: any) {\n const fetchError: FetchError = error;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(\n fetchError.message,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n httpRequest\n );\n } else if (fetchError.type === \"aborted\") {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n throw fetchError;\n } finally {\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n let uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n let downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse?.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(\n operationResponse!.readableStreamBody,\n abortController\n );\n }\n\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(() => {\n httpRequest.abortSignal?.removeEventListener(\"abort\", abortListener!);\n return;\n })\n .catch((e) => {\n logger.warning(\"Error when cleaning up abortListener on httpRequest\", e);\n });\n }\n }\n }\n\n // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent\n private proxyAgentMap: Map = new Map();\n private keepAliveAgents: AgentCache = {};\n\n private readonly cookieJar = new tough.CookieJar(undefined, { looseMode: true });\n\n private getOrCreateAgent(httpRequest: WebResourceLike): http.Agent | https.Agent {\n const isHttps = isUrlHttps(httpRequest.url);\n\n // At the moment, proxy settings and keepAlive are mutually\n // exclusive because the 'tunnel' library currently lacks the\n // ability to create a proxy with keepAlive turned on.\n if (httpRequest.proxySettings) {\n const { host, port, username, password } = httpRequest.proxySettings;\n const key = `${host}:${port}:${username}:${password}`;\n const proxyAgents = this.proxyAgentMap.get(key) ?? {};\n\n let agent = getCachedAgent(isHttps, proxyAgents);\n if (agent) {\n return agent;\n }\n\n const tunnel: ProxyAgent = createProxyAgent(\n httpRequest.url,\n httpRequest.proxySettings,\n httpRequest.headers\n );\n\n agent = tunnel.agent;\n if (tunnel.isHttps) {\n proxyAgents.httpsAgent = tunnel.agent as https.Agent;\n } else {\n proxyAgents.httpAgent = tunnel.agent;\n }\n this.proxyAgentMap.set(key, proxyAgents);\n\n return agent;\n } else if (httpRequest.keepAlive) {\n let agent = getCachedAgent(isHttps, this.keepAliveAgents);\n if (agent) {\n return agent;\n }\n\n const agentOptions: http.AgentOptions | https.AgentOptions = {\n keepAlive: httpRequest.keepAlive,\n };\n\n if (isHttps) {\n agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions);\n } else {\n agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions);\n }\n\n return agent;\n } else {\n return isHttps ? 
https.globalAgent : http.globalAgent;\n }\n }\n\n /**\n * Uses `node-fetch` to perform the request.\n */\n // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs\n async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise {\n return node_fetch(input, init) as unknown as Promise;\n }\n\n /**\n * Prepares a request based on the provided web resource.\n */\n async prepareRequest(httpRequest: WebResourceLike): Promise> {\n const requestInit: Partial = {};\n\n if (this.cookieJar && !httpRequest.headers.get(\"Cookie\")) {\n const cookieString = await new Promise((resolve, reject) => {\n this.cookieJar!.getCookieString(httpRequest.url, (err, cookie) => {\n if (err) {\n reject(err);\n } else {\n resolve(cookie);\n }\n });\n });\n\n httpRequest.headers.set(\"Cookie\", cookieString);\n }\n\n // Set the http(s) agent\n requestInit.agent = this.getOrCreateAgent(httpRequest);\n\n requestInit.compress = httpRequest.decompressResponse;\n\n return requestInit;\n }\n\n /**\n * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a \"Set-Cookie\" header.\n */\n async processRequest(operationResponse: HttpOperationResponse): Promise {\n if (this.cookieJar) {\n const setCookieHeader = operationResponse.headers.get(\"Set-Cookie\");\n if (setCookieHeader !== undefined) {\n await new Promise((resolve, reject) => {\n this.cookieJar!.setCookie(\n setCookieHeader,\n operationResponse.request.url,\n { ignoreError: true },\n (err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n }\n );\n });\n }\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestOptionsBase, TransferProgressEvent } from \"./webResource\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * The base options type for all operations.\n */\nexport interface OperationOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: OperationRequestOptions;\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * Options that allow configuring the handling of HTTP requests made by an SDK operation.\n */\nexport interface OperationRequestOptions {\n /**\n * User defined custom request headers that will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n}\n\n/**\n * Converts an OperationOptions to a RequestOptionsBase\n *\n * @param opts - OperationOptions object to convert to RequestOptionsBase\n */\nexport function operationOptionsToRequestOptionsBase(\n opts: T\n): RequestOptionsBase {\n const { requestOptions, tracingOptions, ...additionalOptions } = opts;\n\n let result: RequestOptionsBase = additionalOptions;\n\n if (requestOptions) {\n result = { ...result, ...requestOptions };\n }\n\n if (tracingOptions) {\n result.tracingContext = tracingOptions.tracingContext;\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n result.spanOptions = (tracingOptions as any)?.spanOptions;\n }\n\n return result;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\n/**\n * The underlying structure of a request policy.\n */\nexport interface RequestPolicy {\n /**\n * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made.\n * @param httpRequest - {@link WebResourceLike} describing the request to be made.\n */\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\n/**\n * The base class from which all request policies derive.\n */\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n /**\n * The main method to implement that manipulates a request/response.\n */\n protected constructor(\n /**\n * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline.\n */\n readonly _nextPolicy: RequestPolicy,\n /**\n * The options that can be passed to a given request policy.\n */\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n /**\n * Sends a network request based on the given web resource.\n * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made.\n */\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. 
If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as xml2js from \"xml2js\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./serializer.common\";\n\n// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed\n// by the xm2js library is mutable. 
See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536\n// By creating a new copy of the settings each time we instantiate the parser,\n// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally.\nconst xml2jsDefaultOptionsV2: xml2js.OptionsV2 = {\n explicitCharkey: false,\n trim: false,\n normalize: false,\n normalizeTags: false,\n attrkey: XML_ATTRKEY,\n explicitArray: true,\n ignoreAttrs: false,\n mergeAttrs: false,\n explicitRoot: true,\n validator: undefined,\n xmlns: false,\n explicitChildren: false,\n preserveChildrenOrder: false,\n childkey: \"$$\",\n charsAsChildren: false,\n includeWhiteChars: false,\n async: false,\n strict: true,\n attrNameProcessors: undefined,\n attrValueProcessors: undefined,\n tagNameProcessors: undefined,\n valueProcessors: undefined,\n rootName: \"root\",\n xmldec: {\n version: \"1.0\",\n encoding: \"UTF-8\",\n standalone: true,\n },\n doctype: undefined,\n renderOpts: {\n pretty: true,\n indent: \" \",\n newline: \"\\n\",\n },\n headless: false,\n chunkSize: 10000,\n emptyTag: \"\",\n cdata: false,\n};\n\n// The xml2js settings for general XML parsing operations.\nconst xml2jsParserSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsParserSettings.explicitArray = false;\n\n// The xml2js settings for general XML building operations.\nconst xml2jsBuilderSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsBuilderSettings.explicitArray = false;\nxml2jsBuilderSettings.renderOpts = {\n pretty: false,\n};\n\n/**\n * Converts given JSON object to XML string\n * @param obj - JSON object to be converted into XML string\n * @param opts - Options that govern the parsing of given JSON object\n */\nexport function stringifyXML(obj: unknown, opts: SerializerOptions = {}): string {\n xml2jsBuilderSettings.rootName = opts.rootName;\n xml2jsBuilderSettings.charkey = opts.xmlCharKey ?? XML_CHARKEY;\n const builder = new xml2js.Builder(xml2jsBuilderSettings);\n return builder.buildObject(obj);\n}\n\n/**\n * Converts given XML string into JSON\n * @param str - String containing the XML content to be parsed into JSON\n * @param opts - Options that govern the parsing of given xml string\n */\nexport function parseXML(str: string, opts: SerializerOptions = {}): Promise {\n xml2jsParserSettings.explicitRoot = !!opts.includeRoot;\n xml2jsParserSettings.charkey = opts.xmlCharKey ?? 
XML_CHARKEY;\n const xmlParser = new xml2js.Parser(xml2jsParserSettings);\n return new Promise((resolve, reject) => {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n } else {\n xmlParser.parseString(str, (err, res) => {\n if (err) {\n reject(err);\n } else {\n resolve(res);\n }\n });\n }\n });\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { SerializerOptions, XML_CHARKEY } from \"../util/serializer.common\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { MapperType } from \"../serializer\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { parseXML } from \"../util/xml\";\n\n/**\n * Options to configure API response deserialization.\n */\nexport interface DeserializationOptions {\n /**\n * Configures the expected content types for the deserialization of\n * JSON and XML response bodies.\n */\n expectedContentTypes: DeserializationContentTypes;\n}\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions?: SerializerOptions\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DeserializationPolicy(\n nextPolicy,\n options,\n deserializationContentTypes,\n parsingOptions\n );\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\nexport const DefaultDeserializationOptions: DeserializationOptions = {\n expectedContentTypes: {\n json: defaultJsonContentTypes,\n xml: defaultXmlContentTypes,\n },\n};\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n public readonly xmlCharKey: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n requestPolicyOptions: RequestPolicyOptions,\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions: SerializerOptions = {}\n ) {\n super(nextPolicy, requestPolicyOptions);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n this.xmlCharKey = parsingOptions.xmlCharKey ?? 
XML_CHARKEY;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, {\n xmlCharKey: this.xmlCharKey,\n })\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\n/**\n * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}.\n * @param jsonContentTypes - Response content types to parse the body as JSON.\n * @param xmlContentTypes - Response content types to parse the body as XML.\n * @param response - HTTP Response from the pipeline.\n * @param options - Options to the serializer, mostly for configuring the XML parser if needed.\n * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}.\n */\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse,\n options: SerializerOptions = {}\n): Promise {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then(\n (parsedResponse) => {\n if (!shouldDeserializeResponse(parsedResponse)) {\n return parsedResponse;\n }\n\n const operationSpec = parsedResponse.request.operationSpec;\n if (!operationSpec || !operationSpec.responses) {\n return parsedResponse;\n }\n\n const responseSpec = getOperationResponse(parsedResponse);\n\n const { error, shouldReturnResponse } = handleErrorResponse(\n parsedResponse,\n operationSpec,\n responseSpec\n );\n if (error) {\n throw error;\n } else if (shouldReturnResponse) {\n return parsedResponse;\n }\n\n // An operation response spec does exist for current status code, so\n // use it to deserialize the response.\n if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\",\n options\n );\n } catch (innerError: any) {\n const restError = new RestError(\n `Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n throw restError;\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\",\n options\n );\n }\n }\n\n return parsedResponse;\n }\n );\n}\n\nfunction isOperationSpecEmpty(operationSpec: OperationSpec): boolean {\n const expectedStatusCodes = Object.keys(operationSpec.responses);\n return (\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\")\n );\n}\n\nfunction handleErrorResponse(\n parsedResponse: HttpOperationResponse,\n operationSpec: OperationSpec,\n responseSpec: OperationResponse | undefined\n): { error: RestError | null; shouldReturnResponse: boolean } {\n const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;\n const isExpectedStatusCode: boolean = isOperationSpecEmpty(operationSpec)\n ? isSuccessByStatus\n : !!responseSpec;\n\n if (isExpectedStatusCode) {\n if (responseSpec) {\n if (!responseSpec.isError) {\n return { error: null, shouldReturnResponse: false };\n }\n } else {\n return { error: null, shouldReturnResponse: false };\n }\n }\n\n const errorResponseSpec = responseSpec ?? operationSpec.responses.default;\n const streaming =\n parsedResponse.request.streamResponseStatusCodes?.has(parsedResponse.status) ||\n parsedResponse.request.streamResponseBody;\n const initialErrorMessage = streaming\n ? `Unexpected status code: ${parsedResponse.status}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(\n initialErrorMessage,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n\n // If the item failed but there's no error spec or default spec to deserialize the error,\n // we should fail so we just throw the parsed response\n if (!errorResponseSpec) {\n throw error;\n }\n\n const defaultBodyMapper = errorResponseSpec.bodyMapper;\n const defaultHeadersMapper = errorResponseSpec.headersMapper;\n\n try {\n // If error response has a body, try to deserialize it using default body mapper.\n // Then try to extract error code & message from it\n if (parsedResponse.parsedBody) {\n const parsedBody = parsedResponse.parsedBody;\n let parsedError;\n if (defaultBodyMapper) {\n let valueToDeserialize: any = parsedBody;\n if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof parsedBody === \"object\" ? parsedBody[defaultBodyMapper.xmlElementName!] 
: [];\n }\n parsedError = operationSpec.serializer.deserialize(\n defaultBodyMapper,\n valueToDeserialize,\n \"error.response.parsedBody\"\n );\n }\n\n const internalError: any = parsedBody.error || parsedError || parsedBody;\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n\n if (defaultBodyMapper) {\n error.response!.parsedBody = parsedError;\n }\n }\n\n // If error response has headers, try to deserialize it using default header mapper\n if (parsedResponse.headers && defaultHeadersMapper) {\n error.response!.parsedHeaders = operationSpec.serializer.deserialize(\n defaultHeadersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n } catch (defaultError: any) {\n error.message = `Error \"${defaultError.message}\" occurred in deserializing the responseBody - \"${parsedResponse.bodyAsText}\" for the default response.`;\n }\n\n return { error, shouldReturnResponse: false };\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse,\n opts: Required\n): Promise {\n const errorHandler = (err: Error & { code: string }): Promise => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse\n );\n return Promise.reject(e);\n };\n\n const streaming =\n operationResponse.request.streamResponseStatusCodes?.has(operationResponse.status) ||\n operationResponse.request.streamResponseBody;\n if (!streaming && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text, opts)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Options for how HTTP connections should be maintained for future\n * requests.\n */\nexport interface KeepAliveOptions {\n /**\n * When true, connections will be kept alive for multiple requests.\n * Defaults to true.\n */\n enable: boolean;\n}\n\n/**\n * By default, HTTP connections are maintained for future requests.\n */\nexport const DefaultKeepAliveOptions: KeepAliveOptions = {\n enable: true,\n};\n\n/**\n * Creates a policy that controls whether HTTP connections are maintained on future requests.\n * @param keepAliveOptions - Keep alive options. 
By default, HTTP connections are maintained for future requests.\n * @returns An instance of the {@link KeepAlivePolicy}\n */\nexport function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions);\n },\n };\n}\n\n/**\n * KeepAlivePolicy is a policy used to control keep alive settings for every request.\n */\nexport class KeepAlivePolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param keepAliveOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private readonly keepAliveOptions: KeepAliveOptions\n ) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResourceLike): Promise {\n request.keepAlive = this.keepAliveOptions.enable;\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Methods that are allowed to follow redirects 301 and 302\n */\nconst allowedRedirect = [\"GET\", \"HEAD\"];\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /**\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /**\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\n/**\n * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n * @param maximumRetries - Maximum number of redirects to follow.\n * @returns An instance of the {@link RedirectPolicy}\n */\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\n/**\n * Resends the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n */\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, readonly maxRetries = 20) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && allowedRedirect.includes(request.method)) ||\n (status === 302 && allowedRedirect.includes(request.method)) ||\n (status === 303 && request.method === \"POST\") ||\n status === 307) &&\n (!policy.maxRetries || currentRetries < policy.maxRetries)\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n if (status === 303) {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1));\n }\n\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../coreHttp\";\n\nexport const DEFAULT_CLIENT_RETRY_COUNT = 3;\n// intervals are in ms\nexport const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nexport const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nexport const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\nexport function isNumber(n: unknown): n is number {\n return typeof n === \"number\";\n}\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\n/**\n * @internal\n * Determines if the operation should be retried.\n *\n * @param retryLimit - Specifies the max number of retries.\n * @param predicate - Initial chekck on whether to retry based on given responses or errors\n * @param retryData - The retry data.\n * @returns True if the operation qualifies for a retry; false otherwise.\n */\nexport function shouldRetry(\n retryLimit: number,\n predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean,\n retryData: RetryData,\n response?: HttpOperationResponse,\n error?: RetryError\n): boolean {\n 
if (!predicate(response, error)) {\n return false;\n }\n\n return retryData.retryCount < retryLimit;\n}\n\n/**\n * @internal\n * Updates the retry data for the next attempt.\n *\n * @param retryOptions - specifies retry interval, and its lower bound and upper bound.\n * @param retryData - The retry data.\n * @param err - The operation\"s error, if any.\n */\nexport function updateRetryData(\n retryOptions: { retryInterval: number; minRetryInterval: number; maxRetryInterval: number },\n retryData: RetryData = { retryCount: 0, retryInterval: 0 },\n err?: RetryError\n): RetryData {\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;\n const boundedRandDelta =\n retryOptions.retryInterval * 0.8 +\n Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n retryOptions.minRetryInterval + incrementDelta,\n retryOptions.maxRetryInterval\n );\n\n return retryData;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if the value is not null or undefined.\n * @param thing - Anything\n * @internal\n */\nexport function isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError, AbortSignalLike } from \"@azure/abort-controller\";\nimport { isDefined } from \"./typeguards\";\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds.\n * @param delayInMs - The number of milliseconds to be delayed.\n * @param value - The value to be resolved with after a timeout of t milliseconds.\n * @param options - The options for delay - currently abort options\n * @param abortSignal - The abortSignal associated with containing operation.\n * @param abortErrorMsg - The abort error message associated with containing operation.\n * @returns - Resolved promise\n */\nexport function delay(\n delayInMs: number,\n value?: T,\n options?: {\n abortSignal?: AbortSignalLike;\n abortErrorMsg?: string;\n }\n): Promise {\n return new Promise((resolve, reject) => {\n let timer: ReturnType | undefined = undefined;\n let onAborted: (() => void) | undefined = undefined;\n\n const rejectOnAbort = (): void => {\n return reject(\n new AbortError(options?.abortErrorMsg ? 
options?.abortErrorMsg : StandardAbortMessage)\n );\n };\n\n const removeListeners = (): void => {\n if (options?.abortSignal && onAborted) {\n options.abortSignal.removeEventListener(\"abort\", onAborted);\n }\n };\n\n onAborted = (): void => {\n if (isDefined(timer)) {\n clearTimeout(timer);\n }\n removeListeners();\n return rejectOnAbort();\n };\n\n if (options?.abortSignal && options.abortSignal.aborted) {\n return rejectOnAbort();\n }\n\n timer = setTimeout(() => {\n removeListeners();\n resolve(value);\n }, delayInMs);\n\n if (options?.abortSignal) {\n options.abortSignal.addEventListener(\"abort\", onAborted);\n }\n });\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\nimport { logger } from \"../log\";\n\n/**\n * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time.\n * @param retryCount - Maximum number of retries.\n * @param retryInterval - Base time between retries.\n * @param maxRetryInterval - Maximum time to wait between retries.\n */\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * Describes the Retry Mode type. Currently supporting only Exponential.\n */\nexport enum RetryMode {\n /**\n * Currently supported retry mode.\n * Each time a retry happens, it will take exponentially more time than the last time.\n */\n Exponential,\n}\n\n/**\n * Options that control how to retry failed requests.\n */\nexport interface RetryOptions {\n /**\n * The maximum number of retry attempts. Defaults to 3.\n */\n maxRetries?: number;\n\n /**\n * The amount of delay in milliseconds between retry attempts. Defaults to 30000\n * (30 seconds). The delay increases exponentially with each retry up to a maximum\n * specified by maxRetryDelayInMs.\n */\n retryDelayInMs?: number;\n\n /**\n * The maximum delay in milliseconds allowed before retrying an operation. 
Defaults\n * to 90000 (90 seconds).\n */\n maxRetryDelayInMs?: number;\n\n /**\n * Currently supporting only Exponential mode.\n */\n mode?: RetryMode;\n}\n\nexport const DefaultRetryOptions: RetryOptions = {\n maxRetries: DEFAULT_CLIENT_RETRY_COUNT,\n retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL,\n maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n};\n\n/**\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @param nextPolicy - The next RequestPolicy in the pipeline chain.\n * @param options - The options for this RequestPolicy.\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\nasync function retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n function shouldPolicyRetry(responseParam?: HttpOperationResponse): boolean {\n const statusCode = responseParam?.status;\n if (statusCode === 503 && response?.headers.get(Constants.HeaderConstants.RETRY_AFTER)) {\n return false;\n }\n\n if (\n statusCode === undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n return true;\n }\n\n retryData = updateRetryData(\n {\n retryInterval: policy.retryInterval,\n minRetryInterval: 0,\n maxRetryInterval: policy.maxRetryInterval,\n },\n retryData,\n requestError\n );\n\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) {\n logger.info(`Retrying request in ${retryData.retryInterval}`);\n try {\n await delay(retryData.retryInterval);\n const res = await policy._nextPolicy.sendRequest(request.clone());\n return retry(policy, request, res, retryData);\n } catch (err: any) {\n return retry(policy, request, response, retryData, err);\n }\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n throw err;\n } else {\n return 
response;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Debugger } from \"@azure/logger\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Sanitizer } from \"../util/sanitizer\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger as coreLogger } from \"../log\";\n\n/**\n * Options to pass to the {@link logPolicy}.\n * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged.\n */\nexport interface LogPolicyOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to:\n * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id,\n * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials,\n * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers,\n * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding,\n * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match,\n * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent.\n *\n * Any headers specified in this field will be added to that list.\n * Any other values will be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n\n /**\n * The Debugger (logger) instance to use for writing pipeline logs.\n */\n logger?: Debugger;\n}\n\n/**\n * Creates a policy that logs information about the outgoing request and the incoming responses.\n * @param loggingOptions - Logging options.\n * @returns An instance of the {@link LogPolicy}\n */\nexport function logPolicy(loggingOptions: LogPolicyOptions = {}): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new LogPolicy(nextPolicy, options, loggingOptions);\n },\n };\n}\n\n/**\n * A policy that logs information about the outgoing request and the incoming responses.\n */\nexport class LogPolicy extends BaseRequestPolicy {\n logger: Debugger;\n sanitizer: Sanitizer;\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedHeaderNames(): Set {\n return this.sanitizer.allowedHeaderNames;\n }\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. 
Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedHeaderNames(allowedHeaderNames: Set) {\n this.sanitizer.allowedHeaderNames = allowedHeaderNames;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedQueryParameters(): Set {\n return this.sanitizer.allowedQueryParameters;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedQueryParameters(allowedQueryParameters: Set) {\n this.sanitizer.allowedQueryParameters = allowedQueryParameters;\n }\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n {\n logger = coreLogger.info,\n allowedHeaderNames = [],\n allowedQueryParameters = [],\n }: LogPolicyOptions = {}\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters });\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!this.logger.enabled) return this._nextPolicy.sendRequest(request);\n\n this.logRequest(request);\n return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response));\n }\n\n private logRequest(request: WebResourceLike): void {\n this.logger(`Request: ${this.sanitizer.sanitize(request)}`);\n }\n\n private logResponse(response: HttpOperationResponse): HttpOperationResponse {\n this.logger(`Response status code: ${response.status}`);\n this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`);\n return response;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Mapper } from \"./serializer\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\n\n/**\n * A path which describes how to access a particular property in a given object data source. 
May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values.\n */\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter - The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { MapperType, Serializer } from \"./serializer\";\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { HttpMethods } from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\n\n/**\n * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the {@link ServiceClient}.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. 
This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The media type of the request body.\n * This value can be used to aide in serialization if it is provided.\n */\n readonly mediaType?:\n | \"json\"\n | \"xml\"\n | \"form\"\n | \"binary\"\n | \"multipart\"\n | \"text\"\n | \"unknown\"\n | string;\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\n/**\n * Gets the list of status codes for streaming responses.\n * @internal\n */\nexport function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set {\n const result = new Set();\n for (const statusCode in operationSpec.responses) {\n const operationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result.add(Number(statusCode));\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as os from \"os\";\nimport { Constants } from \"../util/constants\";\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\nexport function getDefaultUserAgentKey(): string {\n return Constants.HeaderConstants.USER_AGENT;\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const runtimeInfo = {\n key: \"Node\",\n value: process.version,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `(${os.arch()}-${os.type()}-${os.release()})`,\n };\n\n return [runtimeInfo, osInfo];\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Telemetry information. 
Key/value pairs to include inside the User-Agent string.\n */\nexport type TelemetryInfo = { key?: string; value?: string };\n\n/**\n * Options for adding user agent details to outgoing requests.\n */\nexport interface UserAgentOptions {\n /**\n * String prefix to add to the user agent for outgoing requests.\n * Defaults to an empty string.\n */\n userAgentPrefix?: string;\n}\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"core-http\",\n value: Constants.coreHttpVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? `${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\n/**\n * The default approach to generate user agents.\n * Uses static information from this package, plus system information available from the runtime.\n */\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\n/**\n * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n * @param userAgentData - Telemetry information.\n * @returns A new {@link UserAgentPolicy}.\n */\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key === undefined || userAgentData.key === null\n ? getDefaultUserAgentKey()\n : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value === undefined || userAgentData.value === null\n ? 
getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\n/**\n * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n */\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptions,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n /**\n * Adds the user agent header to the outgoing request.\n */\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n /**\n * CSV: Each pair of segments joined by a single comma.\n */\n Csv = \",\",\n /**\n * SSV: Each pair of segments joined by a single space character.\n */\n Ssv = \" \",\n /**\n * TSV: Each pair of segments joined by a single tab character.\n */\n Tsv = \"\\t\",\n /**\n * Pipes: Each pair of segments joined by a single pipe character.\n */\n Pipes = \"|\",\n /**\n * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2`\n */\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"../policies/requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nexport const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 1000 
* 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 
0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\nexport function bearerTokenAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n const getToken = createTokenCycler(credential, scopes /* , options */);\n\n class BearerTokenAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const { token } = await getToken({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n });\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new BearerTokenAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n 
RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResource } from \"../webResource\";\n\n/**\n * Returns a request policy factory that can be used to create an instance of\n * {@link DisableResponseDecompressionPolicy}.\n */\nexport function disableResponseDecompressionPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DisableResponseDecompressionPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * A policy to disable response decompression according to Accept-Encoding header\n * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding\n */\nexport class DisableResponseDecompressionPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of DisableResponseDecompressionPolicy.\n *\n * @param nextPolicy -\n * @param options -\n */\n // The parent constructor is protected.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResource): Promise {\n request.decompressResponse = false;\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that assigns a unique request id to outgoing requests.\n * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request.\n */\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, request.requestId);\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\n\nlet cachedHttpClient: HttpClient | undefined;\n\nexport function getCachedDefaultHttpClient(): HttpClient {\n if (!cachedHttpClient) {\n cachedHttpClient = new DefaultHttpClient();\n }\n\n return cachedHttpClient;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// BaseRequestPolicy has a protected constructor.\n/* eslint-disable @typescript-eslint/no-useless-constructor */\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from 
\"../webResource\";\n\nexport function ndJsonPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new NdJsonPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * NdJsonPolicy that formats a JSON array as newline-delimited JSON\n */\nclass NdJsonPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends a request.\n */\n public async sendRequest(request: WebResourceLike): Promise {\n // There currently isn't a good way to bypass the serializer\n if (typeof request.body === \"string\" && request.body.startsWith(\"[\")) {\n const body = JSON.parse(request.body);\n if (Array.isArray(body)) {\n request.body = body.map((item) => JSON.stringify(item) + \"\\n\").join(\"\");\n }\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getEnvironmentValue } from \"../util/utils\";\n\n/**\n * Stores the patterns specified in NO_PROXY environment variable.\n * @internal\n */\nexport const globalNoProxyList: string[] = [];\nlet noProxyListLoaded: boolean = false;\n\n/** A cache of whether a host should bypass the proxy. */\nconst globalBypassedMap: Map = new Map();\n\nfunction loadEnvironmentProxyValue(): string | undefined {\n if (!process) {\n return undefined;\n }\n\n const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n const allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n\n return httpsProxy || allProxy || httpProxy;\n}\n\n/**\n * Check whether the host of a given `uri` matches any pattern in the no proxy list.\n * If there's a match, any request sent to the same host shouldn't have the proxy settings set.\n * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\n */\nfunction isBypassed(\n uri: string,\n noProxyList: string[],\n bypassedMap?: Map\n): boolean | undefined {\n if (noProxyList.length === 0) {\n return false;\n }\n const host = URLBuilder.parse(uri).getHost()!;\n if (bypassedMap?.has(host)) {\n return bypassedMap.get(host);\n }\n let isBypassedFlag = false;\n for (const pattern of noProxyList) {\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n } else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n } else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n bypassedMap?.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n\n/**\n * @internal\n */\nexport function loadNoProxy(): string[] {\n const noProxy = getEnvironmentValue(Constants.NO_PROXY);\n noProxyListLoaded = true;\n if (noProxy) {\n return noProxy\n .split(\",\")\n 
.map((item) => item.trim())\n .filter((item) => item.length);\n }\n\n return [];\n}\n\n/**\n * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed.\n * @param proxyUrl - URL of the proxy\n * @returns The default proxy settings, or undefined.\n */\nexport function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n\n const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl);\n const parsedUrl = URLBuilder.parse(urlWithoutAuth);\n const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username,\n password,\n };\n}\n\n/**\n * A policy that allows one to apply proxy settings to all requests.\n * If not passed static settings, they will be retrieved from the HTTPS_PROXY\n * or HTTP_PROXY environment variables.\n * @param proxySettings - ProxySettings to use on each request.\n * @param options - additional settings, for example, custom NO_PROXY patterns\n */\nexport function proxyPolicy(\n proxySettings?: ProxySettings,\n options?: {\n /** a list of patterns to override those loaded from NO_PROXY environment variable. */\n customNoProxyList?: string[];\n }\n): RequestPolicyFactory {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n if (!noProxyListLoaded) {\n globalNoProxyList.push(...loadNoProxy());\n }\n return {\n create: (nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions) => {\n return new ProxyPolicy(\n nextPolicy,\n requestPolicyOptions,\n proxySettings!,\n options?.customNoProxyList\n );\n },\n };\n}\n\nfunction extractAuthFromUrl(url: string): {\n username?: string;\n password?: string;\n urlWithoutAuth: string;\n} {\n const atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n\n const schemeIndex = url.indexOf(\"://\");\n const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n const auth = url.substring(authStart, atIndex);\n const colonIndex = auth.indexOf(\":\");\n const hasPassword = colonIndex !== -1;\n const username = hasPassword ? auth.substring(0, colonIndex) : auth;\n const password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username,\n password,\n urlWithoutAuth,\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public proxySettings: ProxySettings,\n private customNoProxyList?: string[]\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (\n !request.proxySettings &&\n !isBypassed(\n request.url,\n this.customNoProxyList ?? globalNoProxyList,\n this.customNoProxyList ? 
undefined : globalBypassedMap\n )\n ) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"../util/utils\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param originalRequest - The original request\n * @param reuseUrlToo - Should the url from the original request be reused as well. Default false.\n * @returns A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param body - The response body received after making the original request.\n * @returns The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err: any) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param url - The original request url\n * @returns The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param provider - The provider name to be registered.\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n */\nasync function registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n const response = await policy._nextPolicy.sendRequest(reqOptions);\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param url - The request url for polling\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns True if RP Registration is successful.\n */\nasync function getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n const res = await policy._nextPolicy.sendRequest(reqOptions);\n const obj = res.parsedBody;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n await delay(policy._retryTimeout * 1000);\n return getRegistrationStatus(policy, url, originalRequest);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n * @param authenticationProvider - The authentication provider.\n * @returns An instance of the {@link SigningPolicy}.\n */\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\n/**\n * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n */\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - Maximum number of retries.\n * @param 
retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n * @returns An instance of the {@link SystemErrorRetryPolicy}\n */\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n\n function shouldPolicyRetry(_response?: HttpOperationResponse, error?: RetryError): boolean {\n if (\n error &&\n error.code &&\n (error.code === \"ETIMEDOUT\" ||\n error.code === \"ESOCKETTIMEDOUT\" ||\n error.code === \"ECONNREFUSED\" ||\n error.code === \"ECONNRESET\" ||\n error.code === \"ENOENT\")\n ) {\n return true;\n }\n return false;\n }\n\n if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (nestedErr: any) {\n return retry(policy, request, operationResponse, nestedErr, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Maximum number of retries for the throttling retry policy\n */\nexport const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { AbortError } from \"@azure/abort-controller\";\nimport { Constants } from \"../util/constants\";\nimport { DEFAULT_CLIENT_MAX_RETRY_COUNT } from \"../util/throttlingRetryStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\ntype ResponseHandler = (\n httpRequest: WebResourceLike,\n response: HttpOperationResponse\n) => Promise;\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n * @returns\n */\nexport function throttlingRetryPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ThrottlingRetryPolicy(nextPolicy, options);\n },\n };\n}\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, 
please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private _handleResponse: ResponseHandler;\n private numberOfRetries = 0;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n _handleResponse?: ResponseHandler\n ) {\n super(nextPolicy, options);\n this._handleResponse = _handleResponse || this._defaultResponseHandler;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n const response = await this._nextPolicy.sendRequest(httpRequest.clone());\n if (\n response.status !== StatusCodes.TooManyRequests &&\n response.status !== StatusCodes.ServiceUnavailable\n ) {\n return response;\n } else {\n return this._handleResponse(httpRequest, response);\n }\n }\n\n private async _defaultResponseHandler(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse\n ): Promise {\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader) {\n const delayInMs: number | undefined =\n ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);\n if (delayInMs) {\n this.numberOfRetries += 1;\n\n await delay(delayInMs, undefined, {\n abortSignal: httpRequest.abortSignal,\n abortErrorMsg: StandardAbortMessage,\n });\n\n if (httpRequest.abortSignal?.aborted) {\n throw new AbortError(StandardAbortMessage);\n }\n\n if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) {\n return this.sendRequest(httpRequest);\n } else {\n return this._nextPolicy.sendRequest(httpRequest);\n }\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? 
undefined : diff;\n } catch (error: any) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n Span,\n SpanKind,\n SpanStatusCode,\n createSpanFunction,\n getTraceParentHeader,\n isSpanContextValid,\n} from \"@azure/core-tracing\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger } from \"../log\";\n\nconst createSpan = createSpanFunction({\n packagePrefix: \"\",\n namespace: \"\",\n});\n\n/**\n * Options to customize the tracing policy.\n */\nexport interface TracingPolicyOptions {\n /**\n * User agent used to better identify the outgoing requests traced by the tracing policy.\n */\n userAgent?: string;\n}\n\n/**\n * Creates a policy that wraps outgoing requests with a tracing span.\n * @param tracingOptions - Tracing options.\n * @returns An instance of the {@link TracingPolicy} class.\n */\nexport function tracingPolicy(tracingOptions: TracingPolicyOptions = {}): RequestPolicyFactory {\n return {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n return new TracingPolicy(nextPolicy, options, tracingOptions);\n },\n };\n}\n\n/**\n * A policy that wraps outgoing requests with a tracing span.\n */\nexport class TracingPolicy extends BaseRequestPolicy {\n private userAgent?: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n tracingOptions: TracingPolicyOptions\n ) {\n super(nextPolicy, options);\n this.userAgent = tracingOptions.userAgent;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n if (!request.tracingContext) {\n return this._nextPolicy.sendRequest(request);\n }\n\n const span = this.tryCreateSpan(request);\n\n if (!span) {\n return this._nextPolicy.sendRequest(request);\n }\n\n try {\n const response = await this._nextPolicy.sendRequest(request);\n this.tryProcessResponse(span, response);\n return response;\n } catch (err: any) {\n this.tryProcessError(span, err);\n throw err;\n }\n }\n\n tryCreateSpan(request: WebResourceLike): Span | undefined {\n try {\n // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier.\n // We can pass this as a separate parameter once we upgrade to the latest core-tracing.\n const { span } = createSpan(`HTTP ${request.method}`, {\n tracingOptions: {\n spanOptions: {\n ...(request as any).spanOptions,\n kind: SpanKind.CLIENT,\n },\n tracingContext: request.tracingContext,\n },\n });\n\n // If the span is not recording, don't do any more work.\n if (!span.isRecording()) {\n span.end();\n return undefined;\n }\n\n const namespaceFromContext = request.tracingContext?.getValue(Symbol.for(\"az.namespace\"));\n\n if (typeof namespaceFromContext === \"string\") {\n span.setAttribute(\"az.namespace\", namespaceFromContext);\n }\n\n span.setAttributes({\n \"http.method\": request.method,\n \"http.url\": request.url,\n requestId: request.requestId,\n });\n\n if (this.userAgent) {\n span.setAttribute(\"http.user_agent\", this.userAgent);\n }\n\n // set headers\n const spanContext = span.spanContext();\n const traceParentHeader = getTraceParentHeader(spanContext);\n if (traceParentHeader && isSpanContextValid(spanContext)) {\n request.headers.set(\"traceparent\", traceParentHeader);\n const traceState = spanContext.traceState && 
spanContext.traceState.serialize();\n // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent\n if (traceState) {\n request.headers.set(\"tracestate\", traceState);\n }\n }\n return span;\n } catch (error: any) {\n logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`);\n return undefined;\n }\n }\n\n private tryProcessError(span: Span, err: any): void {\n try {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: err.message,\n });\n\n if (err.statusCode) {\n span.setAttribute(\"http.status_code\", err.statusCode);\n }\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n\n private tryProcessResponse(span: Span, response: HttpOperationResponse): void {\n try {\n span.setAttribute(\"http.status_code\", response.status);\n const serviceRequestId = response.headers.get(\"x-ms-request-id\");\n if (serviceRequestId) {\n span.setAttribute(\"serviceRequestId\", serviceRequestId);\n }\n span.setStatus({\n code: SpanStatusCode.OK,\n });\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"./util/utils\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport {\n DefaultDeserializationOptions,\n DeserializationContentTypes,\n deserializationPolicy,\n} from \"./policies/deserializationPolicy\";\nimport { DefaultKeepAliveOptions, keepAlivePolicy } from \"./policies/keepAlivePolicy\";\nimport { DefaultRedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport { DefaultRetryOptions, exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { LogPolicyOptions, logPolicy } from \"./policies/logPolicy\";\nimport {\n OperationParameter,\n ParameterPath,\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n} from \"./operationParameter\";\nimport { OperationSpec, getStreamResponseStatusCodes } from \"./operationSpec\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResource,\n WebResourceLike,\n isWebResourceLike,\n} from \"./webResource\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./policies/requestPolicy\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\nimport { ServiceCallback, isNode } from \"./util/utils\";\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport {\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n userAgentPolicy,\n} from \"./policies/userAgentPolicy\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { InternalPipelineOptions } from \"./pipelineOptions\";\nimport { OperationArguments } from \"./operationArguments\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { URLBuilder } from \"./url\";\nimport { bearerTokenAuthenticationPolicy } from \"./policies/bearerTokenAuthenticationPolicy\";\nimport { disableResponseDecompressionPolicy } from 
\"./policies/disableResponseDecompressionPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport { getCachedDefaultHttpClient } from \"./httpClientCache\";\nimport { logger } from \"./log\";\nimport { ndJsonPolicy } from \"./policies/ndJsonPolicy\";\nimport { proxyPolicy } from \"./policies/proxyPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { stringifyXML } from \"./util/xml\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { tracingPolicy } from \"./policies/tracingPolicy\";\n\n/**\n * Options to configure a proxy for outgoing requests (Node.js only).\n */\nexport interface ProxySettings {\n /**\n * The proxy's host address.\n */\n host: string;\n\n /**\n * The proxy host's port.\n */\n port: number;\n\n /**\n * The user name to authenticate with the proxy, if required.\n */\n username?: string;\n\n /**\n * The password to authenticate with the proxy, if required.\n */\n password?: string;\n}\n\n/**\n * An alias of {@link ProxySettings} for future use.\n */\nexport type ProxyOptions = ProxySettings;\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. 
If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-useragent\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * If specified, will be used to build the BearerTokenAuthenticationPolicy.\n */\n credentialScopes?: string | string[];\n}\n\n/**\n * ServiceClient sends service requests and receives responses.\n */\nexport class ServiceClient {\n /**\n * If specified, this is the base URI that requests will be made against for this ServiceClient.\n * If it is not specified, then all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptions;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @param credentials - The credentials used for authentication with the service.\n * @param options - The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: TokenCredential | ServiceClientCredentials,\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || getCachedDefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n logger.info(\"ServiceClient: using custom request policies\");\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n let authPolicyFactory: RequestPolicyFactory | undefined = undefined;\n if (isTokenCredential(credentials)) {\n logger.info(\n \"ServiceClient: creating bearer token authentication policy from provided credentials\"\n );\n // Create a wrapped RequestPolicyFactory here so that we can provide the\n // correct scope to the BearerTokenAuthenticationPolicy at the first time\n // one is requested. 
This is needed because generated ServiceClient\n // implementations do not set baseUri until after ServiceClient's constructor\n // is finished, leaving baseUri empty at the time when it is needed to\n // build the correct scope name.\n const wrappedPolicyFactory: () => RequestPolicyFactory = () => {\n let bearerTokenPolicyFactory: RequestPolicyFactory | undefined = undefined;\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const serviceClient = this;\n const serviceClientOptions = options;\n return {\n create(nextPolicy: RequestPolicy, createOptions: RequestPolicyOptions): RequestPolicy {\n const credentialScopes = getCredentialScopes(\n serviceClientOptions,\n serviceClient.baseUri\n );\n\n if (!credentialScopes) {\n throw new Error(\n `When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`\n );\n }\n\n if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) {\n bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(\n credentials,\n credentialScopes\n );\n }\n\n return bearerTokenPolicyFactory.create(nextPolicy, createOptions);\n },\n };\n };\n\n authPolicyFactory = wrappedPolicyFactory();\n } else if (credentials && typeof credentials.signRequest === \"function\") {\n logger.info(\"ServiceClient: creating signing policy from provided credentials\");\n authPolicyFactory = signingPolicy(credentials);\n } else if (credentials !== undefined && credentials !== null) {\n throw new Error(\"The credentials argument must implement the TokenCredential interface\");\n }\n\n logger.info(\"ServiceClient: using default request policies\");\n requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options);\n if (options.requestPolicyFactories) {\n // options.requestPolicyFactories can also be a function that manipulates\n // the default requestPolicyFactories array\n const newRequestPolicyFactories: void | RequestPolicyFactory[] =\n options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error: any) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param operationArguments - The arguments that the HTTP request's templated values will be populated from.\n * @param operationSpec - The OperationSpec to use to populate the httpRequest.\n * @param callback - The callback to call 
when the response is received.\n */\n async sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const serializerOptions = operationArguments.options?.serializerOptions;\n const httpRequest: WebResourceLike = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter),\n serializerOptions\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue !== undefined && queryParameterValue !== null) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter),\n serializerOptions\n );\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null\n ) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if (queryParameterValue.length === 0) {\n // The collection is empty, no need to try serializing the current queryParam\n continue;\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] =\n item === undefined || item === null ? 
\"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType && operationSpec.requestBody) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue !== undefined && headerValue !== null) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter),\n serializerOptions\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n\n if (options.spanOptions) {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n (httpRequest as any).spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n httpRequest.tracingContext = options.tracingContext;\n }\n\n if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) {\n httpRequest.shouldDeserialize = options.shouldDeserialize;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n 
serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseStatusCodes === undefined) {\n httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec);\n }\n\n let rawResponse: HttpOperationResponse;\n let sendRequestError;\n try {\n rawResponse = await this.sendRequest(httpRequest);\n } catch (error: any) {\n sendRequestError = error;\n }\n if (sendRequestError) {\n if (sendRequestError.response) {\n sendRequestError.details = flattenResponse(\n sendRequestError.response,\n operationSpec.responses[sendRequestError.statusCode] ||\n operationSpec.responses[\"default\"]\n );\n }\n result = Promise.reject(sendRequestError);\n } else {\n result = Promise.resolve(\n flattenResponse(rawResponse!, operationSpec.responses[rawResponse!.status])\n );\n }\n } catch (error: any) {\n result = Promise.reject(error);\n }\n\n const cb = callback;\n if (cb) {\n result\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n const serializerOptions = operationArguments.options?.serializerOptions ?? {};\n const updatedOptions: Required = {\n rootName: serializerOptions.rootName ?? \"\",\n includeRoot: serializerOptions.includeRoot ?? false,\n xmlCharKey: serializerOptions.xmlCharKey ?? XML_CHARKEY,\n };\n\n const xmlCharKey = serializerOptions.xmlCharKey;\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } =\n bodyMapper;\n const typeName = bodyMapper.type.name;\n\n try {\n if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString,\n updatedOptions\n );\n\n const isStream = typeName === MapperType.Stream;\n\n if (operationSpec.isXML) {\n const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : \"xmlns\";\n const value = getXmlValueWithNamespace(\n xmlNamespace,\n xmlnsKey,\n typeName,\n httpRequest.body,\n updatedOptions\n );\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n value,\n xmlElementName || xmlName || serializedName!,\n xmlnsKey,\n xmlNamespace\n ),\n {\n rootName: xmlName || serializedName,\n xmlCharKey,\n }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(value, {\n rootName: xmlName || serializedName,\n xmlCharKey,\n });\n }\n } else if (\n typeName === MapperType.String &&\n (operationSpec.contentType?.match(\"text/plain\") || operationSpec.mediaType === \"text\")\n ) {\n // the String serializer has validated that request body is a string\n // so just send the string.\n return;\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error: any) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue !== undefined && formDataParameterValue !== null) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter),\n updatedOptions\n );\n }\n }\n }\n}\n\n/**\n * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself\n */\nfunction getXmlValueWithNamespace(\n xmlNamespace: string | undefined,\n xmlnsKey: string,\n typeName: string,\n serializedValue: any,\n options: Required\n): any {\n // Composite and Sequence schemas already got their root namespace set during serialization\n // We just need to add xmlns to the other schema types\n if (xmlNamespace && ![\"Composite\", \"Sequence\", \"Dictionary\"].includes(typeName)) {\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace };\n return result;\n }\n\n return serializedValue;\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n authPolicyFactory: RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (authPolicyFactory) {\n factories.push(authPolicyFactory);\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const 
userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n factories.push(redirectPolicy());\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n if (isNode) {\n factories.push(proxyPolicy(options.proxySettings));\n }\n\n factories.push(logPolicy({ logger: logger.info }));\n\n return factories;\n}\n\n/**\n * Creates an HTTP pipeline based on the given options.\n * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client.\n * @param authPolicyFactory - An optional authentication policy factory to use for signing requests.\n * @returns A set of options that can be passed to create a new {@link ServiceClient}.\n */\nexport function createPipelineFromOptions(\n pipelineOptions: InternalPipelineOptions,\n authPolicyFactory?: RequestPolicyFactory\n): ServiceClientOptions {\n const requestPolicyFactories: RequestPolicyFactory[] = [];\n\n if (pipelineOptions.sendStreamingJson) {\n requestPolicyFactories.push(ndJsonPolicy());\n }\n\n let userAgentValue = undefined;\n if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) {\n const userAgentInfo: string[] = [];\n userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix);\n\n // Add the default user agent value if it isn't already specified\n // by the userAgentPrefix option.\n const defaultUserAgentInfo = getDefaultUserAgentValue();\n if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) {\n userAgentInfo.push(defaultUserAgentInfo);\n }\n\n userAgentValue = userAgentInfo.join(\" \");\n }\n\n const keepAliveOptions = {\n ...DefaultKeepAliveOptions,\n ...pipelineOptions.keepAliveOptions,\n };\n\n const retryOptions = {\n ...DefaultRetryOptions,\n ...pipelineOptions.retryOptions,\n };\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...pipelineOptions.redirectOptions,\n };\n\n if (isNode) {\n requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));\n }\n\n const deserializationOptions = {\n ...DefaultDeserializationOptions,\n ...pipelineOptions.deserializationOptions,\n };\n\n const loggingOptions: LogPolicyOptions = {\n ...pipelineOptions.loggingOptions,\n };\n\n requestPolicyFactories.push(\n tracingPolicy({ userAgent: userAgentValue }),\n keepAlivePolicy(keepAliveOptions),\n userAgentPolicy({ value: userAgentValue }),\n generateClientRequestIdPolicy(),\n deserializationPolicy(deserializationOptions.expectedContentTypes),\n throttlingRetryPolicy(),\n systemErrorRetryPolicy(),\n exponentialRetryPolicy(\n retryOptions.maxRetries,\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n )\n );\n\n if (redirectOptions.handleRedirects) {\n requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n if (authPolicyFactory) {\n requestPolicyFactories.push(authPolicyFactory);\n }\n\n requestPolicyFactories.push(logPolicy(loggingOptions));\n\n if (isNode && pipelineOptions.decompressResponse === false) {\n requestPolicyFactories.push(disableResponseDecompressionPolicy());\n }\n\n return {\n httpClient: 
pipelineOptions.httpClient,\n requestPolicyFactories,\n };\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n const serializerOptions = operationArguments.options?.serializerOptions;\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions);\n if (propertyValue !== undefined && propertyValue !== null) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent !== undefined && parent !== null && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\n/**\n * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}).\n * @param _response - Wrapper object for http response.\n * @param responseSpec - Mappers for how to parse the response properties.\n * @returns - A normalized response object.\n */\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = >(\n obj: T\n ): T & {\n _response: HttpOperationResponse;\n } => {\n return Object.defineProperty(obj, \"_response\", {\n value: _response,\n }) as T & {\n _response: HttpOperationResponse;\n };\n };\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n const arrayResponse = [...(_response.parsedBody || [])] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of 
Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n\nfunction getCredentialScopes(\n options?: ServiceClientOptions,\n baseUri?: string\n): string | string[] | undefined {\n if (options?.credentialScopes) {\n const scopes = options.credentialScopes;\n return Array.isArray(scopes)\n ? scopes.map((scope) => new URL(scope).toString())\n : new URL(scopes).toString();\n }\n\n if (baseUri) {\n return `${baseUri}/.default`;\n }\n return undefined;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// NOTE: we've moved this code into core-tracing but these functions\n// were a part of the GA'd library and can't be removed until the next major\n// release. They currently get called always, even if tracing is not enabled.\n\nimport { Span, createSpanFunction as coreTracingCreateSpanFunction } from \"@azure/core-tracing\";\nimport { OperationOptions } from \"./operationOptions\";\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. Use core-tracing instead.\n * @hidden\n */\nexport interface SpanConfig {\n /**\n * Package name prefix\n */\n packagePrefix: string;\n /**\n * Service namespace\n */\n namespace: string;\n}\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. 
Use createSpanFunction in core-tracing.\n * @hidden\n\n * @param spanConfig - The name of the operation being performed.\n * @param tracingOptions - The options for the underlying http request.\n */\nexport function createSpanFunction(\n args: SpanConfig\n): (\n operationName: string,\n operationOptions: T\n) => { span: Span; updatedOptions: T } {\n return coreTracingCreateSpanFunction(args);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken } from \"@azure/core-auth\";\n\n/**\n * Defines the default token refresh buffer duration.\n */\nexport const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes\n\n/**\n * Provides a cache for an AccessToken that was that\n * was returned from a TokenCredential.\n */\nexport interface AccessTokenCache {\n /**\n * Sets the cached token.\n *\n * @param accessToken - The {@link AccessToken} to be cached or null to\n * clear the cached token.\n */\n setCachedToken(accessToken: AccessToken | undefined): void;\n\n /**\n * Returns the cached {@link AccessToken} or undefined if nothing is cached.\n */\n getCachedToken(): AccessToken | undefined;\n}\n\n/**\n * Provides an {@link AccessTokenCache} implementation which clears\n * the cached {@link AccessToken}'s after the expiresOnTimestamp has\n * passed.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class ExpiringAccessTokenCache implements AccessTokenCache {\n private tokenRefreshBufferMs: number;\n private cachedToken?: AccessToken = undefined;\n\n /**\n * Constructs an instance of {@link ExpiringAccessTokenCache} with\n * an optional expiration buffer time.\n */\n constructor(tokenRefreshBufferMs: number = TokenRefreshBufferMs) {\n this.tokenRefreshBufferMs = tokenRefreshBufferMs;\n }\n\n /**\n * Saves an access token into the internal in-memory cache.\n * @param accessToken - Access token or undefined to clear the cache.\n */\n setCachedToken(accessToken: AccessToken | undefined): void {\n this.cachedToken = accessToken;\n }\n\n /**\n * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon.\n */\n getCachedToken(): AccessToken | undefined {\n if (\n this.cachedToken &&\n Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp\n ) {\n this.cachedToken = undefined;\n }\n\n return this.cachedToken;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\n\n/**\n * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class AccessTokenRefresher {\n private promise: Promise | undefined;\n private lastCalled = 0;\n\n constructor(\n private credential: TokenCredential,\n private scopes: string | string[],\n private requiredMillisecondsBeforeNewRefresh: number = 30000\n ) {}\n\n /**\n * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying\n * that we are ready for a new refresh.\n */\n public isReady(): boolean {\n // We're only ready for a new refresh if the required milliseconds have passed.\n return (\n !this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh\n );\n }\n\n /**\n * Stores the time in which it is called,\n * then requests a new token,\n * then sets this.promise to undefined,\n * then returns 
the token.\n */\n private async getToken(options: GetTokenOptions): Promise {\n this.lastCalled = Date.now();\n const token = await this.credential.getToken(this.scopes, options);\n this.promise = undefined;\n return token || undefined;\n }\n\n /**\n * Requests a new token if we're not currently waiting for a new token.\n * Returns null if the required time between each call hasn't been reached.\n */\n public refresh(options: GetTokenOptions): Promise {\n if (!this.promise) {\n this.promise = this.getToken(options);\n }\n\n return this.promise;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\n/**\n * A simple {@link ServiceClientCredential} that authenticates with a username and a password.\n */\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n /**\n * Username\n */\n userName: string;\n\n /**\n * Password\n */\n password: string;\n\n /**\n * Authorization scheme. Defaults to \"Basic\".\n * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes\n */\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @param userName - User name.\n * @param password - Password.\n * @param authorizationScheme - The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be 
applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ApiKeyCredentialOptions, ApiKeyCredentials } from \"./apiKeyCredentials\";\n\n/**\n * A {@link TopicCredentials} object used for Azure Event Grid.\n */\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @param topicKey - The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n 
}\n}\n"],"names":["uuidv4","base64.decodeString","base64.encodeByteArray","utils.isValidUuid","utils.isDuration","tunnel","inspect","createClientLogger","Transform","tough","abortController","AbortController","AbortError","FormData","https","http","node_fetch","HttpPipelineLogLevel","__rest","xml2js","StandardAbortMessage","RetryMode","retry","logger","coreLogger","os","QueryCollectionFormat","DefaultHttpClient","utils.generateUuid","createSpanFunction","SpanKind","getTraceParentHeader","isSpanContextValid","SpanStatusCode","isTokenCredential","utils.prepareXMLRootList","utils.isPrimitiveType","coreTracingCreateSpanFunction","base64.encodeString"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AACA;AAEA;;AAEG;AACH,SAAS,YAAY,CAAC,UAAkB,EAAA;AACtC,IAAA,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;AA4EK,SAAU,iBAAiB,CAAC,MAAgB,EAAA;AAChD,IAAA,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAWlB,CAAC;AACF,QAAA,IACE,OAAO,UAAU,CAAC,UAAU,KAAK,UAAU;AAC3C,YAAA,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU;AACtC,YAAA,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;AACpC,YAAA,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;AACpC,YAAA,OAAO,UAAU,CAAC,QAAQ,KAAK,UAAU;AACzC,YAAA,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU;AACvC,YAAA,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;AAC7C,YAAA,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;AAC7C,YAAA,OAAO,UAAU,CAAC,WAAW,KAAK,UAAU;AAC5C,YAAA,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU,EACvC;AACA,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AACF,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;AAEG;MACU,WAAW,CAAA;AAGtB,IAAA,WAAA,CAAY,UAA2B,EAAA;AACrC,QAAA,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;AACtB,QAAA,IAAI,UAAU,EAAE;AACd,YAAA,KAAK,MAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;AAC9C,aAAA;AACF,SAAA;KACF;AAED;;;;;AAKG;IACI,GAAG,CAAC,UAAkB,EAAE,WAA4B,EAAA;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;AAC3C,YAAA,IAAI,EAAE,UAAU;AAChB,YAAA,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;KACH;AAED;;;;AAIG;AACI,IAAA,GAAG,CAAC,UAAkB,EAAA;QAC3B,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;AACtE,QAAA,OAAO,CAAC,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;KAC3C;AAED;;AAEG;AACI,IAAA,QAAQ,CAAC,UAAkB,EAAA;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;KACrD;AAED;;;;AAIG;AACI,IAAA,MAAM,CAAC,UAAkB,EAAA;QAC9B,MAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;AAClD,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;AAEG;IACI,UAAU,GAAA;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC;KAC5C;AAED;;AAEG;IACI,YAAY,GAAA;QACjB,MAAM,OAAO,GAAiB,EAAE,CAAC;AACjC,QAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;AAC3C,SAAA;AACD,QAAA,OAAO,OAAO,CAAC;KAChB;AAED;;AAEG;IACI,WAAW,GAAA;QAChB,MAAM,WAAW,GAAa,EAAE,CAAC;AACjC,QAAA,MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;AAClD,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACnC,SAAA;AACD,QAAA,OAAO,WAAW,CAAC;KACpB;AAED;;AAEG;IACI,YAAY,GAAA;QACjB,MAAM,YAAY,GAAa,EAAE,CAAC;AAClC,QAAA,MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;AAClD,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;AACrC,SAAA;AACD,QAAA,OAAO,YAAY,CAAC;KACrB;AAED;;AAEG;IACI,MAAM,CAAC,UAAsC,EAAE,EAAA;QACpD,MAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,IAAI,OAAO,CAAC,YAAY,EAAE;AACxB,YAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,
IAAI,CAAC,sBAAsB,GAAG,IAAI,GAAG,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;KAC3F;AAEM,IAAA,QAAQ,CAAC,GAAY,EAAA;AAC1B,QAAA,MAAM,IAAI,GAAG,IAAI,GAAG,EAAW,CAAC;QAChC,OAAO,IAAI,CAAC,SAAS,CACnB,GAAG,EACH,CAAC,GAAW,EAAE,KAAc,KAAI;;YAE9B,IAAI,KAAK,YAAY,KAAK,EAAE;AAC1B,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACK,KAAK,CAAA,EAAA,EACR,IAAI,EAAE,KAAK,CAAC,IAAI,EAChB,OAAO,EAAE,KAAK,CAAC,OAAO,EACtB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,GAAG,KAAK,aAAa,EAAE;AACzB,gBAAA,OAAO,IAAI,CAAC,eAAe,CAAC,KAAsB,CAAC,CAAC;AACrD,aAAA;iBAAM,IAAI,GAAG,KAAK,KAAK,EAAE;AACxB,gBAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAe,CAAC,CAAC;AAC1C,aAAA;iBAAM,IAAI,GAAG,KAAK,OAAO,EAAE;AAC1B,gBAAA,OAAO,IAAI,CAAC,aAAa,CAAC,KAAsB,CAAC,CAAC;AACnD,aAAA;iBAAM,IAAI,GAAG,KAAK,MAAM,EAAE;;AAEzB,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;iBAAM,IAAI,GAAG,KAAK,UAAU,EAAE;;AAE7B,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;iBAAM,IAAI,GAAG,KAAK,eAAe,EAAE;;;AAGlC,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,KAAK,CAAC,EAAE;AAClD,gBAAA,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;AACnB,oBAAA,OAAO,YAAY,CAAC;AACrB,iBAAA;AACD,gBAAA,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;AACjB,aAAA;AAED,YAAA,OAAO,KAAK,CAAC;SACd,EACD,CAAC,CACF,CAAC;KACH;AAEO,IAAA,eAAe,CAAC,KAAoB,EAAA;QAC1C,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;KAClF;AAEO,IAAA,aAAa,CAAC,KAAoB,EAAA;QACxC,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;KAChF;AAEO,IAAA,cAAc,CACpB,KAAoB,EACpB,WAAwB,EACxB,QAA0C,EAAA;QAE1C,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;AAC/C,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,MAAM,SAAS,GAAkB,EAAE,CAAC;QAEpC,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;YAClC,IAAI,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;gBACpC,SAAS,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AACnC,aAAA;AAAM,iBAAA;AACL,gBAAA,SAAS,CAAC,CAAC,CAAC,GAAG,cAAc,CAAC;AAC/B,aAAA;AACF,SAAA;AAED,QAAA,OAAO,SAAS,CAAC;KAClB;AAEO,IAAA,WAAW,CAAC,KAAa,EAAA;QAC/B,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;AAC/C,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AAC3C,QAAA,MAAM,WAAW,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;QAE1C,IAAI,CAAC,WAAW,EAAE;AAChB,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AAC1C,QAAA,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE;AAC5B,YAAA,IAAI,CAAC,IAAI,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;AACrD,gBAAA,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAC9B,aAAA;AACF,SAAA;QAED,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,QAAA,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;KAC9B;AACF;;ACtLD;AAKO,MAAM,MAAM,GAAGC,YAAO,CAAC,MAAM;;ACLpC;AAQA,MAAM,cAAc,GAAG,IAAI,SAAS,EAAE,CAAC;AAEvC;;AAEG;AACG,MAAO,SAAU,SAAQ,KAAK,CAAA;IA8BlC,WACE,CAAA,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAAA;QAEhC,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AACxB,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjB,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QAEzB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;KAClD;AAED;;AAEG;AACH,IAAA,CAAC,MAAM,CAAC,GAAA;AACN,QAAA,OAAO,CAAc,WAAA,EAAA,IAAI,CAAC,OAAO,CAAO,IAAA,EAAA,cAAc,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA,CAAE,CAAC;KACzE;;AAnDD;;AAEG;AACa,SAAkB,CAAA,kBAAA,GAAW,oBAAoB,CAAC;AAClE;;AAEG;AACa,SAAW,CAAA,WAAA,GAAW,aAAa;;ACrBrD;AAGO,MAAM,MAAM,GAAGC,2BAAkB,CAAC,WAAW,CAAC;;ACHrD;AAuBA,SAAS,cAAc,CACrB,OAAgB,EAChB,UAAsB,E
AAA;AAEtB,IAAA,OAAO,OAAO,GAAG,UAAU,CAAC,UAAU,GAAG,UAAU,CAAC,SAAS,CAAC;AAChE,CAAC;AA+BK,MAAO,eAAgB,SAAQC,gBAAS,CAAA;AAS5C,IAAA,WAAA,CAAoB,gBAA2D,EAAA;AAC7E,QAAA,KAAK,EAAE,CAAC;QADU,IAAgB,CAAA,gBAAA,GAAhB,gBAAgB,CAA2C;QARvE,IAAW,CAAA,WAAA,GAAW,CAAC,CAAC;KAU/B;AATD,IAAA,UAAU,CAAC,KAAsB,EAAE,SAAiB,EAAE,QAA4B,EAAA;AAChF,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACjB,QAAA,IAAI,CAAC,WAAW,IAAI,KAAK,CAAC,MAAM,CAAC;QACjC,IAAI,CAAC,gBAAiB,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAC1D,QAAQ,CAAC,SAAS,CAAC,CAAC;KACrB;AAKF,CAAA;AAED,SAAS,gBAAgB,CAAC,IAAS,EAAA;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB,EAAE,OAAyB,EAAA;AACnE,IAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAI;AAC7B,QAAA,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,MAAK;AACxB,YAAA,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,KAAK,EAAE,CAAC;AACjB,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC,CAAC;AACH,QAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;AAC5B,QAAA,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAChC,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;AAEG;AACG,SAAU,YAAY,CAAC,OAAgB,EAAA;AAC3C,IAAA,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,KAAI;AAC7B,QAAA,WAAW,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;AAC9B,KAAC,CAAC,CAAC;AAEH,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;AAEG;MACU,mBAAmB,CAAA;AAAhC,IAAA,WAAA,GAAA;;AA+LU,QAAA,IAAA,CAAA,aAAa,GAA4B,IAAI,GAAG,EAAE,CAAC;QACnD,IAAe,CAAA,eAAA,GAAe,EAAE,CAAC;AAExB,QAAA,IAAA,CAAA,SAAS,GAAG,IAAIC,gBAAK,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;KAmHlF;AApTC;;;;AAIG;IACH,MAAM,WAAW,CAAC,WAA4B,EAAA;;AAC5C,QAAA,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;AACnD,YAAA,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;AACH,SAAA;AAED,QAAA,MAAMC,iBAAe,GAAG,IAAIC,+BAAe,EAAE,CAAC;AAC9C,QAAA,IAAI,aAAiD,CAAC;QACtD,IAAI,WAAW,CAAC,WAAW,EAAE;AAC3B,YAAA,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;AACnC,gBAAA,MAAM,IAAIC,0BAAU,CAAC,4BAA4B,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,aAAa,GAAG,CAAC,KAAY,KAAI;AAC/B,gBAAA,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oBAC1BF,iBAAe,CAAC,KAAK,EAAE,CAAC;AACzB,iBAAA;AACH,aAAC,CAAC;YACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;AAClE,SAAA;QAED,IAAI,WAAW,CAAC,OAAO,EAAE;YACvB,UAAU,CAAC,MAAK;gBACdA,iBAAe,CAAC,KAAK,EAAE,CAAC;AAC1B,aAAC,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;AACzB,SAAA;QAED,IAAI,WAAW,CAAC,QAAQ,EAAE;AACxB,YAAA,MAAM,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;AAC3C,YAAA,MAAM,WAAW,GAAG,IAAIG,4BAAQ,EAAE,CAAC;AACnC,YAAA,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,KAAU,KAAU;;AAExD,gBAAA,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oBAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;AACjB,iBAAA;AACD,gBAAA,IACE,KAAK;oBACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC;oBACpD,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,EACtD;AACA,oBAAA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;AACrD,iBAAA;AAAM,qBAAA;AACL,oBAAA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;AAChC,iBAAA;AACH,aAAC,CAAC;YACF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;AAC3C,gBAAA,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;AACpC,gBAAA,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;AAC5B,oBAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,qBAAA;AACF,iBAAA;AAAM,qBAAA;AACL,oBAAA,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;AACrC,iBAAA;AACF,aAAA;AAED,YAAA,WAAW,CAAC,IAAI,GAAG,WAAW,CAAC;AAC/B,YAAA,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;YACjC,MAAM,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;AACpE,gBAAA,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU
,EAAE;AACjD,oBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,CAAiC,8BAAA,EAAA,WAAW,CAAC,WAAW,EAAE,CAAA,CAAE,CAC7D,CAAC;AACH,iBAAA;AAAM,qBAAA;;AAEL,oBAAA,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;AAC5C,iBAAA;AACF,aAAA;AACF,SAAA;AAED,QAAA,IAAI,IAAI,GAAG,WAAW,CAAC,IAAI;AACzB,cAAE,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;AACtC,kBAAE,WAAW,CAAC,IAAI,EAAE;kBAClB,WAAW,CAAC,IAAI;cAClB,SAAS,CAAC;AACd,QAAA,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;AACpD,YAAA,MAAM,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAC;AACtD,YAAA,MAAM,kBAAkB,GAAG,IAAI,eAAe,CAAC,gBAAgB,CAAC,CAAC;AACjE,YAAA,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;AAC1B,gBAAA,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAC/B,aAAA;AAAM,iBAAA;AACL,gBAAA,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;AAC9B,aAAA;YAED,IAAI,GAAG,kBAAkB,CAAC;AAC3B,SAAA;QAED,MAAM,2BAA2B,GAAyB,MAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,CAAC;AAEF,QAAA,MAAM,WAAW,GAAA,MAAA,CAAA,MAAA,CAAA,EACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAEH,iBAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,EACf,EAAA,2BAA2B,CAC/B,CAAC;AAEF,QAAA,IAAI,iBAAoD,CAAC;QACzD,IAAI;AACF,YAAA,MAAM,QAAQ,GAAmB,MAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;YAEhF,MAAM,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AAE/C,YAAA,MAAM,SAAS,GACb,CAAA,CAAA,EAAA,GAAA,WAAW,CAAC,yBAAyB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC;gBAC3D,WAAW,CAAC,kBAAkB,CAAC;AAEjC,YAAA,iBAAiB,GAAG;AAClB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,OAAO,EAAE,WAAW;gBACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;AACvB,gBAAA,kBAAkB,EAAE,SAAS;sBACxB,QAAQ,CAAC,IAAyC;AACrD,sBAAE,SAAS;AACb,gBAAA,UAAU,EAAE,CAAC,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,GAAG,SAAS;aAC3D,CAAC;AAEF,YAAA,MAAM,kBAAkB,GAAG,WAAW,CAAC,kBAAkB,CAAC;AAC1D,YAAA,IAAI,kBAAkB,EAAE;AACtB,gBAAA,MAAM,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;AAExF,gBAAA,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;AAClC,oBAAA,MAAM,oBAAoB,GAAG,IAAI,eAAe,CAAC,kBAAkB,CAAC,CAAC;AACrE,oBAAA,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AACxC,oBAAA,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;AAC7D,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;AACrE,oBAAA,IAAI,MAAM,EAAE;;AAEV,wBAAA,kBAAkB,CAAC,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC,CAAC;AAC7C,qBAAA;AACF,iBAAA;AACF,aAAA;AAED,YAAA,MAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAE7C,YAAA,OAAO,iBAAiB,CAAC;AAC1B,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,UAAU,GAAe,KAAK,CAAC;AACrC,YAAA,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;AACnC,gBAAA,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;AACH,aAAA;AAAM,iBAAA,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;AACxC,gBAAA,MAAM,IAAIE,0BAAU,CAAC,4BAA4B,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,MAAM,UAAU,CAAC;AAClB,SAAA;AAAS,gBAAA;;AAER,YAAA,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;AAC5C,gBAAA,IAAI,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;AACzC,gBAAA,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;AAC1B,oBAAA,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;AAC3C,iBAAA;AACD,gBAAA,IAAI,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;gBAC3C,IAAI,gBAAgB,CAAC,iBAAiB,KAAjB,IAAA,IAAA,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;oBAC3D,kBAAkB,GAAG,gBAAgB,CACnC,iBAAkB,CAAC,kBAAkB,EACrCF,iBAAe,CAChB,CAAC;AACH,iBAAA;gBAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;qBAChD,IAAI,CAAC,MAAK;;oBACT,CAAA,EAAA,GAAA,WAAW,CAAC,WAAW,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,mBAAmB,CAAC,OAAO,EAAE,aAAc,CAAC,CAAC;oBACtE,OAAO;AACT,iBAAC,CAAC;AACD,qBAAA,KAAK,CAAC,CAAC,CAAC,KAAI;AACX,oBAAA,MAAM,CAAC,OAAO,CAAC,qDAAqD,EAAE,CAAC,CAAC,CAAC;AAC3E,iBAAC,CAAC,CAAC;AACN,aAAA;AACF,SAAA;KACF;AAQO,IAAA,gBAAgB,CAAC,WAA4B,EAA
A;;QACnD,MAAM,OAAO,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;;;;QAK5C,IAAI,WAAW,CAAC,aAAa,EAAE;AAC7B,YAAA,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC,aAAa,CAAC;YACrE,MAAM,GAAG,GAAG,CAAA,EAAG,IAAI,CAAA,CAAA,EAAI,IAAI,CAAA,CAAA,EAAI,QAAQ,CAAA,CAAA,EAAI,QAAQ,CAAA,CAAE,CAAC;AACtD,YAAA,MAAM,WAAW,GAAG,CAAA,EAAA,GAAA,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,CAAC,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,EAAE,CAAC;YAEtD,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACjD,YAAA,IAAI,KAAK,EAAE;AACT,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AAED,YAAA,MAAM,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;AAEF,YAAA,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;YACrB,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,gBAAA,WAAW,CAAC,UAAU,GAAG,MAAM,CAAC,KAAoB,CAAC;AACtD,aAAA;AAAM,iBAAA;AACL,gBAAA,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;AACtC,aAAA;YACD,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;AAEzC,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;aAAM,IAAI,WAAW,CAAC,SAAS,EAAE;YAChC,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;AAC1D,YAAA,IAAI,KAAK,EAAE;AACT,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AAED,YAAA,MAAM,YAAY,GAA2C;gBAC3D,SAAS,EAAE,WAAW,CAAC,SAAS;aACjC,CAAC;AAEF,YAAA,IAAI,OAAO,EAAE;AACX,gBAAA,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,GAAG,IAAII,gBAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AACzE,aAAA;AAAM,iBAAA;AACL,gBAAA,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,GAAG,IAAIC,eAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AACvE,aAAA;AAED,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,OAAO,OAAO,GAAGD,gBAAK,CAAC,WAAW,GAAGC,eAAI,CAAC,WAAW,CAAC;AACvD,SAAA;KACF;AAED;;AAEG;;AAEH,IAAA,MAAM,KAAK,CAAC,KAAwB,EAAE,IAAwB,EAAA;AAC5D,QAAA,OAAOC,8BAAU,CAAC,KAAK,EAAE,IAAI,CAAuC,CAAC;KACtE;AAED;;AAEG;IACH,MAAM,cAAc,CAAC,WAA4B,EAAA;QAC/C,MAAM,WAAW,GAA+D,EAAE,CAAC;AAEnF,QAAA,IAAI,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;YACxD,MAAM,YAAY,GAAG,MAAM,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,KAAI;AACjE,gBAAA,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,KAAI;AAC/D,oBAAA,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAC;AACb,qBAAA;AAAM,yBAAA;wBACL,OAAO,CAAC,MAAM,CAAC,CAAC;AACjB,qBAAA;AACH,iBAAC,CAAC,CAAC;AACL,aAAC,CAAC,CAAC;YAEH,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;AACjD,SAAA;;QAGD,WAAW,CAAC,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;AAEvD,QAAA,WAAW,CAAC,QAAQ,GAAG,WAAW,CAAC,kBAAkB,CAAC;AAEtD,QAAA,OAAO,WAAW,CAAC;KACpB;AAED;;AAEG;IACH,MAAM,cAAc,CAAC,iBAAwC,EAAA;QAC3D,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,MAAM,eAAe,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;YACpE,IAAI,eAAe,KAAK,SAAS,EAAE;gBACjC,MAAM,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;oBAC1C,IAAI,CAAC,SAAU,CAAC,SAAS,CACvB,eAAe,EACf,iBAAiB,CAAC,OAAO,CAAC,GAAG,EAC7B,EAAE,WAAW,EAAE,IAAI,EAAE,EACrB,CAAC,GAAG,KAAI;AACN,wBAAA,IAAI,GAAG,EAAE;4BACP,MAAM,CAAC,GAAG,CAAC,CAAC;AACb,yBAAA;AAAM,6BAAA;AACL,4BAAA,OAAO,EAAE,CAAC;AACX,yBAAA;AACH,qBAAC,CACF,CAAC;AACJ,iBAAC,CAAC,CAAC;AACJ,aAAA;AACF,SAAA;KACF;AACF;;AC7ZD;AACA;AAEA;;AAEG;AACSC,sCAoBX;AApBD,CAAA,UAAY,oBAAoB,EAAA;AAC9B;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,KAAA,CAAA,GAAA,CAAA,CAAA,GAAA,KAAG,CAAA;AAEH;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,OAAA,CAAA,GAAA,CAAA,CAAA,GAAA,OAAK,CAAA;AAEL;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,SAAA,CAAA,GAAA,CAAA,CAAA,GAAA,SAAO,CAAA;AAEP;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,MAAA,CAAA,GAAA,CAAA,CAAA,GAAA,MAAI,CAAA;AACN,CAAC,EApBWA,4BAAoB,KAApBA,4BAAoB,GAoB/B,EAAA,CAAA,CAAA;;AC1BD;AAwDA;;;;AAIG;AACG,SAAU,oCAAoC,CAClD,IAAO,EAAA;AAEP,IAAA,MAAM,EAAE,cAAc,EAAE,cAAc,EAA2B,GAAA,IAAI,EAA1B,iBAAiB,GAAKC,YAAA,CAAA,IAAI,EAA/D,CAAA,gBAAA,EAAA,gBAAA,CAAwD,CAAO,CAAC;IAE
tE,IAAI,MAAM,GAAuB,iBAAiB,CAAC;AAEnD,IAAA,IAAI,cAAc,EAAE;AAClB,QAAA,MAAM,GAAQ,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,MAAM,CAAK,EAAA,cAAc,CAAE,CAAC;AAC3C,KAAA;AAED,IAAA,IAAI,cAAc,EAAE;AAClB,QAAA,MAAM,CAAC,cAAc,GAAG,cAAc,CAAC,cAAc,CAAC;;QAEtD,MAAM,CAAC,WAAW,GAAI,cAAsB,KAAA,IAAA,IAAtB,cAAc,KAAd,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,cAAc,CAAU,WAAW,CAAC;AAC3D,KAAA;AAED,IAAA,OAAO,MAAM,CAAC;AAChB;;AC/EA;AA0BA;;AAEG;MACmB,iBAAiB,CAAA;AACrC;;AAEG;AACH,IAAA,WAAA;AACE;;AAEG;IACM,WAA0B;AACnC;;AAEG;IACM,QAAkC,EAAA;QAJlC,IAAW,CAAA,WAAA,GAAX,WAAW,CAAe;QAI1B,IAAQ,CAAA,QAAA,GAAR,QAAQ,CAA0B;KACzC;AAQJ;;;;AAIG;AACI,IAAA,SAAS,CAAC,QAA8B,EAAA;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;KAC1C;AAED;;;;;AAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe,EAAA;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;KACtC;AACF,CAAA;AAsBD;;AAEG;MACU,oBAAoB,CAAA;AAC/B,IAAA,WAAA,CAAoB,OAA4B,EAAA;QAA5B,IAAO,CAAA,OAAA,GAAP,OAAO,CAAqB;KAAI;AAEpD;;;;AAIG;AACI,IAAA,SAAS,CAAC,QAA8B,EAAA;AAC7C,QAAA,QACE,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAKD,4BAAoB,CAAC,GAAG;AACrC,YAAA,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EACxC;KACH;AAED;;;;;AAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe,EAAA;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;AACrC,SAAA;KACF;AACF;;ACxHD;AAMA;AACA;AACA;AACA;AACA,MAAM,sBAAsB,GAAqB;AAC/C,IAAA,eAAe,EAAE,KAAK;AACtB,IAAA,IAAI,EAAE,KAAK;AACX,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,aAAa,EAAE,KAAK;AACpB,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,aAAa,EAAE,IAAI;AACnB,IAAA,WAAW,EAAE,KAAK;AAClB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,YAAY,EAAE,IAAI;AAClB,IAAA,SAAS,EAAE,SAAS;AACpB,IAAA,KAAK,EAAE,KAAK;AACZ,IAAA,gBAAgB,EAAE,KAAK;AACvB,IAAA,qBAAqB,EAAE,KAAK;AAC5B,IAAA,QAAQ,EAAE,IAAI;AACd,IAAA,eAAe,EAAE,KAAK;AACtB,IAAA,iBAAiB,EAAE,KAAK;AACxB,IAAA,KAAK,EAAE,KAAK;AACZ,IAAA,MAAM,EAAE,IAAI;AACZ,IAAA,kBAAkB,EAAE,SAAS;AAC7B,IAAA,mBAAmB,EAAE,SAAS;AAC9B,IAAA,iBAAiB,EAAE,SAAS;AAC5B,IAAA,eAAe,EAAE,SAAS;AAC1B,IAAA,QAAQ,EAAE,MAAM;AAChB,IAAA,MAAM,EAAE;AACN,QAAA,OAAO,EAAE,KAAK;AACd,QAAA,QAAQ,EAAE,OAAO;AACjB,QAAA,UAAU,EAAE,IAAI;AACjB,KAAA;AACD,IAAA,OAAO,EAAE,SAAS;AAClB,IAAA,UAAU,EAAE;AACV,QAAA,MAAM,EAAE,IAAI;AACZ,QAAA,MAAM,EAAE,IAAI;AACZ,QAAA,OAAO,EAAE,IAAI;AACd,KAAA;AACD,IAAA,QAAQ,EAAE,KAAK;AACf,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,QAAQ,EAAE,EAAE;AACZ,IAAA,KAAK,EAAE,KAAK;CACb,CAAC;AAEF;AACA,MAAM,oBAAoB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC5E,oBAAoB,CAAC,aAAa,GAAG,KAAK,CAAC;AAE3C;AACA,MAAM,qBAAqB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC7E,qBAAqB,CAAC,aAAa,GAAG,KAAK,CAAC;AAC5C,qBAAqB,CAAC,UAAU,GAAG;AACjC,IAAA,MAAM,EAAE,KAAK;CACd,CAAC;AAEF;;;;AAIG;SACa,YAAY,CAAC,GAAY,EAAE,OAA0B,EAAE,EAAA;;AACrE,IAAA,qBAAqB,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IAC/C,qBAAqB,CAAC,OAAO,GAAG,CAAA,EAAA,GAAA,IAAI,CAAC,UAAU,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,WAAW,CAAC;IAC/D,MAAM,OAAO,GAAG,IAAIE,iBAAM,CAAC,OAAO,CAAC,qBAAqB,CAAC,CAAC;AAC1D,IAAA,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED;;;;AAIG;SACa,QAAQ,CAAC,GAAW,EAAE,OAA0B,EAAE,EAAA;;IAChE,oBAAoB,CAAC,YAAY,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC;IACvD,oBAAoB,CAAC,OAAO,GAAG,CAAA,EAAA,GAAA,IAAI,CAAC,UAAU,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,WAAW,CAAC;IAC9D,MAAM,SAAS,GAAG,IAAIA,iBAAM,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC;IAC1D,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,KAAI;QACrC,IAAI,CAAC,GAAG,EAAE;AACR,YAAA,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;AACxC,SAAA;AAAM,aAAA;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,GAAG,KAAI;AACtC,gBAAA,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;AACb,iBAAA;AAAM,qBAAA;oBACL,OAAO,CAAC,G
AAG,CAAC,CAAC;AACd,iBAAA;AACH,aAAC,CAAC,CAAC;AACJ,SAAA;AACH,KAAC,CAAC,CAAC;AACL;;AChGA;AA+CA;;;AAGG;AACa,SAAA,qBAAqB,CACnC,2BAAyD,EACzD,cAAkC,EAAA;IAElC,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,qBAAqB,CAC9B,UAAU,EACV,OAAO,EACP,2BAA2B,EAC3B,cAAc,CACf,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAEM,MAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AAClE,MAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAE3E,MAAM,6BAA6B,GAA2B;AACnE,IAAA,oBAAoB,EAAE;AACpB,QAAA,IAAI,EAAE,uBAAuB;AAC7B,QAAA,GAAG,EAAE,sBAAsB;AAC5B,KAAA;CACF,CAAC;AAEF;;;AAGG;AACG,MAAO,qBAAsB,SAAQ,iBAAiB,CAAA;AAK1D,IAAA,WAAA,CACE,UAAyB,EACzB,oBAA0C,EAC1C,2BAAyD,EACzD,iBAAoC,EAAE,EAAA;;AAEtC,QAAA,KAAK,CAAC,UAAU,EAAE,oBAAoB,CAAC,CAAC;AAExC,QAAA,IAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,KAAK,uBAAuB,CAAC;AAC/F,QAAA,IAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,KAAK,sBAAsB,CAAC;QAC7F,IAAI,CAAC,UAAU,GAAG,CAAA,EAAA,GAAA,cAAc,CAAC,UAAU,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,WAAW,CAAC;KAC5D;IAEM,MAAM,WAAW,CAAC,OAAwB,EAAA;QAC/C,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAA+B,KAChF,uBAAuB,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,eAAe,EAAE,QAAQ,EAAE;YAC7E,UAAU,EAAE,IAAI,CAAC,UAAU;AAC5B,SAAA,CAAC,CACH,CAAC;KACH;AACF,CAAA;AAED,SAAS,oBAAoB,CAC3B,cAAqC,EAAA;AAErC,IAAA,IAAI,MAAqC,CAAC;AAC1C,IAAA,MAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;AACxD,IAAA,MAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;AACvE,IAAA,IAAI,aAAa,EAAE;AACjB,QAAA,MAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;AACzD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;AACjE,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC,EAAA;AACtE,IAAA,MAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;AAC3C,IAAA,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;AACf,KAAA;AAAM,SAAA,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;AAC5B,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;AAC5C,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;AAOG;AACG,SAAU,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B,EAC/B,OAAA,GAA6B,EAAE,EAAA;;AAE/B,IAAA,MAAM,cAAc,GAAgC;AAClD,QAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;AAChC,QAAA,WAAW,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;AACzC,QAAA,UAAU,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;KAC9C,CAAC;AACF,IAAA,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,EAAE,cAAc,CAAC,CAAC,IAAI,CAC5E,CAAC,cAAc,KAAI;AACjB,QAAA,IAAI,CAAC,yBAAyB,CAAC,cAAc,CAAC,EAAE;AAC9C,YAAA,OAAO,cAAc,CAAC;AACvB,SAAA;AAED,QAAA,MAAM,aAAa,GAAG,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;AAC3D,QAAA,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,SAAS,EAAE;AAC9C,YAAA,OAAO,cAAc,CAAC;AACvB,SAAA;AAED,QAAA,MAAM,YAAY,GAAG,oBAAoB,CAAC,cAAc,CAAC,CAAC;AAE1D,QAAA,MAAM,EAAE,KAAK,EAAE,oBAAoB,EAAE,GAAG,mBAAmB,CACzD,cAAc,EACd,aAAa,EACb,YAAY,CACb,CAAC;AACF,QAAA,IAAI,KAAK,EAAE;AACT,YAAA,MAAM,KAAK,CAAC;AACb,SAAA;AAAM,aAAA,IAAI,oBAAoB,EAAE;AAC/B,YAAA,OAAO,cAAc,CAAC;AACvB,SAAA;;;AAID,QAAA,IAAI,YAAY,EAAE;YAChB,IAAI,YAAY,CAAC,UAAU,EAAE;AAC3B,gBAAA,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;AACxD,gBAAA,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBACpF,kBAAkB;wBAChB,OAAO,kBAAkB,KAAK,QAAQ;8BAClC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;8BAC3D,EAAE,CAAC;AACV,iBAAA;gBACD,IAAI;AACF,oBAAA,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,EACzB,OAAO,CACR,CAAC;AACH,iBAAA;AAAC,gBAAA,OAAO,UAAe,EAAE;oBACxB,MAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,SAAS,UAAU,CAAA
,8CAAA,EAAiD,cAAc,CAAC,UAAU,CAAA,CAAE,EAC/F,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;AACF,oBAAA,MAAM,SAAS,CAAC;AACjB,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;;AAE9C,gBAAA,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;AAC7E,aAAA;YAED,IAAI,YAAY,CAAC,aAAa,EAAE;gBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,EAC5B,OAAO,CACR,CAAC;AACH,aAAA;AACF,SAAA;AAED,QAAA,OAAO,cAAc,CAAC;AACxB,KAAC,CACF,CAAC;AACJ,CAAC;AAED,SAAS,oBAAoB,CAAC,aAA4B,EAAA;IACxD,MAAM,mBAAmB,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;AACjE,IAAA,QACE,mBAAmB,CAAC,MAAM,KAAK,CAAC;AAChC,SAAC,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,EAC1E;AACJ,CAAC;AAED,SAAS,mBAAmB,CAC1B,cAAqC,EACrC,aAA4B,EAC5B,YAA2C,EAAA;;AAE3C,IAAA,MAAM,iBAAiB,GAAG,GAAG,IAAI,cAAc,CAAC,MAAM,IAAI,cAAc,CAAC,MAAM,GAAG,GAAG,CAAC;AACtF,IAAA,MAAM,oBAAoB,GAAY,oBAAoB,CAAC,aAAa,CAAC;AACvE,UAAE,iBAAiB;AACnB,UAAE,CAAC,CAAC,YAAY,CAAC;AAEnB,IAAA,IAAI,oBAAoB,EAAE;AACxB,QAAA,IAAI,YAAY,EAAE;AAChB,YAAA,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE;gBACzB,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AACrD,aAAA;AACF,SAAA;AAAM,aAAA;YACL,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AACrD,SAAA;AACF,KAAA;AAED,IAAA,MAAM,iBAAiB,GAAG,YAAY,KAAA,IAAA,IAAZ,YAAY,KAAA,KAAA,CAAA,GAAZ,YAAY,GAAI,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;AAC1E,IAAA,MAAM,SAAS,GACb,CAAA,CAAA,EAAA,GAAA,cAAc,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,cAAc,CAAC,MAAM,CAAC;AAC5E,QAAA,cAAc,CAAC,OAAO,CAAC,kBAAkB,CAAC;IAC5C,MAAM,mBAAmB,GAAG,SAAS;AACnC,UAAE,CAAA,wBAAA,EAA2B,cAAc,CAAC,MAAM,CAAE,CAAA;AACpD,UAAG,cAAc,CAAC,UAAqB,CAAC;AAE1C,IAAA,MAAM,KAAK,GAAG,IAAI,SAAS,CACzB,mBAAmB,EACnB,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;;;IAIF,IAAI,CAAC,iBAAiB,EAAE;AACtB,QAAA,MAAM,KAAK,CAAC;AACb,KAAA;AAED,IAAA,MAAM,iBAAiB,GAAG,iBAAiB,CAAC,UAAU,CAAC;AACvD,IAAA,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,aAAa,CAAC;IAE7D,IAAI;;;QAGF,IAAI,cAAc,CAAC,UAAU,EAAE;AAC7B,YAAA,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;AAC7C,YAAA,IAAI,WAAW,CAAC;AAChB,YAAA,IAAI,iBAAiB,EAAE;gBACrB,IAAI,kBAAkB,GAAQ,UAAU,CAAC;AACzC,gBAAA,IAAI,aAAa,CAAC,KAAK,IAAI,iBAAiB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBAC9E,kBAAkB;AAChB,wBAAA,OAAO,UAAU,KAAK,QAAQ,GAAG,UAAU,CAAC,iBAAiB,CAAC,cAAe,CAAC,GAAG,EAAE,CAAC;AACvF,iBAAA;AACD,gBAAA,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAChD,iBAAiB,EACjB,kBAAkB,EAClB,2BAA2B,CAC5B,CAAC;AACH,aAAA;YAED,MAAM,aAAa,GAAQ,UAAU,CAAC,KAAK,IAAI,WAAW,IAAI,UAAU,CAAC;AACzE,YAAA,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;YAChC,IAAI,aAAa,CAAC,OAAO,EAAE;AACzB,gBAAA,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;AACvC,aAAA;AAED,YAAA,IAAI,iBAAiB,EAAE;AACrB,gBAAA,KAAK,CAAC,QAAS,CAAC,UAAU,GAAG,WAAW,CAAC;AAC1C,aAAA;AACF,SAAA;;AAGD,QAAA,IAAI,cAAc,CAAC,OAAO,IAAI,oBAAoB,EAAE;YAClD,KAAK,CAAC,QAAS,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAClE,oBAAoB,EACpB,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;AACH,SAAA;AACF,KAAA;AAAC,IAAA,OAAO,YAAiB,EAAE;AAC1B,QAAA,KAAK,CAAC,OAAO,GAAG,CAAA,OAAA,EAAU,YAAY,CAAC,OAAO,CAAA,gDAAA,EAAmD,cAAc,CAAC,UAAU,CAAA,2BAAA,CAA6B,CAAC;AACzJ,KAAA;AAED,IAAA,OAAO,EAAE,KAAK,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AAChD,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC,EACxC,IAAiC,EAAA;;AAEjC,IAAA,MAAM,YAAY,GAAG,CAAC,GAA6B,KAAoB;QACrE,MAAM,GAAG,GAAG,CAAU,OAAA,EAAA,GAAG,gDAAgD,iBAAiB,CAAC,UAAU,CAAA,CAAA,CAAG,CAAC;QACzG,MAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;AAClD,QAAA,MAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB
,iBAAiB,CAClB,CAAC;AACF,QAAA,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAC3B,KAAC,CAAC;AAEF,IAAA,MAAM,SAAS,GACb,CAAA,CAAA,EAAA,GAAA,iBAAiB,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,iBAAiB,CAAC,MAAM,CAAC;AAClF,QAAA,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,CAAC;AAC/C,IAAA,IAAI,CAAC,SAAS,IAAI,iBAAiB,CAAC,UAAU,EAAE;AAC9C,QAAA,MAAM,IAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;AAC1C,QAAA,MAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,MAAM,iBAAiB,GAAa,CAAC,WAAW;AAC9C,cAAE,EAAE;cACF,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,KAAK,SAAS,CAAC,WAAW,EAAE,CAAC,CAAC;AACvE,QAAA,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;AAC9B,YAAA,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,KAAK,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EACjF;AACA,YAAA,OAAO,IAAI,OAAO,CAAwB,CAAC,OAAO,KAAI;gBACpD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC7B,aAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AACxB,SAAA;AAAM,aAAA,IAAI,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,KAAK,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;AAC3F,YAAA,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC;AACxB,iBAAA,IAAI,CAAC,CAAC,IAAI,KAAI;AACb,gBAAA,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;AACpC,gBAAA,OAAO,iBAAiB,CAAC;AAC3B,aAAC,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;AACxB,SAAA;AACF,KAAA;AAED,IAAA,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C;;AC9XA;AAwBA;;AAEG;AACI,MAAM,uBAAuB,GAAqB;AACvD,IAAA,MAAM,EAAE,IAAI;CACb,CAAC;AAEF;;;;AAIG;AACG,SAAU,eAAe,CAAC,gBAAmC,EAAA;IACjE,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,gBAAgB,IAAI,uBAAuB,CAAC,CAAC;SAC9F;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,eAAgB,SAAQ,iBAAiB,CAAA;AACpD;;;;;;AAMG;AACH,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACZ,gBAAkC,EAAA;AAEnD,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFV,IAAgB,CAAA,gBAAA,GAAhB,gBAAgB,CAAkB;KAGpD;AAED;;;;;AAKG;IACI,MAAM,WAAW,CAAC,OAAwB,EAAA;QAC/C,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;QACjD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACzED;AAaA;;AAEG;AACH,MAAM,eAAe,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AAkBjC,MAAM,sBAAsB,GAAoB;AACrD,IAAA,eAAe,EAAE,IAAI;AACrB,IAAA,UAAU,EAAE,EAAE;CACf,CAAC;AAEF;;;;AAIG;AACa,SAAA,cAAc,CAAC,cAAc,GAAG,EAAE,EAAA;IAChD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAChE;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,cAAe,SAAQ,iBAAiB,CAAA;AACnD,IAAA,WAAA,CAAY,UAAyB,EAAE,OAA6B,EAAW,aAAa,EAAE,EAAA;AAC5F,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QADkD,IAAU,CAAA,UAAA,GAAV,UAAU,CAAK;KAE7F;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;AACpB,aAAA,IAAI,CAAC,CAAC,QAAQ,KAAK,cAAc,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC;KAC1D;AACF,CAAA;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB,EAAA;AAEtB,IAAA,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC;IACrC,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;AACxD,IAAA,IACE,cAAc;SACb,MAAM,KAAK,GAAG;AACb,aAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;AAC5D,aAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;aAC3D,MAAM,KAAK,GAAG,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;SAChB,CAAC,MAAM,CAAC,UAAU,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,EAC1D;QACA,MAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AAC9C,QAAA,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;AAChC,QAAA,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;;;QAIjC,IAAI,MAAM,KAAK,GAAG,EAAE;AAClB,YAAA,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;AACrB,SAAA;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;AACpB,a
AAA,IAAI,CAAC,CAAC,GAAG,KAAK,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,CAAC,CAAC;AACnE,KAAA;AAED,IAAA,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC;;ACpGA;AACA;AAIO,MAAM,0BAA0B,GAAG,CAAC,CAAC;AAC5C;AACO,MAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AAChD,MAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AACpD,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAEpD,SAAU,QAAQ,CAAC,CAAU,EAAA;AACjC,IAAA,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;AAC/B,CAAC;AAaD;;;;;;;;AAQG;AACG,SAAU,WAAW,CACzB,UAAkB,EAClB,SAA4E,EAC5E,SAAoB,EACpB,QAAgC,EAChC,KAAkB,EAAA;AAElB,IAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,KAAK,CAAC,EAAE;AAC/B,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;AAED,IAAA,OAAO,SAAS,CAAC,UAAU,GAAG,UAAU,CAAC;AAC3C,CAAC;AAED;;;;;;;AAOG;SACa,eAAe,CAC7B,YAA2F,EAC3F,YAAuB,EAAE,UAAU,EAAE,CAAC,EAAE,aAAa,EAAE,CAAC,EAAE,EAC1D,GAAgB,EAAA;AAEhB,IAAA,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;AACnB,YAAA,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;AAClC,SAAA;AAED,QAAA,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;AACvB,KAAA;;IAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;AAGvB,IAAA,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;AAC/D,IAAA,MAAM,gBAAgB,GACpB,YAAY,CAAC,aAAa,GAAG,GAAG;AAChC,QAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,YAAY,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACjE,cAAc,IAAI,gBAAgB,CAAC;AAEnC,IAAA,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,YAAY,CAAC,gBAAgB,GAAG,cAAc,EAC9C,YAAY,CAAC,gBAAgB,CAC9B,CAAC;AAEF,IAAA,OAAO,SAAS,CAAC;AACnB;;ACtFA;AACA;AAEA;;;;AAIG;AACG,SAAU,SAAS,CAAI,KAA2B,EAAA;IACtD,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD;;ACVA;AAMA,MAAMC,sBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;AAQG;SACa,KAAK,CACnB,SAAiB,EACjB,KAAS,EACT,OAGC,EAAA;IAED,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,KAAI;QACrC,IAAI,KAAK,GAA8C,SAAS,CAAC;QACjE,IAAI,SAAS,GAA6B,SAAS,CAAC;QAEpD,MAAM,aAAa,GAAG,MAAW;AAC/B,YAAA,OAAO,MAAM,CACX,IAAIR,0BAAU,CAAC,CAAA,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,aAAa,IAAG,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,aAAa,GAAGQ,sBAAoB,CAAC,CACvF,CAAC;AACJ,SAAC,CAAC;QAEF,MAAM,eAAe,GAAG,MAAW;YACjC,IAAI,CAAA,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,KAAI,SAAS,EAAE;gBACrC,OAAO,CAAC,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;AAC7D,aAAA;AACH,SAAC,CAAC;QAEF,SAAS,GAAG,MAAW;AACrB,YAAA,IAAI,SAAS,CAAC,KAAK,CAAC,EAAE;gBACpB,YAAY,CAAC,KAAK,CAAC,CAAC;AACrB,aAAA;AACD,YAAA,eAAe,EAAE,CAAC;YAClB,OAAO,aAAa,EAAE,CAAC;AACzB,SAAC,CAAC;AAEF,QAAA,IAAI,CAAA,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,WAAW,KAAI,OAAO,CAAC,WAAW,CAAC,OAAO,EAAE;YACvD,OAAO,aAAa,EAAE,CAAC;AACxB,SAAA;AAED,QAAA,KAAK,GAAG,UAAU,CAAC,MAAK;AACtB,YAAA,eAAe,EAAE,CAAC;YAClB,OAAO,CAAC,KAAK,CAAC,CAAC;SAChB,EAAE,SAAS,CAAC,CAAC;AAEd,QAAA,IAAI,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,WAAW,EAAE;YACxB,OAAO,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;AAC1D,SAAA;AACH,KAAC,CAAC,CAAC;AACL;;AC9DA;AA0BA;;;;;AAKG;SACa,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EAAA;IAEzB,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACSC,2BAMX;AAND,CAAA,UAAY,SAAS,EAAA;AACnB;;;AAGG;AACH,IAAA,SAAA,CAAA,SAAA,CAAA,aAAA,CAAA,GAAA,CAAA,CAAA,GAAA,aAAW,CAAA;AACb,CAAC,EANWA,iBAAS,KAATA,iBAAS,GAMpB,EAAA,CAAA,CAAA,CAAA;AA8BM,MAAM,mBAAmB,GAAiB;AAC/C,IAAA,UAAU,EAAE,0BAA0B;AACtC,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,iBAAiB,EAAE,iCAAiC;CACrD,CAAC;AAEF;;AAEG;AACG,MAAO,sBAAuB,SAAQ,iBAAiB,CAAA;AAc3D;;;;;;;AAOG;IACH,WACE,CAAA,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EAAA;AAEzB,QAAA,KAAK,CAAC,UAAU,EAAE,O
AAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;AACjF,QAAA,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;AAC7F,QAAA,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;AAChD,cAAE,gBAAgB;cAChB,iCAAiC,CAAC;KACvC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;AACpB,aAAA,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;AAC5B,aAAA,IAAI,CAAC,CAAC,QAAQ,KAAKC,OAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;aAClD,KAAK,CAAC,CAAC,KAAK,KAAKA,OAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC;KAC7E;AACF,CAAA;AAED,eAAeA,OAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB,EAAA;IAEzB,SAAS,iBAAiB,CAAC,aAAqC,EAAA;QAC9D,MAAM,UAAU,GAAG,aAAa,KAAA,IAAA,IAAb,aAAa,KAAb,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,aAAa,CAAE,MAAM,CAAC;QACzC,IAAI,UAAU,KAAK,GAAG,KAAI,QAAQ,KAAR,IAAA,IAAA,QAAQ,uBAAR,QAAQ,CAAE,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA,EAAE;AACtF,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,IACE,UAAU,KAAK,SAAS;AACxB,aAAC,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;AACxC,YAAA,UAAU,KAAK,GAAG;YAClB,UAAU,KAAK,GAAG,EAClB;AACA,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;IAED,SAAS,GAAG,eAAe,CACzB;QACE,aAAa,EAAE,MAAM,CAAC,aAAa;AACnC,QAAA,gBAAgB,EAAE,CAAC;QACnB,gBAAgB,EAAE,MAAM,CAAC,gBAAgB;AAC1C,KAAA,EACD,SAAS,EACT,YAAY,CACb,CAAC;IAEF,MAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;AAC1F,IAAA,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE;QACxF,MAAM,CAAC,IAAI,CAAC,CAAA,oBAAA,EAAuB,SAAS,CAAC,aAAa,CAAE,CAAA,CAAC,CAAC;QAC9D,IAAI;AACF,YAAA,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AACrC,YAAA,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;YAClE,OAAOA,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,CAAC,CAAC;AAC/C,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,OAAOA,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;AACzD,SAAA;AACF,KAAA;AAAM,SAAA,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;;AAEjD,QAAA,MAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;AACJ,QAAA,MAAM,GAAG,CAAC;AACX,KAAA;AAAM,SAAA;AACL,QAAA,OAAO,QAAQ,CAAC;AACjB,KAAA;AACH;;AC1MA;AA8CA;;;;AAIG;AACa,SAAA,SAAS,CAAC,cAAA,GAAmC,EAAE,EAAA;IAC7D,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAC3D;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,SAAU,SAAQ,iBAAiB,CAAA;AA4C9C,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,UACEC,QAAM,GAAGC,MAAU,CAAC,IAAI,EACxB,kBAAkB,GAAG,EAAE,EACvB,sBAAsB,GAAG,EAAE,MACP,EAAE,EAAA;AAExB,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,MAAM,GAAGD,QAAM,CAAC;AACrB,QAAA,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,CAAC,CAAC;KAChF;AApDD;;;;;;AAMG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC;KAC1C;AAED;;;;;;AAMG;IACH,IAAW,kBAAkB,CAAC,kBAA+B,EAAA;AAC3D,QAAA,IAAI,CAAC,SAAS,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;KACxD;AAED;;;;AAIG;AACH,IAAA,IAAW,sBAAsB,GAAA;AAC/B,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,sBAAsB,CAAC;KAC9C;AAED;;;;AAIG;IACH,IAAW,sBAAsB,CAAC,sBAAmC,EAAA;AACnE,QAAA,IAAI,CAAC,SAAS,CAAC,sBAAsB,GAAG,sBAAsB,CAAC;KAChE;AAgBM,IAAA,WAAW,CAAC,OAAwB,EAAA;AACzC,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;YAAE,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAEvE,QAAA,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QACzB,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,KAAK,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;KAC7F;AAEO,IAAA,UAAU,CAAC,OAAwB,EAAA;AACzC,Q
AAA,IAAI,CAAC,MAAM,CAAC,CAAA,SAAA,EAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA,CAAE,CAAC,CAAC;KAC7D;AAEO,IAAA,WAAW,CAAC,QAA+B,EAAA;QACjD,IAAI,CAAC,MAAM,CAAC,CAAA,sBAAA,EAAyB,QAAQ,CAAC,MAAM,CAAE,CAAA,CAAC,CAAC;AACxD,QAAA,IAAI,CAAC,MAAM,CAAC,CAAY,SAAA,EAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA,CAAE,CAAC,CAAC;AACrE,QAAA,OAAO,QAAQ,CAAC;KACjB;AACF;;ACxID;AACA;AAqDA;;;;AAIG;AACG,SAAU,0BAA0B,CAAC,SAA6B,EAAA;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAEe,SAAA,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc,EAAA;AAEd,IAAA,IAAI,MAAc,CAAC;AACnB,IAAA,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;AACxB,KAAA;AAAM,SAAA,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;AACvC,QAAA,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;AACjC,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB;;AC5EA;AAiGA;;;AAGG;AACG,SAAU,4BAA4B,CAAC,aAA4B,EAAA;AACvE,IAAA,MAAM,MAAM,GAAG,IAAI,GAAG,EAAU,CAAC;AACjC,IAAA,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,MAAM,iBAAiB,GAAG,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QAC9D,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AAChC,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB;;ACjHA;SAOgB,sBAAsB,GAAA;AACpC,IAAA,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;SAEe,uBAAuB,GAAA;AACrC,IAAA,MAAM,WAAW,GAAG;AAClB,QAAA,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;AAEF,IAAA,MAAM,MAAM,GAAG;AACb,QAAA,GAAG,EAAE,IAAI;AACT,QAAA,KAAK,EAAE,CAAI,CAAA,EAAAE,aAAE,CAAC,IAAI,EAAE,CAAI,CAAA,EAAAA,aAAE,CAAC,IAAI,EAAE,CAAI,CAAA,EAAAA,aAAE,CAAC,OAAO,EAAE,CAAG,CAAA,CAAA;KACrD,CAAC;AAEF,IAAA,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B;;ACvBA;AA+BA,SAAS,cAAc,GAAA;AACrB,IAAA,MAAM,aAAa,GAAG;AACpB,QAAA,GAAG,EAAE,WAAW;QAChB,KAAK,EAAE,SAAS,CAAC,eAAe;KACjC,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAY,GAAG,GAAG,EAClB,cAAc,GAAG,GAAG,EAAA;AAEpB,IAAA,OAAO,aAAa;AACjB,SAAA,GAAG,CAAC,CAAC,IAAI,KAAI;AACZ,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,CAAA,EAAG,cAAc,CAAG,EAAA,IAAI,CAAC,KAAK,CAAA,CAAE,GAAG,EAAE,CAAC;AACjE,QAAA,OAAO,GAAG,IAAI,CAAC,GAAG,CAAG,EAAA,KAAK,EAAE,CAAC;AAC/B,KAAC,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAEM,MAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE;;;AAGG;SACa,wBAAwB,GAAA;AACtC,IAAA,MAAM,WAAW,GAAG,cAAc,EAAE,CAAC;AACrC,IAAA,MAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,MAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;AAC/E,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;AAIG;AACG,SAAU,eAAe,CAAC,aAA6B,EAAA;AAC3D,IAAA,MAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,KAAK,IAAI;UAC3E,sBAAsB,EAAE;AAC1B,UAAE,aAAa,CAAC,GAAG,CAAC;AACxB,IAAA,MAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,KAAK,SAAS,IAAI,aAAa,CAAC,KAAK,KAAK,IAAI;UAC/E,wBAAwB,EAAE;AAC5B,UAAE,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,eAAgB,SAAQ,iBAAiB,CAAA;AACpD,IAAA,WAAA,CACW,WAA0B,EAC1B,QAA8B,EAC7B,SAAiB,EACjB,WAAmB,EAAA;AAE7B,QAAA,KAAK,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;QALpB,IAAW,CAAA,WAAA,GAAX,WAAW,CAAe;QAC1B,IAAQ,CAAA,QAAA,GAAR,QAAQ,CAAsB;QAC7B,IAAS,CAAA,SAAA,GAAT,SAAS,CAAQ;QACjB,IAAW,CAAA,WAAA,GAAX,WAAW,CAAQ;KAG9B;AAED,IAAA,WAAW,CAAC,OAAwB,EAAA;AAClC,QAAA,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AAED;;AAEG;AACH,IAAA,kBAAkB,CAAC,OAAwB,EAAA;AACzC,QAAA,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;AACpB,YAAA,OAAO,CAAC,OAA
O,GAAG,IAAI,WAAW,EAAE,CAAC;AACrC,SAAA;AAED,QAAA,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;AAC5D,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AACvD,SAAA;KACF;AACF;;ACtHD;AACA;AAEA;;AAEG;AACSC,uCAqBX;AArBD,CAAA,UAAY,qBAAqB,EAAA;AAC/B;;AAEG;AACH,IAAA,qBAAA,CAAA,KAAA,CAAA,GAAA,GAAS,CAAA;AACT;;AAEG;AACH,IAAA,qBAAA,CAAA,KAAA,CAAA,GAAA,GAAS,CAAA;AACT;;AAEG;AACH,IAAA,qBAAA,CAAA,KAAA,CAAA,GAAA,IAAU,CAAA;AACV;;AAEG;AACH,IAAA,qBAAA,CAAA,OAAA,CAAA,GAAA,GAAW,CAAA;AACX;;AAEG;AACH,IAAA,qBAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACjB,CAAC,EArBWA,6BAAqB,KAArBA,6BAAqB,GAqBhC,EAAA,CAAA,CAAA;;AC3BD;AA8CA;AACO,MAAM,sBAAsB,GAAuB;AACxD,IAAA,uBAAuB,EAAE,IAAI;AAC7B,IAAA,iBAAiB,EAAE,IAAI;AACvB,IAAA,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC;CACjC,CAAC;AAEF;;;;;;;;;;;;AAYG;AACH,eAAe,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB,EAAA;;;AAInB,IAAA,eAAe,iBAAiB,GAAA;AAC9B,QAAA,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;AAC/B,aAAA;YAAC,OAAM,EAAA,EAAA;AACN,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;;YAG1C,IAAI,UAAU,KAAK,IAAI,EAAE;AACvB,gBAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,OAAO,UAAU,CAAC;AACnB,SAAA;KACF;AAED,IAAA,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;AACrB,QAAA,MAAM,KAAK,CAAC,iBAAiB,CAAC,CAAC;AAE/B,QAAA,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;AACnC,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;AAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD,EAAA;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;AAErC,IAAA,MAAM,OAAO,GACR,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,sBAAsB,CACtB,EAAA,kBAAkB,CACtB,CAAC;AAEF;;;AAGG;AACH,IAAA,MAAM,MAAM,GAAG;AACb;;AAEG;AACH,QAAA,IAAI,YAAY,GAAA;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;SAC/B;AACD;;;AAGG;AACH,QAAA,IAAI,aAAa,GAAA;;AACf,YAAA,QACE,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,CAAA,EAAA,GAAA,KAAK,KAAL,IAAA,IAAA,KAAK,uBAAL,KAAK,CAAE,kBAAkB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,CAAC,IAAI,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzE;SACH;AACD;;;AAGG;AACH,QAAA,IAAI,WAAW,GAAA;AACb,YAAA,QACE,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzF;SACH;KACF,CAAC;AAEF;;;AAGG;IACH,SAAS,OAAO,CAAC,eAAgC,EAAA;;AAC/C,QAAA,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;AAExB,YAAA,MAAM,iBAAiB,GAAG,MACxB,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;;;AAI/C,YAAA,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;;AAEzB,YAAA,CAAA,EAAA,GAAA,KAAK,KAAA,IAAA,IAAL,KAAK,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAL,KAAK,CAAE,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,IAAI,CAAC,GAAG,EAAE,CACxC;AACE,iBAAA,IAAI,CAAC,CAAC,MAAM,KAAI;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;AACf,gBAAA,OAAO,KAAK,CAAC;AACf,aAAC,CAAC;AACD,iBAAA,KAAK,CAAC,CAAC,MAAM,KAAI;;;;gBAIhB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;AACb,gBAAA,MAAM,MAAM,CAAC;AACf,aAAC,CAAC,CAAC;AACN,SAAA;AAED,QAAA,OAAO,aAAqC,CAAC;KAC9C;AAED,IAAA,OAAO,OAAO,YAA6B,KAA0B;;;;;;;;;;QAWnE,IAAI,MAAM,CAAC,WAAW;AAAE,YAAA,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;AACvB,SAAA;AAED,QAAA,OAAO,KAAoB,CAAC;AAC9B,KAAC,CAAC;AACJ,CAAC;AAED;AAEA;;;;;;AAMG;AACa,SAAA,+BAA+B,CAC7C,UAA2B,EAC3B,MAAyB,EAAA;;IAGzB,MAAM,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,iBAAiB,CAAC;IAEvE,MAAM,+BAAgC,SAAQ,iBAAiB,CAAA;QAC7D,WAAmB,CAAA,UAAyB,EAAE,OAA6B,EAAA;AACzE,YAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SAC5B;QAEM,MAAM,WAAW,CAAC,WAA4B,EAAA;AACnD,YAAA,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC
,UAAU,CAAC,UAAU,CAAC,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;AACH,aAAA;AAED,YAAA,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,QAAQ,CAAC;gBAC/B,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,gBAAA,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;AAC3C,iBAAA;AACF,aAAA,CAAC,CAAC;AACH,YAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,CAAA,OAAA,EAAU,KAAK,CAAA,CAAE,CAAC,CAAC;YACpF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;SAClD;AACF,KAAA;IAED,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,+BAA+B,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACjE;KACF,CAAC;AACJ;;ACxQA;AAYA;;;AAGG;SACa,kCAAkC,GAAA;IAChD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,kCAAkC,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACpE;KACF,CAAC;AACJ,CAAC;AAED;;;AAGG;AACG,MAAO,kCAAmC,SAAQ,iBAAiB,CAAA;AACvE;;;;;AAKG;;;IAGH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AAED;;;;;AAKG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;AAC3C,QAAA,OAAO,CAAC,kBAAkB,GAAG,KAAK,CAAC;QACnC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACnDD;AAYA;;;AAGG;AACa,SAAA,6BAA6B,CAC3C,mBAAmB,GAAG,wBAAwB,EAAA;IAE9C,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;SACpF;KACF,CAAC;AACJ,CAAC;AAEK,MAAO,6BAA8B,SAAQ,iBAAiB,CAAA;AAClE,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACrB,oBAA4B,EAAA;AAEpC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFnB,IAAoB,CAAA,oBAAA,GAApB,oBAAoB,CAAQ;KAGrC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;AACxD,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;AACnE,SAAA;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACzCD;AAMA,IAAI,gBAAwC,CAAC;SAE7B,0BAA0B,GAAA;IACxC,IAAI,CAAC,gBAAgB,EAAE;AACrB,QAAA,gBAAgB,GAAG,IAAIC,mBAAiB,EAAE,CAAC;AAC5C,KAAA;AAED,IAAA,OAAO,gBAAgB,CAAC;AAC1B;;ACdA;SAegB,YAAY,GAAA;IAC1B,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SAC9C;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACH,MAAM,YAAa,SAAQ,iBAAiB,CAAA;AAC1C;;AAEG;IACH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AAED;;AAEG;IACI,MAAM,WAAW,CAAC,OAAwB,EAAA;;AAE/C,QAAA,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACpE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AACtC,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBACvB,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AACzE,aAAA;AACF,SAAA;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;AC/CD;AAgBA;;;AAGG;AACI,MAAM,iBAAiB,GAAa,EAAE,CAAC;AAC9C,IAAI,iBAAiB,GAAY,KAAK,CAAC;AAEvC;AACA,MAAM,iBAAiB,GAAyB,IAAI,GAAG,EAAE,CAAC;AAE1D,SAAS,yBAAyB,GAAA;IAChC,IAAI,CAAC,OAAO,EAAE;AACZ,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,MAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;AAE5D,IAAA,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED;;;;AAIG;AACH,SAAS,UAAU,CACjB,GAAW,EACX,WAAqB,EACrB,WAAkC,EAAA;AAElC,IAAA,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5B,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;IACD,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,WAAW,KAAX,IAAA,IAAA,WAAW,KAAX,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,WAAW,CAAE,GAAG,CAAC,IAAI,CAAC,EAAE;AAC1B,QAAA,OAAO,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;AAC9B,KAAA;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;AAC3B,IAAA,KAA
K,MAAM,OAAO,IAAI,WAAW,EAAE;AACjC,QAAA,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;;;AAGtB,YAAA,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;AACvB,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;AACvB,iBAAA;AACF,aAAA;AACF,SAAA;AAAM,aAAA;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;AACvB,aAAA;AACF,SAAA;AACF,KAAA;IACD,WAAW,KAAA,IAAA,IAAX,WAAW,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAX,WAAW,CAAE,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;AACvC,IAAA,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;AAEG;SACa,WAAW,GAAA;IACzB,MAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,iBAAiB,GAAG,IAAI,CAAC;AACzB,IAAA,IAAI,OAAO,EAAE;AACX,QAAA,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,EAAE,CAAC;aAC1B,MAAM,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,MAAM,CAAC,CAAC;AAClC,KAAA;AAED,IAAA,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;AAIG;AACG,SAAU,uBAAuB,CAAC,QAAiB,EAAA;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;AACb,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;AACF,KAAA;AAED,IAAA,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,cAAc,EAAE,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC5E,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;AACnD,IAAA,MAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,GAAG,EAAE,CAAC;IAC1E,OAAO;AACL,QAAA,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ;QACR,QAAQ;KACT,CAAC;AACJ,CAAC;AAED;;;;;;AAMG;AACa,SAAA,WAAW,CACzB,aAA6B,EAC7B,OAGC,EAAA;IAED,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;AAC3C,KAAA;IACD,IAAI,CAAC,iBAAiB,EAAE;AACtB,QAAA,iBAAiB,CAAC,IAAI,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;AAC1C,KAAA;IACD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,oBAA0C,KAAI;AAChF,YAAA,OAAO,IAAI,WAAW,CACpB,UAAU,EACV,oBAAoB,EACpB,aAAc,EACd,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,iBAAiB,CAC3B,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,GAAW,EAAA;IAKrC,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACjC,IAAA,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;AAClB,QAAA,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;AAChC,KAAA;IAED,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACvC,IAAA,MAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,GAAG,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC;IAC3D,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACrC,IAAA,MAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;AACtC,IAAA,MAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,GAAG,IAAI,CAAC;AACpE,IAAA,MAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;AAC1E,IAAA,MAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ;QACR,QAAQ;QACR,cAAc;KACf,CAAC;AACJ,CAAC;AAEK,MAAO,WAAY,SAAQ,iBAAiB,CAAA;AAChD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACtB,aAA4B,EAC3B,iBAA4B,EAAA;AAEpC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAHpB,IAAa,CAAA,aAAA,GAAb,aAAa,CAAe;QAC3B,IAAiB,CAAA,iBAAA,GAAjB,iBAAiB,CAAW;KAGrC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;;QACzC,IACE,CAAC,OAAO,CAAC,aAAa;YACtB,CAAC,UAAU,CACT,OAAO,CAAC,GAAG,EACX,CAAA,EAAA,GAAA,IAAI,CAAC,iBAAiB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,iBAAiB,EAC3C,IAAI,CAAC,iBAAiB,GAAG,SAAS,GAAG,iBAAiB,CACvD,EACD;AACA,YAAA,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;AAC5C,SAAA;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACrMD;AAcgB,SAAA,oBAAoB,CAAC,YAAY,GAAG,EAAE,EAAA;IACpD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,oBAA
oB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;SACpE;KACF,CAAC;AACJ,CAAC;AAEK,MAAO,oBAAqB,SAAQ,iBAAiB,CAAA;AACzD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACpB,gBAAgB,EAAE,EAAA;AAE3B,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFlB,IAAa,CAAA,aAAA,GAAb,aAAa,CAAK;KAG5B;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;AACpB,aAAA,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;AAC5B,aAAA,IAAI,CAAC,CAAC,QAAQ,KAAK,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;KAClE;AACF,CAAA;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B,EAAA;AAE/B,IAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,MAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;AACxE,QAAA,IAAI,MAAM,EAAE;YACV,MAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,QACE,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;;;;AAI3C,iBAAA,KAAK,CAAC,MAAM,KAAK,CAAC;AAClB,iBAAA,IAAI,CAAC,CAAC,kBAAkB,KAAI;AAC3B,gBAAA,IAAI,kBAAkB,EAAE;;;AAGtB,oBAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEC,YAAkB,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;AACxD,iBAAA;AACD,gBAAA,OAAO,QAAQ,CAAC;aACjB,CAAC,EACJ;AACH,SAAA;AACF,KAAA;AAED,IAAA,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;AAKG;AACH,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAW,GAAG,KAAK,EAAA;AAEnB,IAAA,MAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;AAC5D,IAAA,IAAI,WAAW,EAAE;AACf,QAAA,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;AACtC,KAAA;;;AAID,IAAA,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;;IAGvE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;AAE1E,IAAA,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;AAKG;AACH,SAAS,yBAAyB,CAAC,IAAY,EAAA;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;AACzB,IAAA,IAAI,IAAI,EAAE;QACR,IAAI;AACF,YAAA,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;;AAElB,SAAA;AACD,QAAA,IACE,YAAY;AACZ,YAAA,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;AACvB,YAAA,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;AACA,YAAA,MAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AAC/D,YAAA,IAAI,QAAQ,EAAE;AACZ,gBAAA,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;AACzB,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;AAKG;AACH,SAAS,sBAAsB,CAAC,GAAW,EAAA;AACzC,IAAA,IAAI,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;AAChE,IAAA,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;AAC3B,QAAA,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;AACtB,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,IAAI,KAAK,CAAC,yDAAyD,GAAG,CAAA,CAAA,CAAG,CAAC,CAAC;AAClF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;AAOG;AACH,eAAe,UAAU,CACvB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC,EAAA;AAEhC,IAAA,MAAM,OAAO,GAAG,CAAA,EAAG,SAAS,CAAa,UAAA,EAAA,QAAQ,kCAAkC,CAAC;AACpF,IAAA,MAAM,MAAM,GAAG,CAAA,EAAG,SAAS,CAAa,UAAA,EAAA,QAAQ,yBAAyB,CAAC;AAC1E,IAAA,MAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;AACzD,IAAA,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;AAC3B,IAAA,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AAClE,IAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;AAC3B,QAAA,MAAM,IAAI,KAAK,CAAC,uBAAuB,QAAQ,CAAA,yCAAA,CAA2C,CAAC,CAAC;AAC7F,KAAA;IACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;AAChE,CAAC;AAED;;;;;;;;AAQG;AACH,eAAe,qBAAqB,CAClC,MAA4B,EAC5B,GAAW,EACX,eAAgC,EAAA;AAEhC,IAAA,MAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;AAC9D,IAAA,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;AACrB,IAAA,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AAC7D,IAAA,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,CAAC;AAC3B,IAAA,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;AACrF,Q
AAA,OAAO,IAAI,CAAC;AACb,KAAA;AAAM,SAAA;QACL,MAAM,KAAK,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,CAAC;QACzC,OAAO,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,CAAC;AAC5D,KAAA;AACH;;AClMA;AAaA;;;;AAIG;AACG,SAAU,aAAa,CAC3B,sBAAgD,EAAA;IAEhD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;SACvE;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,aAAc,SAAQ,iBAAiB,CAAA;AAClD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACtB,sBAAgD,EAAA;AAEvD,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFpB,IAAsB,CAAA,sBAAA,GAAtB,sBAAsB,CAA0B;KAGxD;AAED,IAAA,WAAW,CAAC,OAAwB,EAAA;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KACzD;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,KAChD,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAC1C,CAAC;KACH;AACF;;ACjDD;AAwBA;;;;;;;AAOG;AACG,SAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB,EAAA;IAEzB,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED;;;;;;AAMG;AACG,MAAO,sBAAuB,SAAQ,iBAAiB,CAAA;IAM3D,WACE,CAAA,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB,EAAA;AAEzB,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;AACjF,QAAA,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;AAC7F,QAAA,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;AAChD,cAAE,gBAAgB;cAChB,iCAAiC,CAAC;AACtC,QAAA,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;AAChD,cAAE,gBAAgB;cAChB,iCAAiC,CAAC;KACvC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;AACpB,aAAA,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;AAC5B,aAAA,KAAK,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;KAClE;AACF,CAAA;AAED,eAAe,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB,EAAA;IAErB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;AAEpD,IAAA,SAAS,iBAAiB,CAAC,SAAiC,EAAE,KAAkB,EAAA;AAC9E,QAAA,IACE,KAAK;AACL,YAAA,KAAK,CAAC,IAAI;AACV,aAAC,KAAK,CAAC,IAAI,KAAK,WAAW;gBACzB,KAAK,CAAC,IAAI,KAAK,iBAAiB;gBAChC,KAAK,CAAC,IAAI,KAAK,cAAc;gBAC7B,KAAK,CAAC,IAAI,KAAK,YAAY;AAC3B,gBAAA,KAAK,CAAC,IAAI,KAAK,QAAQ,CAAC,EAC1B;AACA,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;KACd;AAED,IAAA,IAAI,WAAW,CAAC,MAAM,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,iBAAiB,EAAE,GAAG,CAAC,EAAE;;QAExF,IAAI;AACF,YAAA,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrC,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;AACxD,SAAA;AAAC,QAAA,OAAO,SAAc,EAAE;AACvB,YAAA,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;AACxE,SAAA;AACF,KAAA;AAAM,SAAA;AACL,QAAA,IAAI,GAAG,EAAE;;YAEP,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;AACxC,SAAA;AACD,QAAA,OAAO,iBAAiB,CAAC;AAC1B,KAAA;AACH;;AClIA;AACA;AAEA;;AAEG;AACI,MAAM,8BAA8B,GAAG,CAAC;;ACN/C;AAoBA,MAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AAExD;;;;;;;;;AASG;SACa,qBAAqB,GAAA;IACnC,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACvD;KACF,CAAC;AACJ,CAAC;AAED,MAAM,oBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;AAQG;AACG,MAAO,qBAAsB,SAAQ,iBAAiB,CAAA;AAI1D,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,eAAiC,EAAA;AAEjC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAPrB,IAAe,CAAA,eAAA,GAAG,CAAC,CAAC;QAQ1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,IAAI,CAAC,uBAAuB,CAAC;KACxE;IAEM,MAAM,WAAW,CAAC,WAA4B,EAAA;AACnD,QAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC;AACzE
,QAAA,IACE,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe;AAC/C,YAAA,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,kBAAkB,EAClD;AACA,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAM,aAAA;YACL,OAAO,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;AACpD,SAAA;KACF;AAEO,IAAA,MAAM,uBAAuB,CACnC,WAA4B,EAC5B,YAAmC,EAAA;;AAEnC,QAAA,MAAM,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;AAEF,QAAA,IAAI,gBAAgB,EAAE;YACpB,MAAM,SAAS,GACb,qBAAqB,CAAC,qBAAqB,CAAC,gBAAgB,CAAC,CAAC;AAChE,YAAA,IAAI,SAAS,EAAE;AACb,gBAAA,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC;AAE1B,gBAAA,MAAM,KAAK,CAAC,SAAS,EAAE,SAAS,EAAE;oBAChC,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,oBAAA,aAAa,EAAE,oBAAoB;AACpC,iBAAA,CAAC,CAAC;AAEH,gBAAA,IAAI,MAAA,WAAW,CAAC,WAAW,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,OAAO,EAAE;AACpC,oBAAA,MAAM,IAAIhB,0BAAU,CAAC,oBAAoB,CAAC,CAAC;AAC5C,iBAAA;AAED,gBAAA,IAAI,IAAI,CAAC,eAAe,GAAG,8BAA8B,EAAE;AACzD,oBAAA,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AACtC,iBAAA;AAAM,qBAAA;oBACL,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAClD,iBAAA;AACF,aAAA;AACF,SAAA;AAED,QAAA,OAAO,YAAY,CAAC;KACrB;IAEM,OAAO,qBAAqB,CAAC,WAAmB,EAAA;AACrD,QAAA,MAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;AAChD,QAAA,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;AACrC,YAAA,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;AACrE,SAAA;AAAM,aAAA;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;AACnC,SAAA;KACF;IAEM,OAAO,yBAAyB,CAAC,WAAmB,EAAA;QACzD,IAAI;AACF,YAAA,MAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,MAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AAC7C,YAAA,MAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;AAExB,YAAA,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;AAC9C,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;KACF;AACF;;AClID;AAqBA,MAAM,UAAU,GAAGiB,8BAAkB,CAAC;AACpC,IAAA,aAAa,EAAE,EAAE;AACjB,IAAA,SAAS,EAAE,EAAE;AACd,CAAA,CAAC,CAAC;AAYH;;;;AAIG;AACa,SAAA,aAAa,CAAC,cAAA,GAAuC,EAAE,EAAA;IACrE,OAAO;QACL,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;YAC7D,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAC/D;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,aAAc,SAAQ,iBAAiB,CAAA;AAGlD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,cAAoC,EAAA;AAEpC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,SAAS,GAAG,cAAc,CAAC,SAAS,CAAC;KAC3C;IAEM,MAAM,WAAW,CAAC,OAAwB,EAAA;AAC/C,QAAA,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE;YAC3B,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC9C,SAAA;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;QAEzC,IAAI,CAAC,IAAI,EAAE;YACT,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC9C,SAAA;QAED,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC7D,YAAA,IAAI,CAAC,kBAAkB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;AACxC,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;AAChC,YAAA,MAAM,GAAG,CAAC;AACX,SAAA;KACF;AAED,IAAA,aAAa,CAAC,OAAwB,EAAA;;QACpC,IAAI;;;YAGF,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC,CAAA,KAAA,EAAQ,OAAO,CAAC,MAAM,CAAA,CAAE,EAAE;AACpD,gBAAA,cAAc,EAAE;oBACd,WAAW,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACL,OAAe,CAAC,WAAW,CAAA,EAAA,EAC/B,IAAI,EAAEC,oBAAQ,CAAC,MAAM,EACtB,CAAA;oBACD,cAAc,EAAE,OAAO,CAAC,cAAc;AACvC,iBAAA;AACF,aAAA,CAAC,CAAC;;AAGH,YAAA,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;gBACvB,IAAI,CAAC,GAAG,EAAE,CAAC;AACX,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;AAED,YAAA,MAAM,oBAAoB,GAAG,CAAA,EAAA,GAAA,OAAO,CAAC,cAAc,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC;AAE1F,YAAA,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;AAC5C,gBAAA,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,oBAAoB,CAAC,CAAC;AACzD,aAAA;YAED,IAAI,CAAC,aAAa,CAAC;gBACjB,aAAa
,EAAE,OAAO,CAAC,MAAM;gBAC7B,UAAU,EAAE,OAAO,CAAC,GAAG;gBACvB,SAAS,EAAE,OAAO,CAAC,SAAS;AAC7B,aAAA,CAAC,CAAC;YAEH,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,iBAAiB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;AACtD,aAAA;;AAGD,YAAA,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;AACvC,YAAA,MAAM,iBAAiB,GAAGC,gCAAoB,CAAC,WAAW,CAAC,CAAC;AAC5D,YAAA,IAAI,iBAAiB,IAAIC,8BAAkB,CAAC,WAAW,CAAC,EAAE;gBACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,iBAAiB,CAAC,CAAC;AACtD,gBAAA,MAAM,UAAU,GAAG,WAAW,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC;;AAEhF,gBAAA,IAAI,UAAU,EAAE;oBACd,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;AAC/C,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,CAAA,kDAAA,EAAqD,KAAK,CAAC,OAAO,CAAE,CAAA,CAAC,CAAC;AACrF,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;KACF;IAEO,eAAe,CAAC,IAAU,EAAE,GAAQ,EAAA;QAC1C,IAAI;YACF,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,GAAG,CAAC,OAAO;AACrB,aAAA,CAAC,CAAC;YAEH,IAAI,GAAG,CAAC,UAAU,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;AACvD,aAAA;YACD,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,CAAA,kDAAA,EAAqD,KAAK,CAAC,OAAO,CAAE,CAAA,CAAC,CAAC;AACtF,SAAA;KACF;IAEO,kBAAkB,CAAC,IAAU,EAAE,QAA+B,EAAA;QACpE,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;YACvD,MAAM,gBAAgB,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;AACjE,YAAA,IAAI,gBAAgB,EAAE;AACpB,gBAAA,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACzD,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,EAAE;AACxB,aAAA,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,CAAA,kDAAA,EAAqD,KAAK,CAAC,OAAO,CAAE,CAAA,CAAC,CAAC;AACtF,SAAA;KACF;AACF;;AC1KD;AAiKA;;AAEG;MACU,aAAa,CAAA;AAsBxB;;;;AAIG;AACH,IAAA,WAAA,CACE,WAAwD;;IAExD,OAA8B,EAAA;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,CAAC;QACtE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;AAElF,QAAA,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;AACjD,YAAA,MAAM,CAAC,IAAI,CAAC,8CAA8C,CAAC,CAAC;AAC5D,YAAA,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;AACzD,SAAA;AAAM,aAAA;YACL,IAAI,iBAAiB,GAAqC,SAAS,CAAC;AACpE,YAAA,IAAIC,0BAAiB,CAAC,WAAW,CAAC,EAAE;AAClC,gBAAA,MAAM,CAAC,IAAI,CACT,sFAAsF,CACvF,CAAC;;;;;;;gBAOF,MAAM,oBAAoB,GAA+B,MAAK;oBAC5D,IAAI,wBAAwB,GAAqC,SAAS,CAAC;;oBAE3E,MAAM,aAAa,GAAG,IAAI,CAAC;oBAC3B,MAAM,oBAAoB,GAAG,OAAO,CAAC;oBACrC,OAAO;wBACL,MAAM,CAAC,UAAyB,EAAE,aAAmC,EAAA;4BACnE,MAAM,gBAAgB,GAAG,mBAAmB,CAC1C,oBAAoB,EACpB,aAAa,CAAC,OAAO,CACtB,CAAC;4BAEF,IAAI,CAAC,gBAAgB,EAAE;AACrB,gCAAA,MAAM,IAAI,KAAK,CACb,CAAA,iKAAA,CAAmK,CACpK,CAAC;AACH,6BAAA;AAED,4BAAA,IAAI,wBAAwB,KAAK,SAAS,IAAI,wBAAwB,KAAK,IAAI,EAAE;AAC/E,gCAAA,wBAAwB,GAAG,+BAA+B,CACxD,WAAW,EACX,gBAAgB,CACjB,CAAC;AACH,6BAAA;4BAED,OAAO,wBAAwB,CAAC,MAAM,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;yBACnE;qBACF,CAAC;AACJ,iBAAC,CAAC;gBAEF,iBAAiB,GAAG,oBAAoB,EAAE,CAAC;AAC5C,aAAA;iBAAM,IAAI,WAAW,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;AACvE,gBAAA,MAAM,CAAC,IAAI,CAAC,kEAAkE,CAAC,CAAC;AAChF,gBAAA,iBAAiB,GAAG,aAAa,CAAC,WAAW,CAAC,CAAC;AAChD,aAAA;AAAM,iBAAA,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;AAC5D,gBAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,aAAA;AAED,YAAA,MAAM,CAAC,IAAI,CAAC,+CAA+C,CAAC,CAAC;AAC7D,YAAA,sBAAsB,GAAG,mCAAmC,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;YACzF,IAAI,OAAO,CAAC,sBAAsB,EAAE;;;gBAGlC,MAAM,yBAAyB,GAC7B,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;A
ACzD,gBAAA,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;AACpD,iBAAA;AACF,aAAA;AACF,SAAA;AACD,QAAA,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;KACvD;AAED;;AAEG;AACH,IAAA,WAAW,CAAC,OAAgD,EAAA;AAC1D,QAAA,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;AAC5E,YAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,SAAA;AAED,QAAA,IAAI,WAA4B,CAAC;QACjC,IAAI;AACF,YAAA,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;AACvB,aAAA;AAAM,iBAAA;AACL,gBAAA,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;AAChC,gBAAA,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAC5C,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAC9B,SAAA;AAED,QAAA,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;AAC3E,YAAA,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;AACjE,gBAAA,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,MAAM,oBAAoB,CACxB,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B,EAAA;;AAE/B,QAAA,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;AACpD,YAAA,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;AACtC,YAAA,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;AACxC,SAAA;QAED,MAAM,iBAAiB,GAAG,CAAA,EAAA,GAAA,kBAAkB,CAAC,OAAO,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,iBAAiB,CAAC;AACxE,QAAA,MAAM,WAAW,GAAoB,IAAI,WAAW,EAAE,CAAC;AAEvD,QAAA,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,MAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;AACZ,gBAAA,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;AACH,aAAA;AAED,YAAA,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;AAC9C,YAAA,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,MAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;AACtB,gBAAA,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;AAC3C,aAAA;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;AACzE,gBAAA,KAAK,MAAM,YAAY,IAAI,aAAa,CAAC,aAAa,EAAE;AACtD,oBAAA,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,EACxC,iBAAiB,CAClB,CAAC;AACF,oBAAA,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;AAC9B,wBAAA,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;AAC3D,qBAAA;AACD,oBAAA,UAAU,CAAC,UAAU,CACnB,IAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,GAAG,EACrF,iBAAiB,CAClB,CAAC;AACH,iBAAA;AACF,aAAA;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;AAC7E,gBAAA,KAAK,MAAM,cAAc,IAAI,aAAa,CAAC,eAAe,EAAE;AAC1D,oBAAA,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;AACF,oBAAA,IAAI,mBAAmB,KAAK,SAAS,IAAI,mBAAmB,KAAK,IAAI,EAAE;wBACrE,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,EAC1C,iBAAiB,CAClB,CAAC;AACF,wBAAA,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;AAC7C,4BAAA,cAAc,CAAC,gBAAgB,KAAK,IAAI,EACxC;AACA,4BAAA,IAAI,cAAc,CAAC,gBAAgB,KAAKR,6BAAqB,CAAC,KAAK,EAAE;AACnE,gCAAA,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;;oCAEpC,SAAS;AACV,iCAAA;AAAM,qCAAA;AACL,oCAAA,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;AACvC,wCAAA,MAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC;AACxB,4CAAA,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;AAC9D,qCAAA;AACF,iCAAA;AACF,6BAAA;AAAM,iCAAA,IACL,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;AAC7D,gCAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;gCACA,mBAA
mB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACjF,6BAAA;AACF,yBAAA;AACD,wBAAA,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;AAChC,4BAAA,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;AACtC,gCAAA,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;AACvC,oCAAA,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;AACxC,wCAAA,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7E,qCAAA;AACF,iCAAA;AACF,6BAAA;AAAM,iCAAA;AACL,gCAAA,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;AAC/D,6BAAA;AACF,yBAAA;AACD,wBAAA,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;4BAC7C,cAAc,CAAC,gBAAgB,KAAK,IAAI;AACxC,4BAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,KAAK;AAC/D,4BAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;AAC7D,4BAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACjF,yBAAA;AACD,wBAAA,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;AACH,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,MAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;AACzE,YAAA,IAAI,WAAW,IAAI,aAAa,CAAC,WAAW,EAAE;gBAC5C,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;AACtD,aAAA;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;AAClC,gBAAA,KAAK,MAAM,eAAe,IAAI,aAAa,CAAC,gBAAgB,EAAE;AAC5D,oBAAA,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;AACF,oBAAA,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;wBACrD,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,EAC3C,iBAAiB,CAClB,CAAC;AACF,wBAAA,MAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;AACxE,6BAAA,sBAAsB,CAAC;AAC1B,wBAAA,IAAI,sBAAsB,EAAE;4BAC1B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;AAC1C,gCAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;AACzE,6BAAA;AACF,yBAAA;AAAM,6BAAA;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;AACnC,gCAAA,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;AACH,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AAED,YAAA,MAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;AAC3E,YAAA,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;AACzB,oBAAA,KAAK,MAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;AACpD,wBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;AACpF,qBAAA;AACF,iBAAA;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,oBAAA,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AAC/C,iBAAA;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;AACnB,oBAAA,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACvC,iBAAA;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,oBAAA,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;AACzD,iBAAA;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;AAC9B,oBAAA,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;AAC7D,iBAAA;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;;AAEtB,oBAAA,WAAmB,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AACxD,iBAAA;gBAED,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,oBAAA,WAAW,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;AACrD,iBAAA;gBAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,SAAS,IAAI,OAAO,CAAC,iBAAiB,KAAK,IAAI,EAAE;AACjF,oBAAA,WAAW,CAAC,iBAAiB,GAAG,OAAO,CAAC,iBAAiB,CAAC;AAC3D,iBAAA;AACF,aAAA;AAED,YAAA,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;AAE3E,YAAA,IAAI,WAAW,CAAC,yBAAyB,KAAK,SAAS,EAAE;AACvD,gBAAA,WAAW,CAAC,yBAAyB,GAAG,4BAA4B,CAAC,aAAa,CAAC,CAAC;AACrF,aAAA;AAED,YAAA,IAAI,WAAkC,CAAC;AACvC,YAAA,IAAI,gBAAgB,CAAC;YACrB,IAAI;gBACF,WAAW,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AACnD,aAAA;AAAC,YAAA,OAAO,KAAU,EAAE;gBACnB,gBAAgB,GAAG,KAAK,CAAC;AAC1B,aAAA;AACD,YAAA,IAAI,gBAAgB,EAAE;gBACpB,IAAI,gBAAgB,CAAC,QAAQ,EAAE;AAC7B,oBAAA,gBAAgB,C
AAC,OAAO,GAAG,eAAe,CACxC,gBAAgB,CAAC,QAAQ,EACzB,aAAa,CAAC,SAAS,CAAC,gBAAgB,CAAC,UAAU,CAAC;AAClD,wBAAA,aAAa,CAAC,SAAS,CAAC,SAAS,CAAC,CACrC,CAAC;AACH,iBAAA;AACD,gBAAA,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC;AAC3C,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,GAAG,OAAO,CAAC,OAAO,CACtB,eAAe,CAAC,WAAY,EAAE,aAAa,CAAC,SAAS,CAAC,WAAY,CAAC,MAAM,CAAC,CAAC,CAC5E,CAAC;AACH,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAChC,SAAA;QAED,MAAM,EAAE,GAAG,QAAQ,CAAC;AACpB,QAAA,IAAI,EAAE,EAAE;YACN,MAAM;iBACH,IAAI,CAAC,CAAC,GAAG,KAAK,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC;iBACvF,KAAK,CAAC,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5B,SAAA;AAED,QAAA,OAAO,MAAM,CAAC;KACf;AACF,CAAA;AAEK,SAAU,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAAA;;IAE5B,MAAM,iBAAiB,GAAG,CAAA,EAAA,GAAA,CAAA,EAAA,GAAA,kBAAkB,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,iBAAiB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,EAAE,CAAC;AAC9E,IAAA,MAAM,cAAc,GAAgC;AAClD,QAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,iBAAiB,CAAC,QAAQ,mCAAI,EAAE;AAC1C,QAAA,WAAW,EAAE,CAAA,EAAA,GAAA,iBAAiB,CAAC,WAAW,mCAAI,KAAK;AACnD,QAAA,UAAU,EAAE,CAAA,EAAA,GAAA,iBAAiB,CAAC,UAAU,mCAAI,WAAW;KACxD,CAAC;AAEF,IAAA,MAAM,UAAU,GAAG,iBAAiB,CAAC,UAAU,CAAC;IAChD,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;AACjE,QAAA,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;AAEF,QAAA,MAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;AACpD,QAAA,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,cAAc,EAAE,cAAc,EAAE,YAAY,EAAE,kBAAkB,EAAE,GAC3F,UAAU,CAAC;AACb,QAAA,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QAEtC,IAAI;AACF,YAAA,IAAI,CAAC,WAAW,CAAC,IAAI,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,IAAI,KAAK,QAAQ,EAAE;gBAC7E,MAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;AACF,gBAAA,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,EAC9B,cAAc,CACf,CAAC;AAEF,gBAAA,MAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAEhD,IAAI,aAAa,CAAC,KAAK,EAAE;AACvB,oBAAA,MAAM,QAAQ,GAAG,kBAAkB,GAAG,CAAS,MAAA,EAAA,kBAAkB,CAAE,CAAA,GAAG,OAAO,CAAC;AAC9E,oBAAA,MAAM,KAAK,GAAG,wBAAwB,CACpC,YAAY,EACZ,QAAQ,EACR,QAAQ,EACR,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;AACF,oBAAA,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7BS,kBAAwB,CACtB,KAAK,EACL,cAAc,IAAI,OAAO,IAAI,cAAe,EAC5C,QAAQ,EACR,YAAY,CACb,EACD;4BACE,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;AACX,yBAAA,CACF,CAAC;AACH,qBAAA;yBAAM,IAAI,CAAC,QAAQ,EAAE;AACpB,wBAAA,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,KAAK,EAAE;4BACrC,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;AACX,yBAAA,CAAC,CAAC;AACJ,qBAAA;AACF,iBAAA;AAAM,qBAAA,IACL,QAAQ,KAAK,UAAU,CAAC,MAAM;AAC9B,qBAAC,CAAA,CAAA,EAAA,GAAA,aAAa,CAAC,WAAW,0CAAE,KAAK,CAAC,YAAY,CAAC,KAAI,aAAa,CAAC,SAAS,KAAK,MAAM,CAAC,EACtF;;;oBAGA,OAAO;AACR,iBAAA;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;AACrD,iBAAA;AACF,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CACb,CAAA,OAAA,EAAU,KAAK,CAAC,OAAO,2CAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,CAAA,CAAA,CAAG,CACL,CAAC;AACH,SAAA;AACF,KAAA;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;AAC1F,QAAA,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;AAC1B,QAAA,KAAK,MAAM,iBAAiB,IAAI,aAAa,CAAC,kBAAkB,EAAE;AAChE,YAAA,MAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;AACF,YAAA,IAAI,sBAAsB,KAAK,SAAS,IAAI,sBAAsB,KAAK,IAAI,EAAE;AAC3E,gBAAA,MAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,
CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,EAC7C,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;AACH,CAAC;AAED;;AAEG;AACH,SAAS,wBAAwB,CAC/B,YAAgC,EAChC,QAAgB,EAChB,QAAgB,EAChB,eAAoB,EACpB,OAAoC,EAAA;;;AAIpC,IAAA,IAAI,YAAY,IAAI,CAAC,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;QAC/E,MAAM,MAAM,GAAQ,EAAE,CAAC;AACvB,QAAA,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,GAAG,YAAY,EAAE,CAAC;AACnD,QAAA,OAAO,MAAM,CAAC;AACf,KAAA;AAED,IAAA,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC,EAAA;AAEjC,IAAA,IAAI,MAAc,CAAC;AACnB,IAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,GAAG,KAAK,CAAC;AAChB,KAAA;AAAM,SAAA;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;AAC/B,QAAA,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;AAC/B,YAAA,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;AACxB,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,iBAAmD,EACnD,OAA6B,EAAA;IAE7B,MAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;AAClF,KAAA;AAED,IAAA,IAAI,iBAAiB,EAAE;AACrB,QAAA,SAAS,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;AACnC,KAAA;IAED,MAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,MAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;AAC/C,QAAA,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;AAC5F,KAAA;AACD,IAAA,SAAS,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC;IACjC,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;AAEzE,IAAA,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;AAC1B,QAAA,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;AACzC,QAAA,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;AACzC,QAAA,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;AACzC,KAAA;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;AAE3E,IAAA,IAAI,MAAM,EAAE;QACV,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;AACpD,KAAA;AAED,IAAA,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;AAEnD,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;;AAKG;AACa,SAAA,yBAAyB,CACvC,eAAwC,EACxC,iBAAwC,EAAA;IAExC,MAAM,sBAAsB,GAA2B,EAAE,CAAC;IAE1D,IAAI,eAAe,CAAC,iBAAiB,EAAE;AACrC,QAAA,sBAAsB,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC;AAC7C,KAAA;IAED,IAAI,cAAc,GAAG,SAAS,CAAC;IAC/B,IAAI,eAAe,CAAC,gBAAgB,IAAI,eAAe,CAAC,gBAAgB,CAAC,eAAe,EAAE;QACxF,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAAC;;;AAIrE,QAAA,MAAM,oBAAoB,GAAG,wBAAwB,EAAE,CAAC;QACxD,IAAI,aAAa,CAAC,OAAO,CAAC,oBAAoB,CAAC,KAAK,CAAC,CAAC,EAAE;AACtD,YAAA,aAAa,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AAC1C,SAAA;AAED,QAAA,cAAc,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC1C,KAAA;IAED,MAAM,gBAAgB,mCACjB,uBAAuB,CAAA,EACvB,eAAe,CAAC,gBAAgB,CACpC,CAAC;IAEF,MAAM,YAAY,mCACb,mBAAmB,CAAA,EACnB,eAAe,CAAC,YAAY,CAChC,CAAC;IAEF,MAAM,eAAe,mCAChB,sBAAsB,CAAA,EACtB,eAAe,CAAC,eAAe,CACnC,CAAC;AAEF,IAAA,IAAI,MAAM,EAAE;QACV,sBAAsB,CAAC,IAAI,CAAC,WAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;AACxE,KAAA;IAED,MAAM,sBAAsB,mCACvB,6BAA6B,CAAA,EAC7B,eAAe,CAAC,sBAAsB,CAC1C,CAAC;AAEF,IAAA,MAAM,cAAc,GACf,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,eAAe,CAAC,cAAc,CAClC,CAAC;IAEF,sBAAsB,CAAC,IAAI,CACzB,aAAa,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,EAC5C,eAAe,CAAC,gBAAgB,CAAC,EACjC,eAAe,CAAC,EAAE,KAAK,EAAE,cAAc,EAAE,CAAC,EAC1C,6BAA6B,EAAE,EAC/B,qBAAqB,CAAC,sBAAsB,CAAC,oBAAoB,CAAC,EAClE,qBAAqB,EAAE,EACvB,sBAAsB,EAAE,EACxB,sBAAsB,CACpB,YAAY,CAAC,UAAU,EACvB,YAAY,
CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB,CAC/B,CACF,CAAC;IAEF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,sBAAsB,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;AACzE,KAAA;AAED,IAAA,IAAI,iBAAiB,EAAE;AACrB,QAAA,sBAAsB,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;AAChD,KAAA;IAED,sBAAsB,CAAC,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,CAAC;AAEvD,IAAA,IAAI,MAAM,IAAI,eAAe,CAAC,kBAAkB,KAAK,KAAK,EAAE;AAC1D,QAAA,sBAAsB,CAAC,IAAI,CAAC,kCAAkC,EAAE,CAAC,CAAC;AACnE,KAAA;IAED,OAAO;QACL,UAAU,EAAE,eAAe,CAAC,UAAU;QACtC,sBAAsB;KACvB,CAAC;AACJ,CAAC;AAsBD,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB,EAAA;AAEtB,IAAA,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAEK,SAAU,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB,EAAA;;AAEtB,IAAA,IAAI,KAAU,CAAC;AACf,IAAA,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;AACrC,QAAA,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;AACjC,KAAA;IACD,MAAM,iBAAiB,GAAG,CAAA,EAAA,GAAA,kBAAkB,CAAC,OAAO,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,iBAAiB,CAAC;AACxE,IAAA,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;AAChC,QAAA,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;AAC9B,gBAAA,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;AACtC,aAAA;AAAM,iBAAA;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;AACF,gBAAA,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;AACvC,oBAAA,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;AACnF,iBAAA;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;AAC5B,gBAAA,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;AACb,wBAAA,eAAe,CAAC,QAAQ;AACxB,6BAAC,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;AAClE,iBAAA;AACD,gBAAA,KAAK,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,GAAG,oBAAoB,CAAC,aAAa,CAAC;AAC7F,aAAA;;YAGD,MAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,EAAE,iBAAiB,CAAC,CAAC;AACtF,SAAA;AACF,KAAA;AAAM,SAAA;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;AACZ,SAAA;AAED,QAAA,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;YACxC,MAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;AACF,YAAA,MAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;AAChE,YAAA,MAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;;YAEF,MAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,EAAE,iBAAiB,CAAC,CAAC;AAC3F,YAAA,IAAI,aAAa,KAAK,SAAS,IAAI,aAAa,KAAK,IAAI,EAAE;gBACzD,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;AACZ,iBAAA;AACD,gBAAA,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;AACrC,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB,EAAA;AAEvB,IAAA,MAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;AACpC,QAAA,MAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;;QAEnD,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,IAAI,IAAI,iBAAiB,IAAI,MAAM,EAAE;AAC1E,YAAA,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;AACpC,SAAA;AAAM,aAAA;YACL,MAAM;AACP,SAAA;AACF,KAAA;AACD,IAAA,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;AAC9B,QAAA,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;AAC9B,QAAA,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;AAC7B,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;AAKG;AACa,SAAA,eAAe,CAC7B,SAAgC,EAChC,YAA2C,EAAA;AAE3C,IAAA,MAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;AAC9C,IAAA,MAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;AAE3D,IAAA,MAAM,oBAAoB,GAAG,CAC3B,GAAM,KAGJ;AACF,QAAA,OAAO,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;AAC7C,YAAA,KAAK,EAAE,SAAS;AACjB,SAAA,CAEA,CAAC;AACJ,KAAC,CAAC;AAEF,IAAA,IAAI,UAAU,EAAE;AACd,QAAA,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAA
K,QAAQ,EAAE;AACzB,YAAA,OAAO,oBAAoB,CACtB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,aAAa,CAChB,EAAA,EAAA,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;AACJ,SAAA;AAED,QAAA,MAAM,eAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,KAAK,EAAE,CAAC;QAC3F,MAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,IAAI,CAC1D,CAAC,CAAC,KAAK,eAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,CAChD,CAAC;AACF,QAAA,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;AACjD,YAAA,MAAM,aAAa,GAAG,CAAC,IAAI,SAAS,CAAC,UAAU,IAAI,EAAE,CAAC,CAAyB,CAAC;YAEhF,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE;AAC9C,gBAAA,IAAI,eAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChD,iBAAA;AACF,aAAA;AAED,YAAA,IAAI,aAAa,EAAE;gBACjB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE;oBAC5C,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AACzC,iBAAA;AACF,aAAA;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;AACpC,YAAA,OAAO,aAAa,CAAC;AACtB,SAAA;AAED,QAAA,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,iCACtB,aAAa,CAAA,EACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACJ,SAAA;AACF,KAAA;AAED,IAAA,IACE,UAAU;AACV,QAAA,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;AACnC,QAAAC,eAAqB,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;;QAEA,OAAO,oBAAoB,CACtB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,aAAa,CAChB,EAAA,EAAA,IAAI,EAAE,SAAS,CAAC,UAAU,EAAA,CAAA,CAC1B,CAAC;AACJ,KAAA;IAED,OAAO,oBAAoB,iCACtB,aAAa,CAAA,EACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC;AAED,SAAS,mBAAmB,CAC1B,OAA8B,EAC9B,OAAgB,EAAA;AAEhB,IAAA,IAAI,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;AAC7B,QAAA,MAAM,MAAM,GAAG,OAAO,CAAC,gBAAgB,CAAC;AACxC,QAAA,OAAO,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC;AAC1B,cAAE,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE,CAAC;cAChD,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC;AAChC,KAAA;AAED,IAAA,IAAI,OAAO,EAAE;QACX,OAAO,CAAA,EAAG,OAAO,CAAA,SAAA,CAAW,CAAC;AAC9B,KAAA;AACD,IAAA,OAAO,SAAS,CAAC;AACnB;;ACrjCA;AA2BA;;;;;;;;AAQG;AACG,SAAU,kBAAkB,CAChC,IAAgB,EAAA;AAKhB,IAAA,OAAOC,8BAA6B,CAAC,IAAI,CAAC,CAAC;AAC7C;;AC3CA;AACA;AAIA;;AAEG;AACI,MAAM,oBAAoB,GAAG,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC;AAqBlD;;;;;;AAMG;MACU,wBAAwB,CAAA;AAInC;;;AAGG;AACH,IAAA,WAAA,CAAY,uBAA+B,oBAAoB,EAAA;QANvD,IAAW,CAAA,WAAA,GAAiB,SAAS,CAAC;AAO5C,QAAA,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;KAClD;AAED;;;AAGG;AACH,IAAA,cAAc,CAAC,WAAoC,EAAA;AACjD,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;KAChC;AAED;;AAEG;IACH,cAAc,GAAA;QACZ,IACE,IAAI,CAAC,WAAW;AAChB,YAAA,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,WAAW,CAAC,kBAAkB,EAC7E;AACA,YAAA,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC;AAC9B,SAAA;QAED,OAAO,IAAI,CAAC,WAAW,CAAC;KACzB;AACF;;ACrED;AACA;AAIA;;;;AAIG;MACU,oBAAoB,CAAA;AAI/B,IAAA,WAAA,CACU,UAA2B,EAC3B,MAAyB,EACzB,uCAA+C,KAAK,EAAA;QAFpD,IAAU,CAAA,UAAA,GAAV,UAAU,CAAiB;QAC3B,IAAM,CAAA,MAAA,GAAN,MAAM,CAAmB;QACzB,IAAoC,CAAA,oCAAA,GAApC,oCAAoC,CAAgB;QALtD,IAAU,CAAA,UAAA,GAAG,CAAC,CAAC;KAMnB;AAEJ;;;AAGG;IACI,OAAO,GAAA;;AAEZ,QAAA,QACE,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,oCAAoC,EAC5F;KACH;AAED;;;;;AAKG;IACK,MAAM,QAAQ,CAAC,OAAwB,EAAA;AAC7C,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;AAC7B,QAAA,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACnE,QAAA,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;QACzB,OAAO,KAAK,IAAI,SAAS,CAAC;KAC3B;AAED;;;AAGG;AACI,IAAA,OAAO,CAAC,OAAwB,EAAA;AACrC,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AACvC,SAAA;QAED,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AACF;;ACvDD;AASA,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,MAAM,4BA
A4B,GAAG,OAAO,CAAC;AAE7C;;AAEG;MACU,8BAA8B,CAAA;AAiBzC;;;;;;AAMG;AACH,IAAA,WAAA,CACE,QAAgB,EAChB,QAAgB,EAChB,sBAA8B,4BAA4B,EAAA;AAhB5D;;;AAGG;QACH,IAAmB,CAAA,mBAAA,GAAW,4BAA4B,CAAC;AAczD,QAAA,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACzF,YAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;AACrF,SAAA;AACD,QAAA,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACzF,YAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;AACrF,SAAA;AACD,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;KAChD;AAED;;;;;AAKG;AACH,IAAA,WAAW,CAAC,WAA4B,EAAA;QACtC,MAAM,WAAW,GAAG,CAAA,EAAG,IAAI,CAAC,QAAQ,CAAA,CAAA,EAAI,IAAI,CAAC,QAAQ,CAAA,CAAE,CAAC;AACxD,QAAA,MAAM,kBAAkB,GAAG,CAAG,EAAA,IAAI,CAAC,mBAAmB,CAAA,CAAA,EAAIC,YAAmB,CAAC,WAAW,CAAC,EAAE,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;AAAE,YAAA,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;AAC3E,QAAA,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;AACF;;ACpED;AAqBA;;AAEG;MACU,iBAAiB,CAAA;AAU5B;;AAEG;AACH,IAAA,WAAA,CAAY,OAAgC,EAAA;AAC1C,QAAA,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;AAClE,YAAA,MAAM,IAAI,KAAK,CACb,CAAA,wHAAA,CAA0H,CAC3H,CAAC;AACH,SAAA;AACD,QAAA,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AACjC,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;KAChC;AAED;;;;;AAKG;AACH,IAAA,WAAW,CAAC,WAA4B,EAAA;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,CAAA,qEAAA,CAAuE,CAAC,CACnF,CAAC;AACH,SAAA;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;AACjB,YAAA,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;AACxB,gBAAA,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;AACzC,aAAA;AACD,YAAA,KAAK,MAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;AACtC,gBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;AAChE,aAAA;AACF,SAAA;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,CAAA,yCAAA,CAA2C,CAAC,CAAC,CAAC;AAC/E,aAAA;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;AACpC,gBAAA,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;AACxB,aAAA;AACD,YAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;AAClC,oBAAA,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;AACxB,iBAAA;AACD,gBAAA,WAAW,CAAC,GAAG,IAAI,CAAA,EAAG,GAAG,CAAA,CAAA,EAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC;AAClD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;AACF;;ACtFD;AAKA;;AAEG;AACG,MAAO,gBAAiB,SAAQ,iBAAiB,CAAA;AACrD;;;;AAIG;AACH,IAAA,WAAA,CAAY,QAAgB,EAAA;QAC1B,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;AAC3D,YAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;AACrF,SAAA;AACD,QAAA,MAAM,OAAO,GAA4B;AACvC,YAAA,QAAQ,EAAE;AACR,gBAAA,aAAa,EAAE,QAAQ;AACxB,aAAA;SACF,CAAC;QACF,KAAK,CAAC,OAAO,CAAC,CAAC;KAChB;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dom-shim.d.ts b/node_modules/@azure/core-http/dom-shim.d.ts new file mode 100644 index 00000000..cb4ac4d9 --- /dev/null +++ b/node_modules/@azure/core-http/dom-shim.d.ts @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +// d.ts shims provide types for things we use internally but are not part +// of this package's surface area. 
+ +interface Request {} +interface RequestInit {} +interface Response {} +interface Headers {} diff --git a/node_modules/@azure/core-http/node_modules/.bin/uuid b/node_modules/@azure/core-http/node_modules/.bin/uuid new file mode 120000 index 00000000..588f70ec --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/dist/bin/uuid \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/form-data/License b/node_modules/@azure/core-http/node_modules/form-data/License new file mode 100644 index 00000000..c7ff12a2 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/@azure/core-http/node_modules/form-data/README.md.bak b/node_modules/@azure/core-http/node_modules/form-data/README.md.bak new file mode 100644 index 00000000..298a1a24 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/README.md.bak @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@azure/core-http/node_modules/form-data/Readme.md b/node_modules/@azure/core-http/node_modules/form-data/Readme.md new file mode 100644 index 00000000..298a1a24 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/Readme.md @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@azure/core-http/node_modules/form-data/index.d.ts b/node_modules/@azure/core-http/node_modules/form-data/index.d.ts new file mode 100644 index 00000000..295e9e9b --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/index.d.ts @@ -0,0 +1,62 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +// Extracted because @types/node doesn't export interfaces. 
+interface ReadableOptions { + highWaterMark?: number; + encoding?: string; + objectMode?: boolean; + read?(this: stream.Readable, size: number): void; + destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean; +} + +interface Options extends ReadableOptions { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(userHeaders?: FormData.Headers): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + setBoundary(boundary: string): void; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/@azure/core-http/node_modules/form-data/lib/browser.js b/node_modules/@azure/core-http/node_modules/form-data/lib/browser.js new file mode 100644 index 00000000..09e7c70e --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js b/node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js new file mode 100644 index 00000000..18dc819c --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js @@ -0,0 +1,501 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var Stream = require('stream').Stream; +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response or not a stream + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err && err !== 'Unknown stream') { + this._error(err); + return; + } + + // add content length + if (length) { + request.setHeader('Content-Length', length); + } + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/@azure/core-http/node_modules/form-data/lib/populate.js b/node_modules/@azure/core-http/node_modules/form-data/lib/populate.js new file mode 100644 index 00000000..4d35738d --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/@azure/core-http/node_modules/form-data/package.json b/node_modules/@azure/core-http/node_modules/form-data/package.json new file mode 100644 index 00000000..0f20240b --- /dev/null +++ 
b/node_modules/@azure/core-http/node_modules/form-data/package.json @@ -0,0 +1,68 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.", + "version": "4.0.0", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run test && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 6" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "puppeteer": "^1.19.0", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/LICENSE b/node_modules/@azure/core-http/node_modules/tough-cookie/LICENSE new file mode 100644 index 00000000..22204e87 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/LICENSE @@ -0,0 +1,12 @@ +Copyright (c) 2015, Salesforce.com, Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/README.md b/node_modules/@azure/core-http/node_modules/tough-cookie/README.md new file mode 100644 index 00000000..2dc9496d --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/README.md @@ -0,0 +1,582 @@ +[RFC6265](https://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js + +[![npm package](https://nodei.co/npm/tough-cookie.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/tough-cookie/) + +[![Build Status](https://travis-ci.org/salesforce/tough-cookie.svg?branch=master)](https://travis-ci.org/salesforce/tough-cookie) + +# Synopsis + +``` javascript +var tough = require('tough-cookie'); +var Cookie = tough.Cookie; +var cookie = Cookie.parse(header); +cookie.value = 'somethingdifferent'; +header = cookie.toString(); + +var cookiejar = new tough.CookieJar(); +cookiejar.setCookie(cookie, 'http://currentdomain.example.com/path', cb); +// ... +cookiejar.getCookies('http://example.com/otherpath',function(err,cookies) { + res.headers['cookie'] = cookies.join('; '); +}); +``` + +# Installation + +It's _so_ easy! + +`npm install tough-cookie` + +Why the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken. + +## Version Support + +Support for versions of node.js will follow that of the [request](https://www.npmjs.com/package/request) module. + +# API + +## tough + +Functions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be "bound". + +**Note**: prior to 1.0.x, several of these functions took a `strict` parameter. This has since been removed from the API as it was no longer necessary. + +### `parseDate(string)` + +Parse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`. + +### `formatDate(date)` + +Format a Date into a RFC1123 string (the RFC6265-recommended format). + +### `canonicalDomain(str)` + +Transforms a domain-name into a canonical domain-name. The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects). + +### `domainMatch(str,domStr[,canonicalize=true])` + +Answers "does this real domain match the domain in a cookie?". The `str` is the "current" domain-name and the `domStr` is the "cookie" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a "suffix match". + +The `canonicalize` parameter will run the other two parameters through `canonicalDomain` or not. + +### `defaultPath(path)` + +Given a current request/response path, gives the Path apropriate for storing in a cookie. This is basically the "directory" of a "file" in the path, but is specified by Section 5.1.4 of the RFC. + +The `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). 
This is the `.pathname` property of node's `uri.parse()` output. + +### `pathMatch(reqPath,cookiePath)` + +Answers "does the request-path path-match a given cookie-path?" as per RFC6265 Section 5.1.4. Returns a boolean. + +This is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`. + +### `parse(cookieString[, options])` + +alias for `Cookie.parse(cookieString[, options])` + +### `fromJSON(string)` + +alias for `Cookie.fromJSON(string)` + +### `getPublicSuffix(hostname)` + +Returns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it. + +For example: `www.example.com` and `www.subdomain.example.com` both have public suffix `example.com`. + +For further information, see http://publicsuffix.org/. This module derives its list from that site. This call is currently a wrapper around [`psl`](https://www.npmjs.com/package/psl)'s [get() method](https://www.npmjs.com/package/psl#pslgetdomain). + +### `cookieCompare(a,b)` + +For use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). The sort algorithm is, in order of precedence: + +* Longest `.path` +* oldest `.creation` (which has a 1ms precision, same as `Date`) +* lowest `.creationIndex` (to get beyond the 1ms precision) + +``` javascript +var cookies = [ /* unsorted array of Cookie objects */ ]; +cookies = cookies.sort(cookieCompare); +``` + +**Note**: Since JavaScript's `Date` is limited to a 1ms precision, cookies within the same milisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`. This preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore`, since `Set-Cookie` headers are parsed in order, but may not be so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ such that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`. + +### `permuteDomain(domain)` + +Generates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores. + +### `permutePath(path)` + +Generates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores. + + +## Cookie + +Exported via `tough.Cookie`. + +### `Cookie.parse(cookieString[, options])` + +Parses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed. + +The options parameter is not required and currently has only one property: + + * _loose_ - boolean - if `true` enable parsing of key-less cookies like `=abc` and `=`, which are not RFC-compliant. + +If options is not an object, it is ignored, which means you can use `Array#map` with it. + +Here's how to process the Set-Cookie header(s) on a node HTTP/HTTPS response: + +``` javascript +if (res.headers['set-cookie'] instanceof Array) + cookies = res.headers['set-cookie'].map(Cookie.parse); +else + cookies = [Cookie.parse(res.headers['set-cookie'])]; +``` + +_Note:_ in version 2.3.3, tough-cookie limited the number of spaces before the `=` to 256 characters. 
 This limitation has since been removed.
+See [Issue 92](https://github.com/salesforce/tough-cookie/issues/92)
+
+### Properties
+
+Cookie object properties:
+
+  * _key_ - string - the name or key of the cookie (default "")
+  * _value_ - string - the value of the cookie (default "")
+  * _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `"Infinity"`). See `setExpires()`
+  * _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. May also be set to strings `"Infinity"` and `"-Infinity"` for non-expiry and immediate-expiry, respectively. See `setMaxAge()`
+  * _domain_ - string - the `Domain=` attribute of the cookie
+  * _path_ - string - the `Path=` of the cookie
+  * _secure_ - boolean - the `Secure` cookie flag
+  * _httpOnly_ - boolean - the `HttpOnly` cookie flag
+  * _sameSite_ - string - the `SameSite` cookie attribute (from [RFC6265bis]); must be one of `none`, `lax`, or `strict`
+  * _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if they contain equal signs)
+  * _creation_ - `Date` - when this cookie was constructed
+  * _creationIndex_ - number - set at construction, used to provide greater sort precision (please see `cookieCompare(a,b)` for a full explanation)
+
+After a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes:
+
+  * _hostOnly_ - boolean - is this a host-only cookie (i.e. no Domain field was set, but was instead implied)
+  * _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one.
+  * _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar
+  * _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute.
+
+### `Cookie([{properties}])`
+
+Receives an options object that can contain any of the above Cookie properties; defaults are used for unspecified properties.
+
+### `.toString()`
+
+encode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`.
+
+### `.cookieString()`
+
+encode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '=').
+
+### `.setExpires(String)`
+
+sets the expiry based on a date-string passed through `parseDate()`. If `parseDate()` returns `null` (i.e. it can't parse the date string), `.expires` is set to the string `"Infinity"`.
+
+### `.setMaxAge(number)`
+
+sets the maxAge in seconds. Coerces `-Infinity` to `"-Infinity"` and `Infinity` to `"Infinity"` so it JSON serializes correctly.
+
+### `.expiryTime([now=Date.now()])`
+
+### `.expiryDate([now=Date.now()])`
+
+`expiryTime()` computes the absolute unix-epoch milliseconds at which this cookie expires. `expiryDate()` works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds.
+
+Max-Age takes precedence over Expires (as per the RFC). The `.creation` attribute -- or, by default, the `now` parameter -- is used to offset the `.maxAge` attribute.
+
+If Expires (`.expires`) is set, that's returned.
+
+Otherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for "Tue, 19 Jan 2038 03:14:07 GMT" (latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents).
+
+### `.TTL([now=Date.now()])`
+
+compute the TTL relative to `now` (milliseconds). 
The same precedence rules as for `expiryTime`/`expiryDate` apply. + +The "number" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned. + +### `.canonicalizedDomain()` + +### `.cdomain()` + +return the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters. + +### `.toJSON()` + +For convenience in using `JSON.serialize(cookie)`. Returns a plain-old `Object` that can be JSON-serialized. + +Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`). + +**NOTE**: Custom `Cookie` properties will be discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array. + +### `Cookie.fromJSON(strOrObj)` + +Does the reverse of `cookie.toJSON()`. If passed a string, will `JSON.parse()` that first. + +Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are parsed via `Date.parse()`, not the tough-cookie `parseDate`, since it's JavaScript/JSON-y timestamps being handled at this layer. + +Returns `null` upon JSON parsing error. + +### `.clone()` + +Does a deep clone of this cookie, exactly implemented as `Cookie.fromJSON(cookie.toJSON())`. + +### `.validate()` + +Status: *IN PROGRESS*. Works for a few things, but is by no means comprehensive. + +validates cookie attributes for semantic correctness. Useful for "lint" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string -- you can future-proof with this construct: + +``` javascript +if (cookie.validate() === true) { + // it's tasty +} else { + // yuck! +} +``` + + +## CookieJar + +Exported via `tough.CookieJar`. + +### `CookieJar([store],[options])` + +Simply use `new CookieJar()`. If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used. + +The `options` object can be omitted and can have the following properties: + + * _rejectPublicSuffixes_ - boolean - default `true` - reject cookies with domains like "com" and "co.uk" + * _looseMode_ - boolean - default `false` - accept malformed cookies like `bar` and `=bar`, which have an implied empty name. + * _prefixSecurity_ - string - default `silent` - set to `'unsafe-disabled'`, `'silent'`, or `'strict'`. See [Cookie Prefixes] below. + * _allowSpecialUseDomain_ - boolean - default `false` - accepts special-use domain suffixes, such as `local`. Useful for testing purposes. + This is not in the standard, but is used sometimes on the web and is accepted by (most) browsers. + +Since eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods. + +### `.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))` + +Attempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.creation`, `.lastAccessed` and `.hostOnly` properties. + +The `options` object can be omitted and can have the following properties: + + * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. 
+ * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. + * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies + * _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option. + * _sameSiteContext_ - string - default unset - set to `'none'`, `'lax'`, or `'strict'` See [SameSite Cookies] below. + +As per the RFC, the `.hostOnly` property is set if there was no "Domain=" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual). + +### `.setCookieSync(cookieOrString, currentUrl, [{options}])` + +Synchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getCookies(currentUrl, [{options},] cb(err,cookies))` + +Retrieve the list of cookies that can be sent in a Cookie header for the current url. + +If an error is encountered, that's passed as `err` to the callback, otherwise an `Array` of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given. + +The `options` object can be omitted and can have the following properties: + + * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. + * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. + * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies + * _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially). + * _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it). + * _sameSiteContext_ - string - default unset - Set this to `'none'`, `'lax'` or `'strict'` to enforce SameSite cookies upon retrival. See [SameSite Cookies] below. + +The `.lastAccessed` property of the returned cookies will have been updated. + +### `.getCookiesSync(currentUrl, [{options}])` + +Synchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getCookieString(...)` + +Accepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`. + +### `.getCookieStringSync(...)` + +Synchronous version of `getCookieString`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getSetCookieStrings(...)` + +Returns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`. + +### `.getSetCookieStringsSync(...)` + +Synchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. 
 the default `MemoryCookieStore`).
+
+### `.serialize(cb(err,serializedObject))`
+
+Serialize the Jar if the underlying store supports `.getAllCookies`.
+
+**NOTE**: Custom `Cookie` properties will be discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
+
+See [Serialization Format].
+
+### `.serializeSync()`
+
+Sync version of `.serialize`
+
+### `.toJSON()`
+
+Alias of `.serializeSync()` for the convenience of `JSON.stringify(cookiejar)`.
+
+### `CookieJar.deserialize(serialized, [store], cb(err,object))`
+
+A new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization.
+
+The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created.
+
+As a convenience, if `serialized` is a string, it is passed through `JSON.parse` first. If that throws an error, this is passed to the callback.
+
+### `CookieJar.deserializeSync(serialized, [store])`
+
+Sync version of `.deserialize`. _Note_ that the `store` must be synchronous for this to work.
+
+### `CookieJar.fromJSON(string)`
+
+Alias of `.deserializeSync` to provide consistency with `Cookie.fromJSON()`.
+
+### `.clone([store,]cb(err,newJar))`
+
+Produces a deep clone of this jar. Modifications to the original won't affect the clone, and vice versa.
+
+The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. Transferring between store types is supported so long as the source implements `.getAllCookies()` and the destination implements `.putCookie()`.
+
+### `.cloneSync([store])`
+
+Synchronous version of `.clone`, returning a new `CookieJar` instance.
+
+The `store` argument is optional, but must be a _synchronous_ `Store` instance if specified. If not passed, a new instance of `MemoryCookieStore` is used.
+
+The _source_ and _destination_ must both be synchronous `Store`s. If one or both stores are asynchronous, use `.clone` instead. Recall that `MemoryCookieStore` supports both synchronous and asynchronous API calls.
+
+### `.removeAllCookies(cb(err))`
+
+Removes all cookies from the jar.
+
+This is a new backwards-compatible feature of `tough-cookie` version 2.5, so not all Stores will implement it efficiently. For Stores that do not implement `removeAllCookies`, the fallback is to call `removeCookie` after `getAllCookies`. If `getAllCookies` fails or isn't implemented in the Store, that error is returned. If one or more of the `removeCookie` calls fail, only the first error is returned.
+
+### `.removeAllCookiesSync()`
+
+Sync version of `.removeAllCookies()`
+
+## Store
+
+Base class for CookieJar stores. Available as `tough.Store`.
+
+## Store API
+
+The storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore` which can be found in the `lib/memstore.js` file. The API uses continuation-passing style to allow for asynchronous stores.
+
+Stores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`.
+
+Stores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods of the containing `CookieJar` can be used (however, the continuation-passing-style async API is still required).
+
+All `domain` parameters will have been normalized before calling. 
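+
+The individual method contracts are described below. As a purely illustrative sketch (the `ArrayStore` name and its array-backed storage are inventions of this example, not part of tough-cookie), a minimal custom store might look like the following; it does exact-match lookups only, whereas a real store should filter with `permuteDomain()`/`pathMatch()` as described under `store.findCookies()`:
+
+``` javascript
+const tough = require('tough-cookie');
+
+// Illustrative only: a tiny store backed by a plain array.
+class ArrayStore extends tough.Store {
+  constructor() {
+    super();
+    this.synchronous = false; // continuation-passing-style API only
+    this.idx = [];
+  }
+
+  findCookie(domain, path, key, cb) {
+    const hit = this.idx.find(
+      c => c.domain === domain && c.path === path && c.key === key
+    );
+    cb(null, hit || null); // pass null, not an error, when nothing matches
+  }
+
+  findCookies(domain, path, cb) {
+    // A real store should use domainMatch()/pathMatch(); exact match keeps the sketch short.
+    // When path is null (the allPaths option), path-matching must be skipped.
+    const results = this.idx.filter(
+      c => c.domain === domain && (path === null || c.path === path)
+    );
+    cb(null, results); // empty array when nothing matches
+  }
+
+  putCookie(cookie, cb) {
+    // Replace any existing cookie with the same domain/path/key.
+    this.removeCookie(cookie.domain, cookie.path, cookie.key, () => {
+      this.idx.push(cookie);
+      cb(null);
+    });
+  }
+
+  removeCookie(domain, path, key, cb) {
+    this.idx = this.idx.filter(
+      c => !(c.domain === domain && c.path === path && c.key === key)
+    );
+    cb(null); // not an error if the cookie didn't exist
+  }
+
+  removeCookies(domain, path, cb) {
+    this.idx = this.idx.filter(
+      c => !(c.domain === domain && (path === null || c.path === path))
+    );
+    cb(null);
+  }
+}
+
+// The custom store is passed to the CookieJar constructor.
+const jar = new tough.CookieJar(new ArrayStore());
+```
+
+`updateCookie`, `getAllCookies` and `removeAllCookies` are intentionally omitted from the sketch: as noted below, a `putCookie`-based `updateCookie` stub is added automatically, and the other two are optional.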
+ +The Cookie store must have all of the following methods. + +### `store.findCookie(domain, path, key, cb(err,cookie))` + +Retrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned. + +Callback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error). + +### `store.findCookies(domain, path, cb(err,cookies))` + +Locates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above. + +If no cookies are found, the callback MUST be passed an empty array. + +The resulting list will be checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, etc.), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done. + +As of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. domain-matching only). + +### `store.putCookie(cookie, cb(err))` + +Adds a new cookie to the store. The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie` that a duplicate `putCookie` can occur. + +The `cookie` object MUST NOT be modified; the caller will have already updated the `.creation` and `.lastAccessed` properties. + +Pass an error if the cookie cannot be stored. + +### `store.updateCookie(oldCookie, newCookie, cb(err))` + +Update an existing cookie. The implementation MUST update the `.value` for a cookie with the same `domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store. + +The `.lastAccessed` property will always be different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (e.g., least-recently-used, which is up to the store to implement). + +Stores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method a stub that calls `putCookie(newCookie,cb)` will be added to the store object. + +The `newCookie` and `oldCookie` objects MUST NOT be modified. + +Pass an error if the newCookie cannot be stored. + +### `store.removeCookie(domain, path, key, cb(err))` + +Remove a cookie from the store (see notes on `findCookie` about the uniqueness constraint). + +The implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie. + +### `store.removeCookies(domain, path, cb(err))` + +Removes matching cookies from the store. The `path` parameter is optional, and if missing means all paths in a domain should be removed. 
+ +Pass an error ONLY if removing any existing cookies failed. + +### `store.removeAllCookies(cb(err))` + +_Optional_. Removes all cookies from the store. + +Pass an error if one or more cookies can't be removed. + +**Note**: New method as of `tough-cookie` version 2.5, so not all Stores will implement this, plus some stores may choose not to implement this. + +### `store.getAllCookies(cb(err, cookies))` + +_Optional_. Produces an `Array` of all cookies during `jar.serialize()`. The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format] data structure. + +Cookies SHOULD be returned in creation order to preserve sorting via `compareCookies()`. For reference, `MemoryCookieStore` will sort by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1ms. See `compareCookies` for more detail. + +Pass an error if retrieval fails. + +**Note**: not all Stores can implement this due to technical limitations, so it is optional. + +## MemoryCookieStore + +Inherits from `Store`. + +A just-in-memory CookieJar synchronous store implementation, used by default. Despite being a synchronous implementation, it's usable with both the synchronous and asynchronous forms of the `CookieJar` API. Supports serialization, `getAllCookies`, and `removeAllCookies`. + +## Community Cookie Stores + +These are some Store implementations authored and maintained by the community. They aren't official and we don't vouch for them but you may be interested to have a look: + +- [`db-cookie-store`](https://github.com/JSBizon/db-cookie-store): SQL including SQLite-based databases +- [`file-cookie-store`](https://github.com/JSBizon/file-cookie-store): Netscape cookie file format on disk +- [`redis-cookie-store`](https://github.com/benkroeger/redis-cookie-store): Redis +- [`tough-cookie-filestore`](https://github.com/mitsuru/tough-cookie-filestore): JSON on disk +- [`tough-cookie-web-storage-store`](https://github.com/exponentjs/tough-cookie-web-storage-store): DOM localStorage and sessionStorage + + +# Serialization Format + +**NOTE**: if you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`. + +```js + { + // The version of tough-cookie that serialized this jar. + version: 'tough-cookie@1.x.y', + + // add the store type, to make humans happy: + storeType: 'MemoryCookieStore', + + // CookieJar configuration: + rejectPublicSuffixes: true, + // ... future items go here + + // Gets filled from jar.store.getAllCookies(): + cookies: [ + { + key: 'string', + value: 'string', + // ... + /* other Cookie.serializableProperties go here */ + } + ] + } +``` + +# RFC6265bis + +Support for RFC6265bis revision 02 is being developed. Since this is a bit of an omnibus revision to the RFC6252, support is broken up into the functional areas. + +## Leave Secure Cookies Alone + +Not yet supported. + +This change makes it so that if a cookie is sent from the server to the client with a `Secure` attribute, the channel must also be secure or the cookie is ignored. + +## SameSite Cookies + +Supported. + +This change makes it possible for servers, and supporting clients, to mitigate certain types of CSRF attacks by disallowing `SameSite` cookies from being sent cross-origin. + +On the Cookie object itself, you can get/set the `.sameSite` attribute, which will be serialized into the `SameSite=` cookie attribute. 
 When unset or `undefined`, no `SameSite=` attribute will be serialized. The valid values of this attribute are `'none'`, `'lax'`, or `'strict'`. Other values will be serialized as-is.
+
+When parsing cookies with a `SameSite` cookie attribute, values other than `'lax'` or `'strict'` are parsed as `'none'`. For example, `SomeCookie=SomeValue; SameSite=garbage` will parse so that `cookie.sameSite === 'none'`.
+
+In order to support SameSite cookies, you must provide a `sameSiteContext` option to _both_ `setCookie` and `getCookies`. Valid values for this option are just like for the Cookie object, but have particular meanings:
+1. `'strict'` mode - If the request is on the same "site for cookies" (see the RFC draft for what this means), pass this option to add a layer of defense against CSRF.
+2. `'lax'` mode - If the request is from another site, _but_ is directly because of navigation by the user, e.g., following a link or submitting a `GET` form, pass `sameSiteContext: 'lax'`.
+3. `'none'` - Otherwise, pass `sameSiteContext: 'none'` (this indicates a cross-origin request).
+4. unset/`undefined` - SameSite **will not** be enforced! This can be a valid use-case when CSRF isn't in the threat model of the system being built.
+
+It is highly recommended that you read RFC 6265bis for fine details on SameSite cookies. In particular [Section 8.8](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-02#section-8.8) discusses security considerations and defense in depth.
+
+## Cookie Prefixes
+
+Supported.
+
+Cookie prefixes are a way to indicate that a given cookie was set with a set of attributes simply by inspecting the first few characters of the cookie's name.
+
+Cookie prefixes are defined in [Section 4.1.3 of 6265bis](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-4.1.3). Two prefixes are defined:
+
+1. `"__Secure-" Prefix`: If a cookie's name begins with a case-sensitive match for the string "__Secure-", then the cookie will have been set with a "Secure" attribute.
+2. `"__Host-" Prefix`: If a cookie's name begins with a case-sensitive match for the string "__Host-", then the cookie will have been set with a "Secure" attribute, a "Path" attribute with a value of "/", and no "Domain" attribute.
+
+If `prefixSecurity` is enabled for `CookieJar`, then cookies that match the prefixes defined above but do not obey the attribute restrictions will not be added.
+
+You can configure this behavior by passing the `prefixSecurity` option to `CookieJar`. It can be one of 3 values:
+
+1. `silent`: Enable cookie prefix checking but silently fail to add the cookie if the conditions are not met. This is the default.
+2. `strict`: Enable cookie prefix checking and error out if the conditions are not met.
+3. `unsafe-disabled`: Disable cookie prefix checking.
+
+Note that if `ignoreError` is passed in as `true` then the error will be silent regardless of the `prefixSecurity` option (assuming it is enabled).
+
+
+# Copyright and License
+
+BSD-3-Clause:
+
+```text
+ Copyright (c) 2015, Salesforce.com, Inc.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+    this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or other materials provided with the distribution.
+
+ 3. 
Neither the name of Salesforce.com nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. +``` diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js new file mode 100644 index 00000000..a042893e --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js @@ -0,0 +1,1671 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ +"use strict"; +const punycode = require("punycode"); +const urlParse = require("url").parse; +const util = require("util"); +const pubsuffix = require("./pubsuffix-psl"); +const Store = require("./store").Store; +const MemoryCookieStore = require("./memstore").MemoryCookieStore; +const pathMatch = require("./pathMatch").pathMatch; +const VERSION = require("./version"); +const { fromCallback } = require("universalify"); + +// From RFC6265 S4.1.1 +// note that it excludes \x3B ";" +const COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/; + +const CONTROL_CHARS = /[\x00-\x1F]/; + +// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in +// the "relaxed" mode, see: +// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60 +const TERMINATORS = ["\n", "\r", "\0"]; + +// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' +// Note ';' is \x3B +const PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; + +// date-time parsing constants (RFC6265 S5.1.1) + +const DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; + +const MONTH_TO_NUM = { + jan: 0, + feb: 1, + mar: 2, + apr: 3, + may: 4, + jun: 5, + jul: 6, + aug: 7, + sep: 8, + oct: 9, + nov: 10, + dec: 11 +}; + +const MAX_TIME = 2147483647000; // 31-bit max +const MIN_TIME = 0; // 31-bit min +const SAME_SITE_CONTEXT_VAL_ERR = + 'Invalid sameSiteContext option for getCookies(); expected one of "strict", "lax", or "none"'; + +function checkSameSiteContext(value) { + const context = String(value).toLowerCase(); + if (context === "none" || context === "lax" || context === "strict") { + return context; + } else { + return null; + } +} + +const PrefixSecurityEnum = Object.freeze({ + SILENT: "silent", + STRICT: "strict", + DISABLED: "unsafe-disabled" +}); + +// Dumped from ip-regex@4.0.0, with the following changes: +// * all capturing groups converted to non-capturing -- "(?:)" +// * support for IPv6 Scoped Literal ("%eth1") removed +// * lowercase hexadecimal only +var IP_REGEX_LOWERCASE =/(?:^(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}$)|(?:^(?:(?:[a-f\d]{1,4}:){7}(?:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){6}(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){5}(?::(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,2}|:)|(?:[a-f\d]{1,4}:){4}(?:(?::[a-f\d]{1,4}){0,1}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,3}|:)|(?:[a-f\d]{1,4}:){3}(?:(?::[a-f\d]{1,4}){0,2}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,4}|:)|(?:[a-f\d]{1,4}:){2}(?:(?::[a-f\d]{1,4}){0,3}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,5}|:)|(?:[a-f\d]{1,4}:){1}(?:(?::[a-f\d]{1,4}){0,4}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,6}|:)|(?::(?:(?::[a-f\d]{1,4}){0,5}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,7}|:)))$)/; + +/* + * Parses a Natural number (i.e., non-negative integer) with either the + * *DIGIT ( non-digit *OCTET ) + * or + * *DIGIT + * grammar (RFC6265 S5.1.1). + * + * The "trailingOK" boolean controls if the grammar accepts a + * "( non-digit *OCTET )" trailer. 
+ */ +function parseDigits(token, minDigits, maxDigits, trailingOK) { + let count = 0; + while (count < token.length) { + const c = token.charCodeAt(count); + // "non-digit = %x00-2F / %x3A-FF" + if (c <= 0x2f || c >= 0x3a) { + break; + } + count++; + } + + // constrain to a minimum and maximum number of digits. + if (count < minDigits || count > maxDigits) { + return null; + } + + if (!trailingOK && count != token.length) { + return null; + } + + return parseInt(token.substr(0, count), 10); +} + +function parseTime(token) { + const parts = token.split(":"); + const result = [0, 0, 0]; + + /* RF6256 S5.1.1: + * time = hms-time ( non-digit *OCTET ) + * hms-time = time-field ":" time-field ":" time-field + * time-field = 1*2DIGIT + */ + + if (parts.length !== 3) { + return null; + } + + for (let i = 0; i < 3; i++) { + // "time-field" must be strictly "1*2DIGIT", HOWEVER, "hms-time" can be + // followed by "( non-digit *OCTET )" so therefore the last time-field can + // have a trailer + const trailingOK = i == 2; + const num = parseDigits(parts[i], 1, 2, trailingOK); + if (num === null) { + return null; + } + result[i] = num; + } + + return result; +} + +function parseMonth(token) { + token = String(token) + .substr(0, 3) + .toLowerCase(); + const num = MONTH_TO_NUM[token]; + return num >= 0 ? num : null; +} + +/* + * RFC6265 S5.1.1 date parser (see RFC for full grammar) + */ +function parseDate(str) { + if (!str) { + return; + } + + /* RFC6265 S5.1.1: + * 2. Process each date-token sequentially in the order the date-tokens + * appear in the cookie-date + */ + const tokens = str.split(DATE_DELIM); + if (!tokens) { + return; + } + + let hour = null; + let minute = null; + let second = null; + let dayOfMonth = null; + let month = null; + let year = null; + + for (let i = 0; i < tokens.length; i++) { + const token = tokens[i].trim(); + if (!token.length) { + continue; + } + + let result; + + /* 2.1. If the found-time flag is not set and the token matches the time + * production, set the found-time flag and set the hour- value, + * minute-value, and second-value to the numbers denoted by the digits in + * the date-token, respectively. Skip the remaining sub-steps and continue + * to the next date-token. + */ + if (second === null) { + result = parseTime(token); + if (result) { + hour = result[0]; + minute = result[1]; + second = result[2]; + continue; + } + } + + /* 2.2. If the found-day-of-month flag is not set and the date-token matches + * the day-of-month production, set the found-day-of- month flag and set + * the day-of-month-value to the number denoted by the date-token. Skip + * the remaining sub-steps and continue to the next date-token. + */ + if (dayOfMonth === null) { + // "day-of-month = 1*2DIGIT ( non-digit *OCTET )" + result = parseDigits(token, 1, 2, true); + if (result !== null) { + dayOfMonth = result; + continue; + } + } + + /* 2.3. If the found-month flag is not set and the date-token matches the + * month production, set the found-month flag and set the month-value to + * the month denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. + */ + if (month === null) { + result = parseMonth(token); + if (result !== null) { + month = result; + continue; + } + } + + /* 2.4. If the found-year flag is not set and the date-token matches the + * year production, set the found-year flag and set the year-value to the + * number denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. 
+ */ + if (year === null) { + // "year = 2*4DIGIT ( non-digit *OCTET )" + result = parseDigits(token, 2, 4, true); + if (result !== null) { + year = result; + /* From S5.1.1: + * 3. If the year-value is greater than or equal to 70 and less + * than or equal to 99, increment the year-value by 1900. + * 4. If the year-value is greater than or equal to 0 and less + * than or equal to 69, increment the year-value by 2000. + */ + if (year >= 70 && year <= 99) { + year += 1900; + } else if (year >= 0 && year <= 69) { + year += 2000; + } + } + } + } + + /* RFC 6265 S5.1.1 + * "5. Abort these steps and fail to parse the cookie-date if: + * * at least one of the found-day-of-month, found-month, found- + * year, or found-time flags is not set, + * * the day-of-month-value is less than 1 or greater than 31, + * * the year-value is less than 1601, + * * the hour-value is greater than 23, + * * the minute-value is greater than 59, or + * * the second-value is greater than 59. + * (Note that leap seconds cannot be represented in this syntax.)" + * + * So, in order as above: + */ + if ( + dayOfMonth === null || + month === null || + year === null || + second === null || + dayOfMonth < 1 || + dayOfMonth > 31 || + year < 1601 || + hour > 23 || + minute > 59 || + second > 59 + ) { + return; + } + + return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second)); +} + +function formatDate(date) { + return date.toUTCString(); +} + +// S5.1.2 Canonicalized Host Names +function canonicalDomain(str) { + if (str == null) { + return null; + } + str = str.trim().replace(/^\./, ""); // S4.1.2.3 & S5.2.3: ignore leading . + + // convert to IDN if any non-ASCII characters + if (punycode && /[^\u0001-\u007f]/.test(str)) { + str = punycode.toASCII(str); + } + + return str.toLowerCase(); +} + +// S5.1.3 Domain Matching +function domainMatch(str, domStr, canonicalize) { + if (str == null || domStr == null) { + return null; + } + if (canonicalize !== false) { + str = canonicalDomain(str); + domStr = canonicalDomain(domStr); + } + + /* + * S5.1.3: + * "A string domain-matches a given domain string if at least one of the + * following conditions hold:" + * + * " o The domain string and the string are identical. (Note that both the + * domain string and the string will have been canonicalized to lower case at + * this point)" + */ + if (str == domStr) { + return true; + } + + /* " o All of the following [three] conditions hold:" */ + + /* "* The domain string is a suffix of the string" */ + const idx = str.indexOf(domStr); + if (idx <= 0) { + return false; // it's a non-match (-1) or prefix (0) + } + + // next, check it's a proper suffix + // e.g., "a.b.c".indexOf("b.c") === 2 + // 5 === 3+2 + if (str.length !== domStr.length + idx) { + return false; // it's not a suffix + } + + /* " * The last character of the string that is not included in the + * domain string is a %x2E (".") character." */ + if (str.substr(idx-1,1) !== '.') { + return false; // doesn't align on "." + } + + /* " * The string is a host name (i.e., not an IP address)." */ + if (IP_REGEX_LOWERCASE.test(str)) { + return false; // it's an IP address + } + + return true; +} + +// RFC6265 S5.1.4 Paths and Path-Match + +/* + * "The user agent MUST use an algorithm equivalent to the following algorithm + * to compute the default-path of a cookie:" + * + * Assumption: the path (and not query part or absolute uri) is passed in. + */ +function defaultPath(path) { + // "2. 
If the uri-path is empty or if the first character of the uri-path is not + // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. + if (!path || path.substr(0, 1) !== "/") { + return "/"; + } + + // "3. If the uri-path contains no more than one %x2F ("/") character, output + // %x2F ("/") and skip the remaining step." + if (path === "/") { + return path; + } + + const rightSlash = path.lastIndexOf("/"); + if (rightSlash === 0) { + return "/"; + } + + // "4. Output the characters of the uri-path from the first character up to, + // but not including, the right-most %x2F ("/")." + return path.slice(0, rightSlash); +} + +function trimTerminator(str) { + for (let t = 0; t < TERMINATORS.length; t++) { + const terminatorIdx = str.indexOf(TERMINATORS[t]); + if (terminatorIdx !== -1) { + str = str.substr(0, terminatorIdx); + } + } + + return str; +} + +function parseCookiePair(cookiePair, looseMode) { + cookiePair = trimTerminator(cookiePair); + + let firstEq = cookiePair.indexOf("="); + if (looseMode) { + if (firstEq === 0) { + // '=' is immediately at start + cookiePair = cookiePair.substr(1); + firstEq = cookiePair.indexOf("="); // might still need to split on '=' + } + } else { + // non-loose mode + if (firstEq <= 0) { + // no '=' or is at start + return; // needs to have non-empty "cookie-name" + } + } + + let cookieName, cookieValue; + if (firstEq <= 0) { + cookieName = ""; + cookieValue = cookiePair.trim(); + } else { + cookieName = cookiePair.substr(0, firstEq).trim(); + cookieValue = cookiePair.substr(firstEq + 1).trim(); + } + + if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) { + return; + } + + const c = new Cookie(); + c.key = cookieName; + c.value = cookieValue; + return c; +} + +function parse(str, options) { + if (!options || typeof options !== "object") { + options = {}; + } + str = str.trim(); + + // We use a regex to parse the "name-value-pair" part of S5.2 + const firstSemi = str.indexOf(";"); // S5.2 step 1 + const cookiePair = firstSemi === -1 ? str : str.substr(0, firstSemi); + const c = parseCookiePair(cookiePair, !!options.loose); + if (!c) { + return; + } + + if (firstSemi === -1) { + return c; + } + + // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question)." plus later on in the same section + // "discard the first ";" and trim". + const unparsed = str.slice(firstSemi + 1).trim(); + + // "If the unparsed-attributes string is empty, skip the rest of these + // steps." + if (unparsed.length === 0) { + return c; + } + + /* + * S5.2 says that when looping over the items "[p]rocess the attribute-name + * and attribute-value according to the requirements in the following + * subsections" for every item. Plus, for many of the individual attributes + * in S5.3 it says to use the "attribute-value of the last attribute in the + * cookie-attribute-list". Therefore, in this implementation, we overwrite + * the previous value. 
+ */ + const cookie_avs = unparsed.split(";"); + while (cookie_avs.length) { + const av = cookie_avs.shift().trim(); + if (av.length === 0) { + // happens if ";;" appears + continue; + } + const av_sep = av.indexOf("="); + let av_key, av_value; + + if (av_sep === -1) { + av_key = av; + av_value = null; + } else { + av_key = av.substr(0, av_sep); + av_value = av.substr(av_sep + 1); + } + + av_key = av_key.trim().toLowerCase(); + + if (av_value) { + av_value = av_value.trim(); + } + + switch (av_key) { + case "expires": // S5.2.1 + if (av_value) { + const exp = parseDate(av_value); + // "If the attribute-value failed to parse as a cookie date, ignore the + // cookie-av." + if (exp) { + // over and underflow not realistically a concern: V8's getTime() seems to + // store something larger than a 32-bit time_t (even with 32-bit node) + c.expires = exp; + } + } + break; + + case "max-age": // S5.2.2 + if (av_value) { + // "If the first character of the attribute-value is not a DIGIT or a "-" + // character ...[or]... If the remainder of attribute-value contains a + // non-DIGIT character, ignore the cookie-av." + if (/^-?[0-9]+$/.test(av_value)) { + const delta = parseInt(av_value, 10); + // "If delta-seconds is less than or equal to zero (0), let expiry-time + // be the earliest representable date and time." + c.setMaxAge(delta); + } + } + break; + + case "domain": // S5.2.3 + // "If the attribute-value is empty, the behavior is undefined. However, + // the user agent SHOULD ignore the cookie-av entirely." + if (av_value) { + // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E + // (".") character." + const domain = av_value.trim().replace(/^\./, ""); + if (domain) { + // "Convert the cookie-domain to lower case." + c.domain = domain.toLowerCase(); + } + } + break; + + case "path": // S5.2.4 + /* + * "If the attribute-value is empty or if the first character of the + * attribute-value is not %x2F ("/"): + * Let cookie-path be the default-path. + * Otherwise: + * Let cookie-path be the attribute-value." + * + * We'll represent the default-path as null since it depends on the + * context of the parsing. + */ + c.path = av_value && av_value[0] === "/" ? av_value : null; + break; + + case "secure": // S5.2.5 + /* + * "If the attribute-name case-insensitively matches the string "Secure", + * the user agent MUST append an attribute to the cookie-attribute-list + * with an attribute-name of Secure and an empty attribute-value." + */ + c.secure = true; + break; + + case "httponly": // S5.2.6 -- effectively the same as 'secure' + c.httpOnly = true; + break; + + case "samesite": // RFC6265bis-02 S5.3.7 + const enforcement = av_value ? av_value.toLowerCase() : ""; + switch (enforcement) { + case "strict": + c.sameSite = "strict"; + break; + case "lax": + c.sameSite = "lax"; + break; + default: + // RFC6265bis-02 S5.3.7 step 1: + // "If cookie-av's attribute-value is not a case-insensitive match + // for "Strict" or "Lax", ignore the "cookie-av"." + // This effectively sets it to 'none' from the prototype. + break; + } + break; + + default: + c.extensions = c.extensions || []; + c.extensions.push(av); + break; + } + } + + return c; +} + +/** + * If the cookie-name begins with a case-sensitive match for the + * string "__Secure-", abort these steps and ignore the cookie + * entirely unless the cookie's secure-only-flag is true. 
+ * @param cookie + * @returns boolean + */ +function isSecurePrefixConditionMet(cookie) { + return !cookie.key.startsWith("__Secure-") || cookie.secure; +} + +/** + * If the cookie-name begins with a case-sensitive match for the + * string "__Host-", abort these steps and ignore the cookie + * entirely unless the cookie meets all the following criteria: + * 1. The cookie's secure-only-flag is true. + * 2. The cookie's host-only-flag is true. + * 3. The cookie-attribute-list contains an attribute with an + * attribute-name of "Path", and the cookie's path is "/". + * @param cookie + * @returns boolean + */ +function isHostPrefixConditionMet(cookie) { + return ( + !cookie.key.startsWith("__Host-") || + (cookie.secure && + cookie.hostOnly && + cookie.path != null && + cookie.path === "/") + ); +} + +// avoid the V8 deoptimization monster! +function jsonParse(str) { + let obj; + try { + obj = JSON.parse(str); + } catch (e) { + return e; + } + return obj; +} + +function fromJSON(str) { + if (!str) { + return null; + } + + let obj; + if (typeof str === "string") { + obj = jsonParse(str); + if (obj instanceof Error) { + return null; + } + } else { + // assume it's an Object + obj = str; + } + + const c = new Cookie(); + for (let i = 0; i < Cookie.serializableProperties.length; i++) { + const prop = Cookie.serializableProperties[i]; + if (obj[prop] === undefined || obj[prop] === cookieDefaults[prop]) { + continue; // leave as prototype default + } + + if (prop === "expires" || prop === "creation" || prop === "lastAccessed") { + if (obj[prop] === null) { + c[prop] = null; + } else { + c[prop] = obj[prop] == "Infinity" ? "Infinity" : new Date(obj[prop]); + } + } else { + c[prop] = obj[prop]; + } + } + + return c; +} + +/* Section 5.4 part 2: + * "* Cookies with longer paths are listed before cookies with + * shorter paths. + * + * * Among cookies that have equal-length path fields, cookies with + * earlier creation-times are listed before cookies with later + * creation-times." + */ + +function cookieCompare(a, b) { + let cmp = 0; + + // descending for length: b CMP a + const aPathLen = a.path ? a.path.length : 0; + const bPathLen = b.path ? b.path.length : 0; + cmp = bPathLen - aPathLen; + if (cmp !== 0) { + return cmp; + } + + // ascending for time: a CMP b + const aTime = a.creation ? a.creation.getTime() : MAX_TIME; + const bTime = b.creation ? b.creation.getTime() : MAX_TIME; + cmp = aTime - bTime; + if (cmp !== 0) { + return cmp; + } + + // break ties for the same millisecond (precision of JavaScript's clock) + cmp = a.creationIndex - b.creationIndex; + + return cmp; +} + +// Gives the permutation of all possible pathMatch()es of a given path. The +// array is in longest-to-shortest order. Handy for indexing. +function permutePath(path) { + if (path === "/") { + return ["/"]; + } + const permutations = [path]; + while (path.length > 1) { + const lindex = path.lastIndexOf("/"); + if (lindex === 0) { + break; + } + path = path.substr(0, lindex); + permutations.push(path); + } + permutations.push("/"); + return permutations; +} + +function getCookieContext(url) { + if (url instanceof Object) { + return url; + } + // NOTE: decodeURI will throw on malformed URIs (see GH-32). + // Therefore, we will just skip decoding for such URIs. 
+ try { + url = decodeURI(url); + } catch (err) { + // Silently swallow error + } + + return urlParse(url); +} + +const cookieDefaults = { + // the order in which the RFC has them: + key: "", + value: "", + expires: "Infinity", + maxAge: null, + domain: null, + path: null, + secure: false, + httpOnly: false, + extensions: null, + // set by the CookieJar: + hostOnly: null, + pathIsDefault: null, + creation: null, + lastAccessed: null, + sameSite: "none" +}; + +class Cookie { + constructor(options = {}) { + if (util.inspect.custom) { + this[util.inspect.custom] = this.inspect; + } + + Object.assign(this, cookieDefaults, options); + this.creation = this.creation || new Date(); + + // used to break creation ties in cookieCompare(): + Object.defineProperty(this, "creationIndex", { + configurable: false, + enumerable: false, // important for assert.deepEqual checks + writable: true, + value: ++Cookie.cookiesCreated + }); + } + + inspect() { + const now = Date.now(); + const hostOnly = this.hostOnly != null ? this.hostOnly : "?"; + const createAge = this.creation + ? `${now - this.creation.getTime()}ms` + : "?"; + const accessAge = this.lastAccessed + ? `${now - this.lastAccessed.getTime()}ms` + : "?"; + return `Cookie="${this.toString()}; hostOnly=${hostOnly}; aAge=${accessAge}; cAge=${createAge}"`; + } + + toJSON() { + const obj = {}; + + for (const prop of Cookie.serializableProperties) { + if (this[prop] === cookieDefaults[prop]) { + continue; // leave as prototype default + } + + if ( + prop === "expires" || + prop === "creation" || + prop === "lastAccessed" + ) { + if (this[prop] === null) { + obj[prop] = null; + } else { + obj[prop] = + this[prop] == "Infinity" // intentionally not === + ? "Infinity" + : this[prop].toISOString(); + } + } else if (prop === "maxAge") { + if (this[prop] !== null) { + // again, intentionally not === + obj[prop] = + this[prop] == Infinity || this[prop] == -Infinity + ? this[prop].toString() + : this[prop]; + } + } else { + if (this[prop] !== cookieDefaults[prop]) { + obj[prop] = this[prop]; + } + } + } + + return obj; + } + + clone() { + return fromJSON(this.toJSON()); + } + + validate() { + if (!COOKIE_OCTETS.test(this.value)) { + return false; + } + if ( + this.expires != Infinity && + !(this.expires instanceof Date) && + !parseDate(this.expires) + ) { + return false; + } + if (this.maxAge != null && this.maxAge <= 0) { + return false; // "Max-Age=" non-zero-digit *DIGIT + } + if (this.path != null && !PATH_VALUE.test(this.path)) { + return false; + } + + const cdomain = this.cdomain(); + if (cdomain) { + if (cdomain.match(/\.$/)) { + return false; // S4.1.2.3 suggests that this is bad. 
domainMatch() tests confirm this + } + const suffix = pubsuffix.getPublicSuffix(cdomain); + if (suffix == null) { + // it's a public suffix + return false; + } + } + return true; + } + + setExpires(exp) { + if (exp instanceof Date) { + this.expires = exp; + } else { + this.expires = parseDate(exp) || "Infinity"; + } + } + + setMaxAge(age) { + if (age === Infinity || age === -Infinity) { + this.maxAge = age.toString(); // so JSON.stringify() works + } else { + this.maxAge = age; + } + } + + cookieString() { + let val = this.value; + if (val == null) { + val = ""; + } + if (this.key === "") { + return val; + } + return `${this.key}=${val}`; + } + + // gives Set-Cookie header format + toString() { + let str = this.cookieString(); + + if (this.expires != Infinity) { + if (this.expires instanceof Date) { + str += `; Expires=${formatDate(this.expires)}`; + } else { + str += `; Expires=${this.expires}`; + } + } + + if (this.maxAge != null && this.maxAge != Infinity) { + str += `; Max-Age=${this.maxAge}`; + } + + if (this.domain && !this.hostOnly) { + str += `; Domain=${this.domain}`; + } + if (this.path) { + str += `; Path=${this.path}`; + } + + if (this.secure) { + str += "; Secure"; + } + if (this.httpOnly) { + str += "; HttpOnly"; + } + if (this.sameSite && this.sameSite !== "none") { + const ssCanon = Cookie.sameSiteCanonical[this.sameSite.toLowerCase()]; + str += `; SameSite=${ssCanon ? ssCanon : this.sameSite}`; + } + if (this.extensions) { + this.extensions.forEach(ext => { + str += `; ${ext}`; + }); + } + + return str; + } + + // TTL() partially replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere) + // S5.3 says to give the "latest representable date" for which we use Infinity + // For "expired" we use 0 + TTL(now) { + /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires + * attribute, the Max-Age attribute has precedence and controls the + * expiration date of the cookie. + * (Concurs with S5.3 step 3) + */ + if (this.maxAge != null) { + return this.maxAge <= 0 ? 0 : this.maxAge * 1000; + } + + let expires = this.expires; + if (expires != Infinity) { + if (!(expires instanceof Date)) { + expires = parseDate(expires) || Infinity; + } + + if (expires == Infinity) { + return Infinity; + } + + return expires.getTime() - (now || Date.now()); + } + + return Infinity; + } + + // expiryTime() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere) + expiryTime(now) { + if (this.maxAge != null) { + const relativeTo = now || this.creation || new Date(); + const age = this.maxAge <= 0 ? 
-Infinity : this.maxAge * 1000; + return relativeTo.getTime() + age; + } + + if (this.expires == Infinity) { + return Infinity; + } + return this.expires.getTime(); + } + + // expiryDate() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere), except it returns a Date + expiryDate(now) { + const millisec = this.expiryTime(now); + if (millisec == Infinity) { + return new Date(MAX_TIME); + } else if (millisec == -Infinity) { + return new Date(MIN_TIME); + } else { + return new Date(millisec); + } + } + + // This replaces the "persistent-flag" parts of S5.3 step 3 + isPersistent() { + return this.maxAge != null || this.expires != Infinity; + } + + // Mostly S5.1.2 and S5.2.3: + canonicalizedDomain() { + if (this.domain == null) { + return null; + } + return canonicalDomain(this.domain); + } + + cdomain() { + return this.canonicalizedDomain(); + } +} + +Cookie.cookiesCreated = 0; +Cookie.parse = parse; +Cookie.fromJSON = fromJSON; +Cookie.serializableProperties = Object.keys(cookieDefaults); +Cookie.sameSiteLevel = { + strict: 3, + lax: 2, + none: 1 +}; + +Cookie.sameSiteCanonical = { + strict: "Strict", + lax: "Lax" +}; + +function getNormalizedPrefixSecurity(prefixSecurity) { + if (prefixSecurity != null) { + const normalizedPrefixSecurity = prefixSecurity.toLowerCase(); + /* The three supported options */ + switch (normalizedPrefixSecurity) { + case PrefixSecurityEnum.STRICT: + case PrefixSecurityEnum.SILENT: + case PrefixSecurityEnum.DISABLED: + return normalizedPrefixSecurity; + } + } + /* Default is SILENT */ + return PrefixSecurityEnum.SILENT; +} + +class CookieJar { + constructor(store, options = { rejectPublicSuffixes: true }) { + if (typeof options === "boolean") { + options = { rejectPublicSuffixes: options }; + } + this.rejectPublicSuffixes = options.rejectPublicSuffixes; + this.enableLooseMode = !!options.looseMode; + this.allowSpecialUseDomain = !!options.allowSpecialUseDomain; + this.store = store || new MemoryCookieStore(); + this.prefixSecurity = getNormalizedPrefixSecurity(options.prefixSecurity); + this._cloneSync = syncWrap("clone"); + this._importCookiesSync = syncWrap("_importCookies"); + this.getCookiesSync = syncWrap("getCookies"); + this.getCookieStringSync = syncWrap("getCookieString"); + this.getSetCookieStringsSync = syncWrap("getSetCookieStrings"); + this.removeAllCookiesSync = syncWrap("removeAllCookies"); + this.setCookieSync = syncWrap("setCookie"); + this.serializeSync = syncWrap("serialize"); + } + + setCookie(cookie, url, options, cb) { + let err; + const context = getCookieContext(url); + if (typeof options === "function") { + cb = options; + options = {}; + } + + const host = canonicalDomain(context.hostname); + const loose = options.loose || this.enableLooseMode; + + let sameSiteContext = null; + if (options.sameSiteContext) { + sameSiteContext = checkSameSiteContext(options.sameSiteContext); + if (!sameSiteContext) { + return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); + } + } + + // S5.3 step 1 + if (typeof cookie === "string" || cookie instanceof String) { + cookie = Cookie.parse(cookie, { loose: loose }); + if (!cookie) { + err = new Error("Cookie failed to parse"); + return cb(options.ignoreError ? null : err); + } + } else if (!(cookie instanceof Cookie)) { + // If you're seeing this error, and are passing in a Cookie object, + // it *might* be a Cookie object from another loaded version of tough-cookie. + err = new Error( + "First argument to setCookie must be a Cookie object or string" + ); + return cb(options.ignoreError ? 
null : err); + } + + // S5.3 step 2 + const now = options.now || new Date(); // will assign later to save effort in the face of errors + + // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie() + + // S5.3 step 4: NOOP; domain is null by default + + // S5.3 step 5: public suffixes + if (this.rejectPublicSuffixes && cookie.domain) { + const suffix = pubsuffix.getPublicSuffix(cookie.cdomain()); + if (suffix == null) { + // e.g. "com" + err = new Error("Cookie has domain set to a public suffix"); + return cb(options.ignoreError ? null : err); + } + } + + // S5.3 step 6: + if (cookie.domain) { + if (!domainMatch(host, cookie.cdomain(), false)) { + err = new Error( + `Cookie not in this host's domain. Cookie:${cookie.cdomain()} Request:${host}` + ); + return cb(options.ignoreError ? null : err); + } + + if (cookie.hostOnly == null) { + // don't reset if already set + cookie.hostOnly = false; + } + } else { + cookie.hostOnly = true; + cookie.domain = host; + } + + //S5.2.4 If the attribute-value is empty or if the first character of the + //attribute-value is not %x2F ("/"): + //Let cookie-path be the default-path. + if (!cookie.path || cookie.path[0] !== "/") { + cookie.path = defaultPath(context.pathname); + cookie.pathIsDefault = true; + } + + // S5.3 step 8: NOOP; secure attribute + // S5.3 step 9: NOOP; httpOnly attribute + + // S5.3 step 10 + if (options.http === false && cookie.httpOnly) { + err = new Error("Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + + // 6252bis-02 S5.4 Step 13 & 14: + if (cookie.sameSite !== "none" && sameSiteContext) { + // "If the cookie's "same-site-flag" is not "None", and the cookie + // is being set from a context whose "site for cookies" is not an + // exact match for request-uri's host's registered domain, then + // abort these steps and ignore the newly created cookie entirely." + if (sameSiteContext === "none") { + err = new Error( + "Cookie is SameSite but this is a cross-origin request" + ); + return cb(options.ignoreError ? null : err); + } + } + + /* 6265bis-02 S5.4 Steps 15 & 16 */ + const ignoreErrorForPrefixSecurity = + this.prefixSecurity === PrefixSecurityEnum.SILENT; + const prefixSecurityDisabled = + this.prefixSecurity === PrefixSecurityEnum.DISABLED; + /* If prefix checking is not disabled ...*/ + if (!prefixSecurityDisabled) { + let errorFound = false; + let errorMsg; + /* Check secure prefix condition */ + if (!isSecurePrefixConditionMet(cookie)) { + errorFound = true; + errorMsg = "Cookie has __Secure prefix but Secure attribute is not set"; + } else if (!isHostPrefixConditionMet(cookie)) { + /* Check host prefix condition */ + errorFound = true; + errorMsg = + "Cookie has __Host prefix but either Secure or HostOnly attribute is not set or Path is not '/'"; + } + if (errorFound) { + return cb( + options.ignoreError || ignoreErrorForPrefixSecurity + ? 
null + : new Error(errorMsg) + ); + } + } + + const store = this.store; + + if (!store.updateCookie) { + store.updateCookie = function(oldCookie, newCookie, cb) { + this.putCookie(newCookie, cb); + }; + } + + function withCookie(err, oldCookie) { + if (err) { + return cb(err); + } + + const next = function(err) { + if (err) { + return cb(err); + } else { + cb(null, cookie); + } + }; + + if (oldCookie) { + // S5.3 step 11 - "If the cookie store contains a cookie with the same name, + // domain, and path as the newly created cookie:" + if (options.http === false && oldCookie.httpOnly) { + // step 11.2 + err = new Error("old Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + cookie.creation = oldCookie.creation; // step 11.3 + cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker + cookie.lastAccessed = now; + // Step 11.4 (delete cookie) is implied by just setting the new one: + store.updateCookie(oldCookie, cookie, next); // step 12 + } else { + cookie.creation = cookie.lastAccessed = now; + store.putCookie(cookie, next); // step 12 + } + } + + store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie); + } + + // RFC6365 S5.4 + getCookies(url, options, cb) { + const context = getCookieContext(url); + if (typeof options === "function") { + cb = options; + options = {}; + } + + const host = canonicalDomain(context.hostname); + const path = context.pathname || "/"; + + let secure = options.secure; + if ( + secure == null && + context.protocol && + (context.protocol == "https:" || context.protocol == "wss:") + ) { + secure = true; + } + + let sameSiteLevel = 0; + if (options.sameSiteContext) { + const sameSiteContext = checkSameSiteContext(options.sameSiteContext); + sameSiteLevel = Cookie.sameSiteLevel[sameSiteContext]; + if (!sameSiteLevel) { + return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); + } + } + + let http = options.http; + if (http == null) { + http = true; + } + + const now = options.now || Date.now(); + const expireCheck = options.expire !== false; + const allPaths = !!options.allPaths; + const store = this.store; + + function matchingCookie(c) { + // "Either: + // The cookie's host-only-flag is true and the canonicalized + // request-host is identical to the cookie's domain. + // Or: + // The cookie's host-only-flag is false and the canonicalized + // request-host domain-matches the cookie's domain." + if (c.hostOnly) { + if (c.domain != host) { + return false; + } + } else { + if (!domainMatch(host, c.domain, false)) { + return false; + } + } + + // "The request-uri's path path-matches the cookie's path." 
+ if (!allPaths && !pathMatch(path, c.path)) { + return false; + } + + // "If the cookie's secure-only-flag is true, then the request-uri's + // scheme must denote a "secure" protocol" + if (c.secure && !secure) { + return false; + } + + // "If the cookie's http-only-flag is true, then exclude the cookie if the + // cookie-string is being generated for a "non-HTTP" API" + if (c.httpOnly && !http) { + return false; + } + + // RFC6265bis-02 S5.3.7 + if (sameSiteLevel) { + const cookieLevel = Cookie.sameSiteLevel[c.sameSite || "none"]; + if (cookieLevel > sameSiteLevel) { + // only allow cookies at or below the request level + return false; + } + } + + // deferred from S5.3 + // non-RFC: allow retention of expired cookies by choice + if (expireCheck && c.expiryTime() <= now) { + store.removeCookie(c.domain, c.path, c.key, () => {}); // result ignored + return false; + } + + return true; + } + + store.findCookies( + host, + allPaths ? null : path, + this.allowSpecialUseDomain, + (err, cookies) => { + if (err) { + return cb(err); + } + + cookies = cookies.filter(matchingCookie); + + // sorting of S5.4 part 2 + if (options.sort !== false) { + cookies = cookies.sort(cookieCompare); + } + + // S5.4 part 3 + const now = new Date(); + for (const cookie of cookies) { + cookie.lastAccessed = now; + } + // TODO persist lastAccessed + + cb(null, cookies); + } + ); + } + + getCookieString(...args) { + const cb = args.pop(); + const next = function(err, cookies) { + if (err) { + cb(err); + } else { + cb( + null, + cookies + .sort(cookieCompare) + .map(c => c.cookieString()) + .join("; ") + ); + } + }; + args.push(next); + this.getCookies.apply(this, args); + } + + getSetCookieStrings(...args) { + const cb = args.pop(); + const next = function(err, cookies) { + if (err) { + cb(err); + } else { + cb( + null, + cookies.map(c => { + return c.toString(); + }) + ); + } + }; + args.push(next); + this.getCookies.apply(this, args); + } + + serialize(cb) { + let type = this.store.constructor.name; + if (type === "Object") { + type = null; + } + + // update README.md "Serialization Format" if you change this, please! + const serialized = { + // The version of tough-cookie that serialized this jar. Generally a good + // practice since future versions can make data import decisions based on + // known past behavior. When/if this matters, use `semver`. + version: `tough-cookie@${VERSION}`, + + // add the store type, to make humans happy: + storeType: type, + + // CookieJar configuration: + rejectPublicSuffixes: !!this.rejectPublicSuffixes, + + // this gets filled from getAllCookies: + cookies: [] + }; + + if ( + !( + this.store.getAllCookies && + typeof this.store.getAllCookies === "function" + ) + ) { + return cb( + new Error( + "store does not support getAllCookies and cannot be serialized" + ) + ); + } + + this.store.getAllCookies((err, cookies) => { + if (err) { + return cb(err); + } + + serialized.cookies = cookies.map(cookie => { + // convert to serialized 'raw' cookies + cookie = cookie instanceof Cookie ? 
cookie.toJSON() : cookie; + + // Remove the index so new ones get assigned during deserialization + delete cookie.creationIndex; + + return cookie; + }); + + return cb(null, serialized); + }); + } + + toJSON() { + return this.serializeSync(); + } + + // use the class method CookieJar.deserialize instead of calling this directly + _importCookies(serialized, cb) { + let cookies = serialized.cookies; + if (!cookies || !Array.isArray(cookies)) { + return cb(new Error("serialized jar has no cookies array")); + } + cookies = cookies.slice(); // do not modify the original + + const putNext = err => { + if (err) { + return cb(err); + } + + if (!cookies.length) { + return cb(err, this); + } + + let cookie; + try { + cookie = fromJSON(cookies.shift()); + } catch (e) { + return cb(e); + } + + if (cookie === null) { + return putNext(null); // skip this cookie + } + + this.store.putCookie(cookie, putNext); + }; + + putNext(); + } + + clone(newStore, cb) { + if (arguments.length === 1) { + cb = newStore; + newStore = null; + } + + this.serialize((err, serialized) => { + if (err) { + return cb(err); + } + CookieJar.deserialize(serialized, newStore, cb); + }); + } + + cloneSync(newStore) { + if (arguments.length === 0) { + return this._cloneSync(); + } + if (!newStore.synchronous) { + throw new Error( + "CookieJar clone destination store is not synchronous; use async API instead." + ); + } + return this._cloneSync(newStore); + } + + removeAllCookies(cb) { + const store = this.store; + + // Check that the store implements its own removeAllCookies(). The default + // implementation in Store will immediately call the callback with a "not + // implemented" Error. + if ( + typeof store.removeAllCookies === "function" && + store.removeAllCookies !== Store.prototype.removeAllCookies + ) { + return store.removeAllCookies(cb); + } + + store.getAllCookies((err, cookies) => { + if (err) { + return cb(err); + } + + if (cookies.length === 0) { + return cb(null); + } + + let completedCount = 0; + const removeErrors = []; + + function removeCookieCb(removeErr) { + if (removeErr) { + removeErrors.push(removeErr); + } + + completedCount++; + + if (completedCount === cookies.length) { + return cb(removeErrors.length ? removeErrors[0] : null); + } + } + + cookies.forEach(cookie => { + store.removeCookie( + cookie.domain, + cookie.path, + cookie.key, + removeCookieCb + ); + }); + }); + } + + static deserialize(strOrObj, store, cb) { + if (arguments.length !== 3) { + // store is optional + cb = store; + store = null; + } + + let serialized; + if (typeof strOrObj === "string") { + serialized = jsonParse(strOrObj); + if (serialized instanceof Error) { + return cb(serialized); + } + } else { + serialized = strOrObj; + } + + const jar = new CookieJar(store, serialized.rejectPublicSuffixes); + jar._importCookies(serialized, err => { + if (err) { + return cb(err); + } + cb(null, jar); + }); + } + + static deserializeSync(strOrObj, store) { + const serialized = + typeof strOrObj === "string" ? JSON.parse(strOrObj) : strOrObj; + const jar = new CookieJar(store, serialized.rejectPublicSuffixes); + + // catch this mistake early: + if (!jar.store.synchronous) { + throw new Error( + "CookieJar store is not synchronous; use async API instead." 
+ ); + } + + jar._importCookiesSync(serialized); + return jar; + } +} +CookieJar.fromJSON = CookieJar.deserializeSync; + +[ + "_importCookies", + "clone", + "getCookies", + "getCookieString", + "getSetCookieStrings", + "removeAllCookies", + "serialize", + "setCookie" +].forEach(name => { + CookieJar.prototype[name] = fromCallback(CookieJar.prototype[name]); +}); +CookieJar.deserialize = fromCallback(CookieJar.deserialize); + +// Use a closure to provide a true imperative API for synchronous stores. +function syncWrap(method) { + return function(...args) { + if (!this.store.synchronous) { + throw new Error( + "CookieJar store is not synchronous; use async API instead." + ); + } + + let syncErr, syncResult; + this[method](...args, (err, result) => { + syncErr = err; + syncResult = result; + }); + + if (syncErr) { + throw syncErr; + } + return syncResult; + }; +} + +exports.version = VERSION; +exports.CookieJar = CookieJar; +exports.Cookie = Cookie; +exports.Store = Store; +exports.MemoryCookieStore = MemoryCookieStore; +exports.parseDate = parseDate; +exports.formatDate = formatDate; +exports.parse = parse; +exports.fromJSON = fromJSON; +exports.domainMatch = domainMatch; +exports.defaultPath = defaultPath; +exports.pathMatch = pathMatch; +exports.getPublicSuffix = pubsuffix.getPublicSuffix; +exports.cookieCompare = cookieCompare; +exports.permuteDomain = require("./permuteDomain").permuteDomain; +exports.permutePath = permutePath; +exports.canonicalDomain = canonicalDomain; +exports.PrefixSecurityEnum = PrefixSecurityEnum; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js new file mode 100644 index 00000000..912eead3 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js @@ -0,0 +1,190 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ +"use strict"; +const { fromCallback } = require("universalify"); +const Store = require("./store").Store; +const permuteDomain = require("./permuteDomain").permuteDomain; +const pathMatch = require("./pathMatch").pathMatch; +const util = require("util"); + +class MemoryCookieStore extends Store { + constructor() { + super(); + this.synchronous = true; + this.idx = {}; + if (util.inspect.custom) { + this[util.inspect.custom] = this.inspect; + } + } + + inspect() { + return `{ idx: ${util.inspect(this.idx, false, 2)} }`; + } + + findCookie(domain, path, key, cb) { + if (!this.idx[domain]) { + return cb(null, undefined); + } + if (!this.idx[domain][path]) { + return cb(null, undefined); + } + return cb(null, this.idx[domain][path][key] || null); + } + findCookies(domain, path, allowSpecialUseDomain, cb) { + const results = []; + if (typeof allowSpecialUseDomain === "function") { + cb = allowSpecialUseDomain; + allowSpecialUseDomain = false; + } + if (!domain) { + return cb(null, []); + } + + let pathMatcher; + if (!path) { + // null means "all paths" + pathMatcher = function matchAll(domainIndex) { + for (const curPath in domainIndex) { + const pathIndex = domainIndex[curPath]; + for (const key in pathIndex) { + results.push(pathIndex[key]); + } + } + }; + } else { + pathMatcher = function matchRFC(domainIndex) { + //NOTE: we should use path-match algorithm from S5.1.4 here + //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299) + Object.keys(domainIndex).forEach(cookiePath => { + if (pathMatch(path, cookiePath)) { + const pathIndex = domainIndex[cookiePath]; + for (const key in pathIndex) { + results.push(pathIndex[key]); + } + } + }); + }; + } + + const domains = permuteDomain(domain, allowSpecialUseDomain) || [domain]; + const idx = this.idx; + domains.forEach(curDomain => { + const domainIndex = idx[curDomain]; + if (!domainIndex) { + return; + } + pathMatcher(domainIndex); + }); + + cb(null, results); + } + + putCookie(cookie, cb) { + if (!this.idx[cookie.domain]) { + this.idx[cookie.domain] = {}; + } + if (!this.idx[cookie.domain][cookie.path]) { + this.idx[cookie.domain][cookie.path] = {}; + } + this.idx[cookie.domain][cookie.path][cookie.key] = cookie; + cb(null); + } + updateCookie(oldCookie, newCookie, cb) { + // updateCookie() may avoid updating cookies that are identical. For example, + // lastAccessed may not be important to some stores and an equality + // comparison could exclude that field. + this.putCookie(newCookie, cb); + } + removeCookie(domain, path, key, cb) { + if ( + this.idx[domain] && + this.idx[domain][path] && + this.idx[domain][path][key] + ) { + delete this.idx[domain][path][key]; + } + cb(null); + } + removeCookies(domain, path, cb) { + if (this.idx[domain]) { + if (path) { + delete this.idx[domain][path]; + } else { + delete this.idx[domain]; + } + } + return cb(null); + } + removeAllCookies(cb) { + this.idx = {}; + return cb(null); + } + getAllCookies(cb) { + const cookies = []; + const idx = this.idx; + + const domains = Object.keys(idx); + domains.forEach(domain => { + const paths = Object.keys(idx[domain]); + paths.forEach(path => { + const keys = Object.keys(idx[domain][path]); + keys.forEach(key => { + if (key !== null) { + cookies.push(idx[domain][path][key]); + } + }); + }); + }); + + // Sort by creationIndex so deserializing retains the creation order. 
+ // When implementing your own store, this SHOULD retain the order too + cookies.sort((a, b) => { + return (a.creationIndex || 0) - (b.creationIndex || 0); + }); + + cb(null, cookies); + } +} + +[ + "findCookie", + "findCookies", + "putCookie", + "updateCookie", + "removeCookie", + "removeCookies", + "removeAllCookies", + "getAllCookies" +].forEach(name => { + MemoryCookieStore[name] = fromCallback(MemoryCookieStore.prototype[name]); +}); + +exports.MemoryCookieStore = MemoryCookieStore; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js new file mode 100644 index 00000000..16ff63ee --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js @@ -0,0 +1,61 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +"use strict"; +/* + * "A request-path path-matches a given cookie-path if at least one of the + * following conditions holds:" + */ +function pathMatch(reqPath, cookiePath) { + // "o The cookie-path and the request-path are identical." + if (cookiePath === reqPath) { + return true; + } + + const idx = reqPath.indexOf(cookiePath); + if (idx === 0) { + // "o The cookie-path is a prefix of the request-path, and the last + // character of the cookie-path is %x2F ("/")." + if (cookiePath.substr(-1) === "/") { + return true; + } + + // " o The cookie-path is a prefix of the request-path, and the first + // character of the request-path that is not included in the cookie- path + // is a %x2F ("/") character." 
+ if (reqPath.substr(cookiePath.length, 1) === "/") { + return true; + } + } + + return false; +} + +exports.pathMatch = pathMatch; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js new file mode 100644 index 00000000..78e6cad5 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js @@ -0,0 +1,70 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +"use strict"; +const pubsuffix = require("./pubsuffix-psl"); + +// Gives the permutation of all possible domainMatch()es of a given domain. The +// array is in shortest-to-longest order. Handy for indexing. +const SPECIAL_USE_DOMAINS = ["local"]; // RFC 6761 +function permuteDomain(domain, allowSpecialUseDomain) { + let pubSuf = null; + if (allowSpecialUseDomain) { + const domainParts = domain.split("."); + if (SPECIAL_USE_DOMAINS.includes(domainParts[domainParts.length - 1])) { + pubSuf = `${domainParts[domainParts.length - 2]}.${ + domainParts[domainParts.length - 1] + }`; + } else { + pubSuf = pubsuffix.getPublicSuffix(domain); + } + } else { + pubSuf = pubsuffix.getPublicSuffix(domain); + } + + if (!pubSuf) { + return null; + } + if (pubSuf == domain) { + return [domain]; + } + + const prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com" + const parts = prefix.split(".").reverse(); + let cur = pubSuf; + const permutations = [cur]; + while (parts.length) { + cur = `${parts.shift()}.${cur}`; + permutations.push(cur); + } + return permutations; +} + +exports.permuteDomain = permuteDomain; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js new file mode 100644 index 00000000..93a8577c --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js @@ -0,0 +1,38 @@ +/*! 
+ * Copyright (c) 2018, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +"use strict"; +const psl = require("psl"); + +function getPublicSuffix(domain) { + return psl.get(domain); +} + +exports.getPublicSuffix = getPublicSuffix; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js new file mode 100644 index 00000000..2ed0259e --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js @@ -0,0 +1,76 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +"use strict"; +/*jshint unused:false */ + +class Store { + constructor() { + this.synchronous = false; + } + + findCookie(domain, path, key, cb) { + throw new Error("findCookie is not implemented"); + } + + findCookies(domain, path, allowSpecialUseDomain, cb) { + throw new Error("findCookies is not implemented"); + } + + putCookie(cookie, cb) { + throw new Error("putCookie is not implemented"); + } + + updateCookie(oldCookie, newCookie, cb) { + // recommended default implementation: + // return this.putCookie(newCookie, cb); + throw new Error("updateCookie is not implemented"); + } + + removeCookie(domain, path, key, cb) { + throw new Error("removeCookie is not implemented"); + } + + removeCookies(domain, path, cb) { + throw new Error("removeCookies is not implemented"); + } + + removeAllCookies(cb) { + throw new Error("removeAllCookies is not implemented"); + } + + getAllCookies(cb) { + throw new Error( + "getAllCookies is not implemented (therefore jar cannot be serialized)" + ); + } +} + +exports.Store = Store; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js new file mode 100644 index 00000000..e52f25be --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js @@ -0,0 +1,2 @@ +// generated by genversion +module.exports = '4.0.0' diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/package.json b/node_modules/@azure/core-http/node_modules/tough-cookie/package.json new file mode 100644 index 00000000..e07dcb7c --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/package.json @@ -0,0 +1,109 @@ +{ + "author": { + "name": "Jeremy Stashewsky", + "email": "jstash@gmail.com", + "website": "https://github.com/stash" + }, + "contributors": [ + { + "name": "Ivan Nikulin", + "website": "https://github.com/inikulin" + }, + { + "name": "Shivan Kaul Sahib", + "website": "https://github.com/ShivanKaul" + }, + { + "name": "Clint Ruoho", + "website": "https://github.com/ruoho" + }, + { + "name": "Ian Livingstone", + "website": "https://github.com/ianlivingstone" + }, + { + "name": "Andrew Waterman", + "website": "https://github.com/awaterma" + }, + { + "name": "Michael de Libero ", + "website": "https://github.com/medelibero-sfdc" + }, + { + "name": "Jonathan Stewmon", + "website": "https://github.com/jstewmon" + }, + { + "name": "Miguel Roncancio", + "website": "https://github.com/miggs125" + }, + { + "name": "Sebastian Mayr", + "website": "https://github.com/Sebmaster" + }, + { + "name": "Alexander Savin", + "website": "https://github.com/apsavin" + }, + { + "name": "Lalit Kapoor", + "website": "https://github.com/lalitkapoor" + }, + { + "name": "Sam Thompson", + "website": "https://github.com/sambthompson" + } + ], + "license": "BSD-3-Clause", + "name": "tough-cookie", + "description": "RFC6265 Cookies and Cookie Jar for node.js", + "keywords": [ + "HTTP", + "cookie", + "cookies", + 
"set-cookie", + "cookiejar", + "jar", + "RFC6265", + "RFC2965" + ], + "version": "4.0.0", + "homepage": "https://github.com/salesforce/tough-cookie", + "repository": { + "type": "git", + "url": "git://github.com/salesforce/tough-cookie.git" + }, + "bugs": { + "url": "https://github.com/salesforce/tough-cookie/issues" + }, + "main": "./lib/cookie", + "files": [ + "lib" + ], + "scripts": { + "version": "genversion lib/version.js && git add lib/version.js", + "test": "vows test/*_test.js", + "cover": "nyc --reporter=lcov --reporter=html vows test/*_test.js", + "eslint": "eslint --env node --ext .js .", + "prettier": "prettier '**/*.{json,ts,yaml,md}'", + "format": "npm run eslint -- --fix" + }, + "engines": { + "node": ">=6" + }, + "devDependencies": { + "async": "^2.6.2", + "eslint": "^5.16.0", + "eslint-config-prettier": "^4.2.0", + "eslint-plugin-prettier": "^3.0.1", + "genversion": "^2.1.0", + "nyc": "^14.0.0", + "prettier": "^1.17.0", + "vows": "^0.8.2" + }, + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" + } +} diff --git a/node_modules/@azure/core-http/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-http/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 00000000..0e425423 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/core-http/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-http/node_modules/tslib/LICENSE.txt new file mode 100644 index 00000000..bfe6430c --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/tslib/README.md b/node_modules/@azure/core-http/node_modules/tslib/README.md new file mode 100644 index 00000000..72ff8e79 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. 
+ +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-http/node_modules/tslib/modules/index.js b/node_modules/@azure/core-http/node_modules/tslib/modules/index.js new file mode 100644 index 00000000..aaac8bf9 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-http/node_modules/tslib/modules/package.json b/node_modules/@azure/core-http/node_modules/tslib/modules/package.json new file mode 100644 index 00000000..aafa0e4b --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/tslib/package.json b/node_modules/@azure/core-http/node_modules/tslib/package.json new file mode 100644 index 00000000..0ec2c634 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-http/node_modules/tslib/tslib.d.ts new file mode 100644 index 00000000..b8e49f08 --- /dev/null +++ 
b/node_modules/@azure/core-http/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. 
+ * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. 
+ *
+ * ```js
+ * import Default, { Named, Other } from "mod";
+ * // or
+ * import { default as Default, Named, Other } from "mod";
+ * ```
+ *
+ * @param mod The CommonJS module exports object.
+ */
+export declare function __importStar<T>(mod: T): T;
+
+/**
+ * Used to shim default imports in ECMAScript Modules transpiled to CommonJS.
+ *
+ * ```js
+ * import Default from "mod";
+ * ```
+ *
+ * @param mod The CommonJS module exports object.
+ */
+export declare function __importDefault<T>(mod: T): T | { default: T };
+
+/**
+ * Emulates reading a private instance field.
+ *
+ * @param receiver The instance from which to read the private field.
+ * @param state A WeakMap containing the private field value for an instance.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, get(o: T): V | undefined },
+    kind?: "f"
+): V;
+
+/**
+ * Emulates reading a private static field.
+ *
+ * @param receiver The object from which to read the private static field.
+ * @param state The class constructor containing the definition of the static field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates evaluating a private instance "get" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private "get" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates evaluating a private static "get" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "get" accessor.
+ * @param state The class constructor containing the definition of the static "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates reading a private instance method.
+ *
+ * @param receiver The instance from which to read a private method.
+ * @param state A WeakSet used to verify an instance supports the private method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private instance method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates reading a private static method.
+ *
+ * @param receiver The object from which to read the private static method.
+ * @param state The class constructor containing the definition of the static method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private static method.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: T,
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates writing to a private instance field.
+ *
+ * @param receiver The instance on which to set a private field value.
+ * @param state A WeakMap used to store the private field value for an instance.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, set(o: T, value: V): unknown },
+    value: V,
+    kind?: "f"
+): V;
+
+/**
+ * Emulates writing to a private static field.
+ *
+ * @param receiver The object on which to set the private static field.
+ * @param state The class constructor containing the definition of the private static field.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    value: V,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates writing to a private instance "set" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private instance "set" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "set" accessor.
+ * @param value The value to store in the private accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "set" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    value: V,
+    kind: "a",
+    f: (v: V) => void
+): V;
+
+/**
+ * Emulates writing to a private static "set" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "set" accessor.
+ * @param state The class constructor containing the definition of the static "set" accessor.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "set" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    value: V,
+    kind: "a",
+    f: (v: V) => void
+): V;
+
+/**
+ * Checks for the existence of a private field/method/accessor.
+ *
+ * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member.
+ * @param receiver The object for which to test the presence of the private member.
+ */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. + */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.html new file mode 100644 index 00000000..b122e41b --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.js new file mode 100644 index 00000000..e6d77771 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? 
target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? 
m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.html b/node_modules/@azure/core-http/node_modules/tslib/tslib.html new file mode 100644 index 00000000..44c9ba51 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.js b/node_modules/@azure/core-http/node_modules/tslib/tslib.js new file mode 100644 index 00000000..2b7885c1 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-http/node_modules/uuid/CHANGELOG.md b/node_modules/@azure/core-http/node_modules/uuid/CHANGELOG.md new file mode 100644 index 00000000..7519d19d --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,229 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) [#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are exposed, there is no more default export. 
+ + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. + + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent ([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. 
+- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. + +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix 
[#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/@azure/core-http/node_modules/uuid/CONTRIBUTING.md b/node_modules/@azure/core-http/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 00000000..4a4503d0 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! 
+ +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/node_modules/@azure/core-http/node_modules/uuid/LICENSE.md b/node_modules/@azure/core-http/node_modules/uuid/LICENSE.md new file mode 100644 index 00000000..39341683 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@azure/core-http/node_modules/uuid/README.md b/node_modules/@azure/core-http/node_modules/uuid/README.md new file mode 100644 index 00000000..ed27e576 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/README.md @@ -0,0 +1,505 @@ + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - Node 8, 10, 12, 14 + - Chrome, Safari, Firefox, Edge, IE 11 browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +**Upgrading from `uuid@3.x`?** Your code is probably okay, but check out [Upgrading From `uuid@3.x`](#upgrading-from-uuid3x) for details. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... 
or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... + +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). + +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, + 0xc0, + 0xbd, + 0x7f, + 0x11, + 0xc0, + 0x43, + 0xda, + 0x97, + 0x5e, + 0x2a, + 0x8a, + 0xd9, + 0xeb, + 0xae, + 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. + +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." 
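+
+A minimal sketch of `v3()` usage for completeness, since this section defers to `v5()` for its API. It assumes the `DNS` and `URL` namespace constants are exposed on `v3` just as they are on `v5` (the bundled `v35.js` attaches them to both generators); the output comment describes the deterministic behavior rather than a specific value:
+
+```javascript
+import { v3 as uuidv3 } from 'uuid';
+
+// Deterministic: the same name + namespace pair always yields the same v3 UUID
+uuidv3('hello.example.com', uuidv3.DNS); // ⇨ a version 3 (MD5-based) UUID string
+```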
+ +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, + 0x91, + 0x56, + 0xbe, + 0xc4, + 0xfb, + 0xc1, + 0xea, + 0x71, + 0xb4, + 0xef, + 0xe1, + 0x67, + 0x1c, + 0x58, + 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ uuid --help + +Usage: + uuid + uuid v1 + uuid v3 + uuid v4 + uuid v5 + uuid --help + +Note: may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html + +``` + +### UMD + +To load this module directly into older browsers you can use the [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds from any of the following CDNs: + +**Using [UNPKG](https://unpkg.com/uuid@latest/dist/umd/)**: + +```html + +``` + +**Using [jsDelivr](https://cdn.jsdelivr.net/npm/uuid@latest/dist/umd/)**: + +```html + +``` + +**Using [cdnjs](https://cdnjs.com/libraries/uuid)**: + +```html + +``` + +These CDNs all provide the same [`uuidv4()`](#uuidv4options-buffer-offset) method: + +```html + +``` + +Methods for the other algorithms ([`uuidv1()`](#uuidv1options-buffer-offset), [`uuidv3()`](#uuidv3name-namespace-buffer-offset) and [`uuidv5()`](#uuidv5name-namespace-buffer-offset)) are available from the files `uuidv1.min.js`, `uuidv3.min.js` and `uuidv5.min.js` respectively. + +## "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. 
Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +## Upgrading From `uuid@7.x` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7.x` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7.x`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3.x` + +"_Wait... what happened to `uuid@4.x` - `uuid@6.x`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. + +### Deep Requires Now Deprecated + +`uuid@3.x` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7.x` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3.x` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3.x` and has been removed in `uuid@7.x`. 
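+
+As a minimal sketch of the replacement for the removed default export (mirroring the named-export forms shown earlier in this README), assuming CommonJS:
+
+```javascript
+// Use the named export instead of the removed default export
+const { v4: uuidv4 } = require('uuid');
+uuidv4(); // ⇨ a random version 4 UUID string
+```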
+ +---- +Markdown generated from [README_js.md](README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/bin/uuid b/node_modules/@azure/core-http/node_modules/uuid/dist/bin/uuid new file mode 100755 index 00000000..f38d2ee1 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/bin/uuid @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../uuid-bin'); diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/index.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/index.js new file mode 100644 index 00000000..1db6f6d2 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/md5.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/md5.js new file mode 100644 index 00000000..8b5d46a7 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/md5.js @@ -0,0 +1,215 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (var i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + var output = []; + var length32 = input.length * 32; + var hexTab = '0123456789abcdef'; + + for (var i = 0; i < length32; i += 8) { + var x = input[i >> 5] >>> i % 32 & 0xff; + var hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + var a = 1732584193; + var b = -271733879; + var c = -1732584194; + var d = 271733878; + + for (var i = 0; i < x.length; i += 16) { + var olda = a; + var oldb = b; + var oldc = c; + var oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 
9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + var length8 = input.length * 8; + var output = new Uint32Array(getOutputLength(length8)); + + for (var i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + var lsw = (x & 0xffff) + (y & 0xffff); + var msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/nil.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/nil.js new file mode 100644 index 00000000..b36324c2 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/parse.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/parse.js new file mode 100644 index 00000000..7c5b1d5a --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + var v; + var arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/regex.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/regex.js new file mode 100644 index 00000000..3da8673a --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/rng.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/rng.js new file mode 100644 index 00000000..8abbf2ea --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/rng.js @@ -0,0 +1,19 @@ +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +var getRandomValues; +var rnds8 = new Uint8Array(16); +export default function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also, + // find the complete implementation of crypto (msCrypto) on IE11. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/sha1.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/sha1.js new file mode 100644 index 00000000..940548ba --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/sha1.js @@ -0,0 +1,96 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + var K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + var H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (var i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + var l = bytes.length / 4 + 2; + var N = Math.ceil(l / 16); + var M = new Array(N); + + for (var _i = 0; _i < N; ++_i) { + var arr = new Uint32Array(16); + + for (var j = 0; j < 16; ++j) { + arr[j] = bytes[_i * 64 + j * 4] << 24 | bytes[_i * 64 + j * 4 + 1] << 16 | bytes[_i * 64 + j * 4 + 2] << 8 | bytes[_i * 64 + j * 4 + 3]; + } + + M[_i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (var _i2 = 0; _i2 < N; ++_i2) { + var W = new Uint32Array(80); + + for (var t = 0; t < 16; ++t) { + W[t] = M[_i2][t]; + } + + for (var _t = 16; _t < 80; ++_t) { + W[_t] = ROTL(W[_t - 3] ^ W[_t - 8] ^ W[_t - 14] ^ W[_t - 16], 1); + } + + var a = H[0]; + var b = H[1]; + var c = H[2]; + var d = H[3]; + var e = H[4]; + + for (var _t2 = 0; _t2 < 80; ++_t2) { + var s = Math.floor(_t2 / 20); + var T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[_t2] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/stringify.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/stringify.js new file mode 100644 index 00000000..31021115 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/stringify.js @@ -0,0 +1,30 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +var byteToHex = []; + +for (var i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr) { + var offset = arguments.length > 
1 && arguments[1] !== undefined ? arguments[1] : 0; + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v1.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v1.js new file mode 100644 index 00000000..1a22591e --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +var _nodeId; + +var _clockseq; // Previous uuid creation time + + +var _lastMSecs = 0; +var _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || new Array(16); + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + var seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + var msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + var dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + var tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || stringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v3.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v3.js new file mode 100644 index 00000000..c9ab9a4c --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +var v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v35.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v35.js new file mode 100644 index 00000000..31dd8a1c --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v35.js @@ -0,0 +1,64 @@ +import stringify from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + var bytes = []; + + for (var i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export var DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export var URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function (name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + var bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (var i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return stringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v4.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v4.js new file mode 100644 index 00000000..404810a4 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v4.js @@ -0,0 +1,24 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; + +function v4(options, buf, offset) { + options = options || {}; + var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (var i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return stringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v5.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v5.js new file mode 100644 index 00000000..c08d96ba --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +var v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/validate.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/validate.js new file mode 100644 index 00000000..f1cdc7af --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/version.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/version.js new file mode 100644 index 00000000..77530e9c --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-browser/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/index.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/index.js new file mode 100644 index 00000000..1db6f6d2 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } 
from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/md5.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/md5.js new file mode 100644 index 00000000..4d68b040 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/md5.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/nil.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/nil.js new file mode 100644 index 00000000..b36324c2 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/parse.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/parse.js new file mode 100644 index 00000000..6421c5d5 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/regex.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/regex.js new file mode 100644 index 00000000..3da8673a --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/rng.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/rng.js new file mode 100644 index 00000000..80062449 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/rng.js @@ -0,0 +1,12 @@ +import crypto from 'crypto'; +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +export default function rng() { + if (poolPtr > rnds8Pool.length - 16) { + crypto.randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/sha1.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/sha1.js new file mode 100644 index 00000000..e23850b4 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/sha1.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/stringify.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/stringify.js new file mode 100644 index 00000000..f9bca120 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/stringify.js @@ -0,0 +1,29 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v1.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v1.js new file mode 100644 index 00000000..ebf81acb --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || stringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v3.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v3.js new file mode 100644 index 00000000..09063b86 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v35.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v35.js new file mode 100644 index 00000000..22f6a196 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v35.js @@ -0,0 +1,64 @@ +import stringify from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function (name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return stringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v4.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v4.js new file mode 100644 index 00000000..efad926f --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v4.js @@ -0,0 +1,24 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; + +function v4(options, buf, offset) { + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return stringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v5.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v5.js new file mode 100644 index 00000000..e87fe317 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/validate.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/validate.js new file mode 100644 index 00000000..f1cdc7af --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/version.js b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/version.js new file mode 100644 index 00000000..77530e9c --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/index.js b/node_modules/@azure/core-http/node_modules/uuid/dist/index.js new file mode 100644 index 00000000..bf13b103 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); 
+Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function () { + return _nil.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function () { + return _stringify.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/md5-browser.js b/node_modules/@azure/core-http/node_modules/uuid/dist/md5-browser.js new file mode 100644 index 00000000..7a4582ac --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/md5-browser.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. + */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, 
d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/md5.js b/node_modules/@azure/core-http/node_modules/uuid/dist/md5.js new file mode 100644 index 00000000..824d4816 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/md5.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/nil.js b/node_modules/@azure/core-http/node_modules/uuid/dist/nil.js new file mode 100644 index 00000000..7ade577b --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/parse.js b/node_modules/@azure/core-http/node_modules/uuid/dist/parse.js new file mode 100644 index 00000000..4c69fc39 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/regex.js b/node_modules/@azure/core-http/node_modules/uuid/dist/regex.js new file mode 100644 index 00000000..1ef91d64 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/rng-browser.js b/node_modules/@azure/core-http/node_modules/uuid/dist/rng-browser.js new file mode 100644 index 00000000..91faeae6 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/rng-browser.js @@ -0,0 +1,26 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also, + // find the complete implementation of crypto (msCrypto) on IE11. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/rng.js b/node_modules/@azure/core-http/node_modules/uuid/dist/rng.js new file mode 100644 index 00000000..3507f937 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/rng.js @@ -0,0 +1,24 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/sha1-browser.js b/node_modules/@azure/core-http/node_modules/uuid/dist/sha1-browser.js new file mode 100644 index 00000000..24cbcedc --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/sha1-browser.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/sha1.js b/node_modules/@azure/core-http/node_modules/uuid/dist/sha1.js new file mode 100644 index 00000000..03bdd63c --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/sha1.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", 
{ + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/stringify.js b/node_modules/@azure/core-http/node_modules/uuid/dist/stringify.js new file mode 100644 index 00000000..b8e75194 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/stringify.js @@ -0,0 +1,39 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuid.min.js b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuid.min.js new file mode 100644 index 00000000..639ca2f2 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuid.min.js @@ -0,0 +1 @@ +!function(r,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((r="undefined"!=typeof globalThis?globalThis:r||self).uuid={})}(this,(function(r){"use strict";var e,n=new Uint8Array(16);function t(){if(!e&&!(e="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return e(n)}var o=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function a(r){return"string"==typeof r&&o.test(r)}for(var i,u,f=[],s=0;s<256;++s)f.push((s+256).toString(16).substr(1));function c(r){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=(f[r[e+0]]+f[r[e+1]]+f[r[e+2]]+f[r[e+3]]+"-"+f[r[e+4]]+f[r[e+5]]+"-"+f[r[e+6]]+f[r[e+7]]+"-"+f[r[e+8]]+f[r[e+9]]+"-"+f[r[e+10]]+f[r[e+11]]+f[r[e+12]]+f[r[e+13]]+f[r[e+14]]+f[r[e+15]]).toLowerCase();if(!a(n))throw TypeError("Stringified UUID is invalid");return n}var l=0,d=0;function v(r){if(!a(r))throw TypeError("Invalid UUID");var e,n=new Uint8Array(16);return n[0]=(e=parseInt(r.slice(0,8),16))>>>24,n[1]=e>>>16&255,n[2]=e>>>8&255,n[3]=255&e,n[4]=(e=parseInt(r.slice(9,13),16))>>>8,n[5]=255&e,n[6]=(e=parseInt(r.slice(14,18),16))>>>8,n[7]=255&e,n[8]=(e=parseInt(r.slice(19,23),16))>>>8,n[9]=255&e,n[10]=(e=parseInt(r.slice(24,36),16))/1099511627776&255,n[11]=e/4294967296&255,n[12]=e>>>24&255,n[13]=e>>>16&255,n[14]=e>>>8&255,n[15]=255&e,n}function p(r,e,n){function t(r,t,o,a){if("string"==typeof r&&(r=function(r){r=unescape(encodeURIComponent(r));for(var e=[],n=0;n>>9<<4)+1}function y(r,e){var n=(65535&r)+(65535&e);return(r>>16)+(e>>16)+(n>>16)<<16|65535&n}function g(r,e,n,t,o,a){return y((i=y(y(e,r),y(t,a)))<<(u=o)|i>>>32-u,n);var i,u}function m(r,e,n,t,o,a,i){return g(e&n|~e&t,r,e,o,a,i)}function w(r,e,n,t,o,a,i){return g(e&t|n&~t,r,e,o,a,i)}function b(r,e,n,t,o,a,i){return g(e^n^t,r,e,o,a,i)}function A(r,e,n,t,o,a,i){return g(n^(e|~t),r,e,o,a,i)}var U=p("v3",48,(function(r){if("string"==typeof r){var e=unescape(encodeURIComponent(r));r=new Uint8Array(e.length);for(var n=0;n>5]>>>o%32&255,i=parseInt(t.charAt(a>>>4&15)+t.charAt(15&a),16);e.push(i)}return e}(function(r,e){r[e>>5]|=128<>5]|=(255&r[t/8])<>>32-e}var R=p("v5",80,(function(r){var e=[1518500249,1859775393,2400959708,3395469782],n=[1732584193,4023233417,2562383102,271733878,3285377520];if("string"==typeof r){var t=unescape(encodeURIComponent(r));r=[];for(var o=0;o>>0;w=m,m=g,g=C(y,30)>>>0,y=h,h=U}n[0]=n[0]+h>>>0,n[1]=n[1]+y>>>0,n[2]=n[2]+g>>>0,n[3]=n[3]+m>>>0,n[4]=n[4]+w>>>0}return[n[0]>>24&255,n[0]>>16&255,n[0]>>8&255,255&n[0],n[1]>>24&255,n[1]>>16&255,n[1]>>8&255,255&n[1],n[2]>>24&255,n[2]>>16&255,n[2]>>8&255,255&n[2],n[3]>>24&255,n[3]>>16&255,n[3]>>8&255,255&n[3],n[4]>>24&255,n[4]>>16&255,n[4]>>8&255,255&n[4]]}));r.NIL="00000000-0000-0000-0000-000000000000",r.parse=v,r.stringify=c,r.v1=function(r,e,n){var o=e&&n||0,a=e||new Array(16),f=(r=r||{}).node||i,s=void 0!==r.clockseq?r.clockseq:u;if(null==f||null==s){var v=r.random||(r.rng||t)();null==f&&(f=i=[1|v[0],v[1],v[2],v[3],v[4],v[5]]),null==s&&(s=u=16383&(v[6]<<8|v[7]))}var p=void 0!==r.msecs?r.msecs:Date.now(),h=void 0!==r.nsecs?r.nsecs:d+1,y=p-l+(h-d)/1e4;if(y<0&&void 0===r.clockseq&&(s=s+1&16383),(y<0||p>l)&&void 0===r.nsecs&&(h=0),h>=1e4)throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");l=p,d=h,u=s;var g=(1e4*(268435455&(p+=122192928e5))+h)%4294967296;a[o++]=g>>>24&255,a[o++]=g>>>16&255,a[o++]=g>>>8&255,a[o++]=255&g;var m=p/4294967296*1e4&268435455;a[o++]=m>>>8&255,a[o++]=255&m,a[o++]=m>>>24&15|16,a[o++]=m>>>16&255,a[o++]=s>>>8|128,a[o++]=255&s;for(var w=0;w<6;++w)a[o+w]=f[w];return e||c(a)},r.v3=U,r.v4=function(r,e,n){var o=(r=r||{}).random||(r.rng||t)();if(o[6]=15&o[6]|64,o[8]=63&o[8]|128,e){n=n||0;for(var a=0;a<16;++a)e[n+a]=o[a];return e}return 
c(o)},r.v5=R,r.validate=a,r.version=function(r){if(!a(r))throw TypeError("Invalid UUID");return parseInt(r.substr(14,1),16)},Object.defineProperty(r,"__esModule",{value:!0})})); \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidNIL.min.js b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidNIL.min.js new file mode 100644 index 00000000..30b28a7e --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidNIL.min.js @@ -0,0 +1 @@ +!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidNIL=n()}(this,(function(){"use strict";return"00000000-0000-0000-0000-000000000000"})); \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidParse.min.js b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidParse.min.js new file mode 100644 index 00000000..d48ea6af --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidParse.min.js @@ -0,0 +1 @@ +!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidParse=n()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(n){if(!function(n){return"string"==typeof n&&e.test(n)}(n))throw TypeError("Invalid UUID");var t,i=new Uint8Array(16);return i[0]=(t=parseInt(n.slice(0,8),16))>>>24,i[1]=t>>>16&255,i[2]=t>>>8&255,i[3]=255&t,i[4]=(t=parseInt(n.slice(9,13),16))>>>8,i[5]=255&t,i[6]=(t=parseInt(n.slice(14,18),16))>>>8,i[7]=255&t,i[8]=(t=parseInt(n.slice(19,23),16))>>>8,i[9]=255&t,i[10]=(t=parseInt(n.slice(24,36),16))/1099511627776&255,i[11]=t/4294967296&255,i[12]=t>>>24&255,i[13]=t>>>16&255,i[14]=t>>>8&255,i[15]=255&t,i}})); \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidStringify.min.js b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidStringify.min.js new file mode 100644 index 00000000..fd39adc3 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidStringify.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidStringify=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function t(t){return"string"==typeof t&&e.test(t)}for(var i=[],n=0;n<256;++n)i.push((n+256).toString(16).substr(1));return function(e){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,f=(i[e[n+0]]+i[e[n+1]]+i[e[n+2]]+i[e[n+3]]+"-"+i[e[n+4]]+i[e[n+5]]+"-"+i[e[n+6]]+i[e[n+7]]+"-"+i[e[n+8]]+i[e[n+9]]+"-"+i[e[n+10]]+i[e[n+11]]+i[e[n+12]]+i[e[n+13]]+i[e[n+14]]+i[e[n+15]]).toLowerCase();if(!t(f))throw TypeError("Stringified UUID is invalid");return f}})); \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidValidate.min.js b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidValidate.min.js new file mode 100644 index 00000000..378e5b90 --- /dev/null +++ 
b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidValidate.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidValidate=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(t){return"string"==typeof t&&e.test(t)}})); \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidVersion.min.js b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidVersion.min.js new file mode 100644 index 00000000..274bb090 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidVersion.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidVersion=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(t){if(!function(t){return"string"==typeof t&&e.test(t)}(t))throw TypeError("Invalid UUID");return parseInt(t.substr(14,1),16)}})); \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv1.min.js b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv1.min.js new file mode 100644 index 00000000..2622889a --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv1.min.js @@ -0,0 +1 @@ +!function(e,o){"object"==typeof exports&&"undefined"!=typeof module?module.exports=o():"function"==typeof define&&define.amd?define(o):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidv1=o()}(this,(function(){"use strict";var e,o=new Uint8Array(16);function t(){if(!e&&!(e="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return e(o)}var n=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function r(e){return"string"==typeof e&&n.test(e)}for(var i,u,s=[],a=0;a<256;++a)s.push((a+256).toString(16).substr(1));var d=0,f=0;return function(e,o,n){var a=o&&n||0,c=o||new Array(16),l=(e=e||{}).node||i,p=void 0!==e.clockseq?e.clockseq:u;if(null==l||null==p){var v=e.random||(e.rng||t)();null==l&&(l=i=[1|v[0],v[1],v[2],v[3],v[4],v[5]]),null==p&&(p=u=16383&(v[6]<<8|v[7]))}var y=void 0!==e.msecs?e.msecs:Date.now(),m=void 0!==e.nsecs?e.nsecs:f+1,g=y-d+(m-f)/1e4;if(g<0&&void 0===e.clockseq&&(p=p+1&16383),(g<0||y>d)&&void 0===e.nsecs&&(m=0),m>=1e4)throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");d=y,f=m,u=p;var h=(1e4*(268435455&(y+=122192928e5))+m)%4294967296;c[a++]=h>>>24&255,c[a++]=h>>>16&255,c[a++]=h>>>8&255,c[a++]=255&h;var w=y/4294967296*1e4&268435455;c[a++]=w>>>8&255,c[a++]=255&w,c[a++]=w>>>24&15|16,c[a++]=w>>>16&255,c[a++]=p>>>8|128,c[a++]=255&p;for(var b=0;b<6;++b)c[a+b]=l[b];return o||function(e){var o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,t=(s[e[o+0]]+s[e[o+1]]+s[e[o+2]]+s[e[o+3]]+"-"+s[e[o+4]]+s[e[o+5]]+"-"+s[e[o+6]]+s[e[o+7]]+"-"+s[e[o+8]]+s[e[o+9]]+"-"+s[e[o+10]]+s[e[o+11]]+s[e[o+12]]+s[e[o+13]]+s[e[o+14]]+s[e[o+15]]).toLowerCase();if(!r(t))throw TypeError("Stringified UUID is invalid");return t}(c)}})); \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv3.min.js b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv3.min.js new file mode 100644 index 00000000..8d37b62d --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv3.min.js @@ -0,0 +1 @@ +!function(n,r){"object"==typeof exports&&"undefined"!=typeof module?module.exports=r():"function"==typeof define&&define.amd?define(r):(n="undefined"!=typeof globalThis?globalThis:n||self).uuidv3=r()}(this,(function(){"use strict";var n=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function r(r){return"string"==typeof r&&n.test(r)}for(var e=[],t=0;t<256;++t)e.push((t+256).toString(16).substr(1));function i(n){return 14+(n+64>>>9<<4)+1}function o(n,r){var e=(65535&n)+(65535&r);return(n>>16)+(r>>16)+(e>>16)<<16|65535&e}function a(n,r,e,t,i,a){return o((f=o(o(r,n),o(t,a)))<<(u=i)|f>>>32-u,e);var f,u}function f(n,r,e,t,i,o,f){return a(r&e|~r&t,n,r,i,o,f)}function u(n,r,e,t,i,o,f){return a(r&t|e&~t,n,r,i,o,f)}function c(n,r,e,t,i,o,f){return a(r^e^t,n,r,i,o,f)}function s(n,r,e,t,i,o,f){return a(e^(r|~t),n,r,i,o,f)}return function(n,t,i){function o(n,o,a,f){if("string"==typeof n&&(n=function(n){n=unescape(encodeURIComponent(n));for(var r=[],e=0;e>>24,t[1]=e>>>16&255,t[2]=e>>>8&255,t[3]=255&e,t[4]=(e=parseInt(n.slice(9,13),16))>>>8,t[5]=255&e,t[6]=(e=parseInt(n.slice(14,18),16))>>>8,t[7]=255&e,t[8]=(e=parseInt(n.slice(19,23),16))>>>8,t[9]=255&e,t[10]=(e=parseInt(n.slice(24,36),16))/1099511627776&255,t[11]=e/4294967296&255,t[12]=e>>>24&255,t[13]=e>>>16&255,t[14]=e>>>8&255,t[15]=255&e,t}(o)),16!==o.length)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");var u=new Uint8Array(16+n.length);if(u.set(o),u.set(n,o.length),(u=i(u))[6]=15&u[6]|t,u[8]=63&u[8]|128,a){f=f||0;for(var c=0;c<16;++c)a[f+c]=u[c];return a}return function(n){var t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:0,i=(e[n[t+0]]+e[n[t+1]]+e[n[t+2]]+e[n[t+3]]+"-"+e[n[t+4]]+e[n[t+5]]+"-"+e[n[t+6]]+e[n[t+7]]+"-"+e[n[t+8]]+e[n[t+9]]+"-"+e[n[t+10]]+e[n[t+11]]+e[n[t+12]]+e[n[t+13]]+e[n[t+14]]+e[n[t+15]]).toLowerCase();if(!r(i))throw TypeError("Stringified UUID is invalid");return i}(u)}try{o.name=n}catch(n){}return o.DNS="6ba7b810-9dad-11d1-80b4-00c04fd430c8",o.URL="6ba7b811-9dad-11d1-80b4-00c04fd430c8",o}("v3",48,(function(n){if("string"==typeof n){var r=unescape(encodeURIComponent(n));n=new Uint8Array(r.length);for(var e=0;e>5]>>>i%32&255,a=parseInt(t.charAt(o>>>4&15)+t.charAt(15&o),16);r.push(a)}return r}(function(n,r){n[r>>5]|=128<>5]|=(255&n[t/8])<1&&void 0!==arguments[1]?arguments[1]:0,o=(i[t[e+0]]+i[t[e+1]]+i[t[e+2]]+i[t[e+3]]+"-"+i[t[e+4]]+i[t[e+5]]+"-"+i[t[e+6]]+i[t[e+7]]+"-"+i[t[e+8]]+i[t[e+9]]+"-"+i[t[e+10]]+i[t[e+11]]+i[t[e+12]]+i[t[e+13]]+i[t[e+14]]+i[t[e+15]]).toLowerCase();if(!r(o))throw TypeError("Stringified UUID is invalid");return o}(u)}})); \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv5.min.js b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv5.min.js new file mode 100644 index 00000000..ba6fc63d --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/umd/uuidv5.min.js @@ -0,0 +1 @@ +!function(r,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(r="undefined"!=typeof globalThis?globalThis:r||self).uuidv5=e()}(this,(function(){"use strict";var r=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function e(e){return"string"==typeof e&&r.test(e)}for(var t=[],n=0;n<256;++n)t.push((n+256).toString(16).substr(1));function a(r,e,t,n){switch(r){case 0:return e&t^~e&n;case 1:return e^t^n;case 2:return e&t^e&n^t&n;case 3:return e^t^n}}function o(r,e){return r<>>32-e}return function(r,n,a){function o(r,o,i,f){if("string"==typeof r&&(r=function(r){r=unescape(encodeURIComponent(r));for(var e=[],t=0;t>>24,n[1]=t>>>16&255,n[2]=t>>>8&255,n[3]=255&t,n[4]=(t=parseInt(r.slice(9,13),16))>>>8,n[5]=255&t,n[6]=(t=parseInt(r.slice(14,18),16))>>>8,n[7]=255&t,n[8]=(t=parseInt(r.slice(19,23),16))>>>8,n[9]=255&t,n[10]=(t=parseInt(r.slice(24,36),16))/1099511627776&255,n[11]=t/4294967296&255,n[12]=t>>>24&255,n[13]=t>>>16&255,n[14]=t>>>8&255,n[15]=255&t,n}(o)),16!==o.length)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");var s=new Uint8Array(16+r.length);if(s.set(o),s.set(r,o.length),(s=a(s))[6]=15&s[6]|n,s[8]=63&s[8]|128,i){f=f||0;for(var u=0;u<16;++u)i[f+u]=s[u];return i}return function(r){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,a=(t[r[n+0]]+t[r[n+1]]+t[r[n+2]]+t[r[n+3]]+"-"+t[r[n+4]]+t[r[n+5]]+"-"+t[r[n+6]]+t[r[n+7]]+"-"+t[r[n+8]]+t[r[n+9]]+"-"+t[r[n+10]]+t[r[n+11]]+t[r[n+12]]+t[r[n+13]]+t[r[n+14]]+t[r[n+15]]).toLowerCase();if(!e(a))throw TypeError("Stringified UUID is invalid");return a}(s)}try{o.name=r}catch(r){}return o.DNS="6ba7b810-9dad-11d1-80b4-00c04fd430c8",o.URL="6ba7b811-9dad-11d1-80b4-00c04fd430c8",o}("v5",80,(function(r){var e=[1518500249,1859775393,2400959708,3395469782],t=[1732584193,4023233417,2562383102,271733878,3285377520];if("string"==typeof r){var n=unescape(encodeURIComponent(r));r=[];for(var 
i=0;i>>0;A=U,U=w,w=o(b,30)>>>0,b=g,g=C}t[0]=t[0]+g>>>0,t[1]=t[1]+b>>>0,t[2]=t[2]+w>>>0,t[3]=t[3]+U>>>0,t[4]=t[4]+A>>>0}return[t[0]>>24&255,t[0]>>16&255,t[0]>>8&255,255&t[0],t[1]>>24&255,t[1]>>16&255,t[1]>>8&255,255&t[1],t[2]>>24&255,t[2]>>16&255,t[2]>>8&255,255&t[2],t[3]>>24&255,t[3]>>16&255,t[3]>>8&255,255&t[3],t[4]>>24&255,t[4]>>16&255,t[4]>>8&255,255&t[4]]}))})); \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/uuid-bin.js b/node_modules/@azure/core-http/node_modules/uuid/dist/uuid-bin.js new file mode 100644 index 00000000..50a7a9f1 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/uuid-bin.js @@ -0,0 +1,85 @@ +"use strict"; + +var _assert = _interopRequireDefault(require("assert")); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 '); + console.log(' uuid v4'); + console.log(' uuid v5 '); + console.log(' uuid --help'); + console.log('\nNote: may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +const args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} + +const version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + console.log((0, _v.default)()); + break; + + case 'v3': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v3 name not specified'); + (0, _assert.default)(namespace != null, 'v3 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v2.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v2.default.DNS; + } + + console.log((0, _v2.default)(name, namespace)); + break; + } + + case 'v4': + console.log((0, _v3.default)()); + break; + + case 'v5': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v5 name not specified'); + (0, _assert.default)(namespace != null, 'v5 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v4.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v4.default.DNS; + } + + console.log((0, _v4.default)(name, namespace)); + break; + } + + default: + usage(); + process.exit(1); +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/v1.js b/node_modules/@azure/core-http/node_modules/uuid/dist/v1.js new file mode 100644 index 00000000..abb9b3d1 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/v3.js b/node_modules/@azure/core-http/node_modules/uuid/dist/v3.js new file mode 100644 index 00000000..6b47ff51 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/v35.js b/node_modules/@azure/core-http/node_modules/uuid/dist/v35.js new file mode 100644 index 00000000..f784c633 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/v35.js @@ -0,0 +1,78 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/v4.js b/node_modules/@azure/core-http/node_modules/uuid/dist/v4.js new file mode 100644 index 00000000..838ce0b2 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/v4.js @@ -0,0 +1,37 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/v5.js b/node_modules/@azure/core-http/node_modules/uuid/dist/v5.js new file mode 100644 index 00000000..99d615e0 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/validate.js b/node_modules/@azure/core-http/node_modules/uuid/dist/validate.js new file mode 100644 index 00000000..fd052157 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/dist/version.js b/node_modules/@azure/core-http/node_modules/uuid/dist/version.js new file mode 100644 index 00000000..b72949cd --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/dist/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/uuid/package.json b/node_modules/@azure/core-http/node_modules/uuid/package.json new file mode 100644 index 00000000..f0ab3711 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/package.json @@ -0,0 +1,135 @@ +{ + "name": "uuid", + "version": "8.3.2", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./dist/bin/uuid" + }, + "sideEffects": false, + "main": "./dist/index.js", + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "module": "./dist/esm-node/index.js", + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "devDependencies": { + "@babel/cli": "7.11.6", + "@babel/core": "7.11.6", + "@babel/preset-env": "7.11.5", + "@commitlint/cli": "11.0.0", + "@commitlint/config-conventional": "11.0.0", + "@rollup/plugin-node-resolve": "9.0.0", + "babel-eslint": "10.1.0", + "bundlewatch": "0.3.1", + "eslint": "7.10.0", + "eslint-config-prettier": "6.12.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.22.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "3.1.4", + "eslint-plugin-promise": "4.2.1", + "eslint-plugin-standard": "4.0.1", + "husky": "4.3.0", + "jest": "25.5.4", + "lint-staged": 
"10.4.0", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.1.2", + "random-seed": "0.3.0", + "rollup": "2.28.2", + "rollup-plugin-terser": "7.0.2", + "runmd": "1.3.2", + "standard-version": "9.0.0" + }, + "optionalDevDependencies": { + "@wdio/browserstack-service": "6.4.0", + "@wdio/cli": "6.4.0", + "@wdio/jasmine-framework": "6.4.0", + "@wdio/local-runner": "6.4.0", + "@wdio/spec-reporter": "6.4.0", + "@wdio/static-server-service": "6.4.0", + "@wdio/sync": "6.4.0" + }, + "scripts": { + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "lint": "npm run eslint:check && npm run prettier:check", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "pretest": "[ -n $CI ] || npm run build", + "test": "BABEL_ENV=commonjs node --throw-deprecation node_modules/.bin/jest test/unit/", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**", + "test:browser": "wdio run ./wdio.conf.js", + "pretest:node": "npm run build", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh", + "pretest:benchmark": "npm run build", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "prettier:check": "prettier --ignore-path .prettierignore --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --ignore-path .prettierignore --write '**/*.{js,jsx,json,md}'", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "md": "runmd --watch --output=README.md README_js.md", + "docs": "( node --version | grep -q 'v12' ) && ( npm run build && runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "build": "./scripts/build.sh", + "prepack": "npm run build", + "release": "standard-version --no-verify" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "husky": { + "hooks": { + "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + } +} diff --git a/node_modules/@azure/core-http/node_modules/uuid/wrapper.mjs b/node_modules/@azure/core-http/node_modules/uuid/wrapper.mjs new file mode 100644 index 00000000..c31e9cef --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 = uuid.v1; +export const v3 = uuid.v3; +export const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/node_modules/@azure/core-http/package.json b/node_modules/@azure/core-http/package.json new file mode 100644 index 00000000..908db988 --- /dev/null +++ b/node_modules/@azure/core-http/package.json @@ -0,0 +1,178 @@ +{ + "name": "@azure/core-http", + "sdk-type": "client", + "author": 
"Microsoft Corporation", + "version": "2.2.5", + "description": "Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest", + "tags": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime" + ], + "engines": { + "node": ">=12.0.0" + }, + "keywords": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime", + "azure", + "cloud" + ], + "main": "dist/index.js", + "module": "./dist-esm/src/coreHttp.js", + "types": "./types/latest/src/coreHttp.d.ts", + "typesVersions": { + "<3.6": { + "types/latest/src/*": [ + "types/3.1/src/*" + ] + } + }, + "files": [ + "dist/", + "dist-esm/src/", + "dom-shim.d.ts", + "types/*/src/**/*.d.ts", + "types/*/src/**/*.d.ts.map", + "README.md", + "LICENSE" + ], + "browser": { + "./dist-esm/src/policies/msRestUserAgentPolicy.js": "./dist-esm/src/policies/msRestUserAgentPolicy.browser.js", + "./dist-esm/src/policies/disableResponseDecompressionPolicy.js": "./dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js", + "./dist-esm/src/policies/proxyPolicy.js": "./dist-esm/src/policies/proxyPolicy.browser.js", + "./dist-esm/src/util/base64.js": "./dist-esm/src/util/base64.browser.js", + "./dist-esm/src/util/xml.js": "./dist-esm/src/util/xml.browser.js", + "./dist-esm/src/defaultHttpClient.js": "./dist-esm/src/defaultHttpClient.browser.js", + "./dist-esm/src/util/inspect.js": "./dist-esm/src/util/inspect.browser.js" + }, + "react-native": { + "./dist/index.js": "./dist-esm/src/coreHttp.js", + "./dist-esm/src/util/xml.js": "./dist-esm/src/util/xml.js", + "./dist-esm/src/policies/msRestUserAgentPolicy.js": "./dist-esm/src/policies/msRestUserAgentPolicy.native.js" + }, + "license": "MIT", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-http/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p tsconfig.es.json && dev-tool run bundle", + "build:types": "downlevel-dts types/latest/ types/3.1/", + "build": "npm run clean && tsc -p tsconfig.es.json && dev-tool run bundle && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p tsconfig.es.json && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . 
&& npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "cross-env TS_NODE_FILES=true mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 500000 --full-trace --exclude \"test/**/*.browser.ts\" \"test/**/*.ts\"" + }, + "sideEffects": false, + "nyc": { + "extension": [ + ".ts" + ], + "exclude": [ + "coverage/**/*", + "**/*.d.ts", + "**/*.js" + ], + "reporter": [ + "text", + "html", + "cobertura" + ], + "all": true + }, + "//metadata": { + "constantPaths": [ + { + "path": "src/util/constants.ts", + "prefix": "coreHttpVersion" + } + ] + }, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tough-cookie": "^4.0.0", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.4.19" + }, + "devDependencies": { + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/dev-tool": "^1.0.0", + "@azure/logger-js": "^1.0.2", + "@microsoft/api-extractor": "7.18.11", + "@opentelemetry/api": "^1.0.1", + "@types/chai": "^4.1.6", + "@types/express": "^4.16.0", + "@types/glob": "^7.1.1", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "@types/sinon": "^9.0.4", + "@types/tough-cookie": "^4.0.0", + "@types/uuid": "^8.0.0", + "@types/xml2js": "^0.4.11", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "downlevel-dts": "^0.8.0", + "eslint": "^7.15.0", + "express": "^4.16.3", + "fetch-mock": "^9.10.1", + "glob": "^7.1.2", + "karma": "^6.2.0", + "karma-chai": "^0.1.0", + "karma-chrome-launcher": "^3.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-firefox-launcher": "^1.1.0", + "karma-mocha": "^2.0.1", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "npm-run-all": "^4.1.5", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "puppeteer": "^13.5.1", + "regenerator-runtime": "^0.13.3", + "rimraf": "^3.0.0", + "shx": "^0.3.2", + "sinon": "^9.0.2", + "ts-node": "^10.0.0", + "typescript": "~4.6.0", + "uglify-js": "^3.4.9", + "xhr-mock": "^2.4.1" + } +} diff --git a/node_modules/@azure/core-http/types/3.1/src/coreHttp.d.ts b/node_modules/@azure/core-http/types/3.1/src/coreHttp.d.ts new file mode 100644 index 00000000..f25460c6 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/coreHttp.d.ts @@ -0,0 +1,51 @@ +/// +export { WebResource, WebResourceLike, HttpRequestBody, RequestPrepareOptions, HttpMethods, ParameterValue, RequestOptionsBase, TransferProgressEvent, } from "./webResource"; +export { CommonResponse, CommonRequestInit, CommonRequestInfo } from "./nodeFetchHttpClient"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from 
"./operationArguments"; +export { OperationOptions, OperationRequestOptions, operationOptionsToRequestOptionsBase, } from "./operationOptions"; +export { OperationParameter, OperationQueryParameter, OperationURLParameter, ParameterPath, } from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { ServiceClient, ServiceClientOptions, flattenResponse, createPipelineFromOptions, ProxySettings, ProxyOptions, } from "./serviceClient"; +export { PipelineOptions, InternalPipelineOptions } from "./pipelineOptions"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +export { LogPolicyOptions, logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions, RequestPolicyOptionsLike, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy, RetryOptions, RetryMode } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy, RedirectOptions } from "./policies/redirectPolicy"; +export { keepAlivePolicy, KeepAliveOptions } from "./policies/keepAlivePolicy"; +export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, UserAgentOptions, TelemetryInfo, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, DeserializationOptions, deserializeResponseBody, DeserializationContentTypes, } from "./policies/deserializationPolicy"; +export { tracingPolicy, TracingPolicyOptions } from "./policies/tracingPolicy"; +export { MapperType, SimpleMapperType, CompositeMapperType, DictionaryMapperType, SequenceMapperType, EnumMapperType, Mapper, BaseMapper, CompositeMapper, SequenceMapper, DictionaryMapper, EnumMapper, MapperConstraints, PolymorphicDiscriminator, Serializer, UrlParameterValue, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, ServiceCallback, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +export { AbortSignalLike } from "@azure/abort-controller"; +export { delay } from "./util/delay"; +export { createSpanFunction, SpanConfig } from "./createSpanLegacy"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential } from "@azure/core-auth"; +export { AccessTokenCache, ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; +export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { Authenticator } from 
"./credentials/credentials"; +export { parseXML, stringifyXML } from "./util/xml"; +export { XML_ATTRKEY, XML_CHARKEY, SerializerOptions } from "./util/serializer.common"; +//# sourceMappingURL=coreHttp.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts b/node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts new file mode 100644 index 00000000..68a4bdd2 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts @@ -0,0 +1,32 @@ +import { Span } from "@azure/core-tracing"; +import { OperationOptions } from "./operationOptions"; +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use core-tracing instead. + * @hidden + */ +export interface SpanConfig { + /** + * Package name prefix + */ + packagePrefix: string; + /** + * Service namespace + */ + namespace: string; +} +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +export declare function createSpanFunction(args: SpanConfig): (operationName: string, operationOptions: T) => { + span: Span; + updatedOptions: T; +}; +//# sourceMappingURL=createSpanLegacy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts new file mode 100644 index 00000000..7ea7d817 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts @@ -0,0 +1,48 @@ +import { AccessToken } from "@azure/core-auth"; +/** + * Defines the default token refresh buffer duration. + */ +export declare const TokenRefreshBufferMs: number; +/** + * Provides a cache for an AccessToken that was that + * was returned from a TokenCredential. + */ +export interface AccessTokenCache { + /** + * Sets the cached token. + * + * @param accessToken - The {@link AccessToken} to be cached or null to + * clear the cached token. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached {@link AccessToken} or undefined if nothing is cached. + */ + getCachedToken(): AccessToken | undefined; +} +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class ExpiringAccessTokenCache implements AccessTokenCache { + private tokenRefreshBufferMs; + private cachedToken?; + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs?: number); + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ + getCachedToken(): AccessToken | undefined; +} +//# sourceMappingURL=accessTokenCache.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts new file mode 100644 index 00000000..38451d52 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts @@ -0,0 +1,32 @@ +import { AccessToken, GetTokenOptions, TokenCredential } from "@azure/core-auth"; +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class AccessTokenRefresher { + private credential; + private scopes; + private requiredMillisecondsBeforeNewRefresh; + private promise; + private lastCalled; + constructor(credential: TokenCredential, scopes: string | string[], requiredMillisecondsBeforeNewRefresh?: number); + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady(): boolean; + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + private getToken; + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. + */ + refresh(options: GetTokenOptions): Promise; +} +//# sourceMappingURL=accessTokenRefresher.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts new file mode 100644 index 00000000..9da89224 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts @@ -0,0 +1,44 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { + [x: string]: any; + }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + inQuery?: { + [x: string]: any; + }; +} +/** + * Authenticates to a service using an API key. + */ +export declare class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?; + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions); + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=apiKeyCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts new file mode 100644 index 00000000..8ba0538f --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts @@ -0,0 +1,36 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +export declare class BasicAuthenticationCredentials implements ServiceClientCredentials { + /** + * Username + */ + userName: string; + /** + * Password + */ + password: string; + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + authorizationScheme: string; + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName: string, password: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=basicAuthenticationCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts new file mode 100644 index 00000000..4be8a9f5 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts @@ -0,0 +1,6 @@ +/** + * A function that receives a challenge and resolves a promise with a string token. + * @deprecated The Authenticator type is not currently in use. + */ +export declare type Authenticator = (challenge: unknown) => Promise; +//# sourceMappingURL=credentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts new file mode 100644 index 00000000..5897a2ef --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts @@ -0,0 +1,14 @@ +import { WebResourceLike } from "../webResource"; +/** + * Represents an object or class with a `signRequest` method which will sign outgoing requests (for example, by setting the `Authorization` header). + */ +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike/request to be signed. 
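// A minimal usage sketch, added for illustration (not part of the npm-installed files). It
// exercises the credential classes declared above; the header name, user name, password, and
// URL are placeholders, and the WebResource (url, method) constructor is assumed from this package.
import {
  ApiKeyCredentials,
  BasicAuthenticationCredentials,
  WebResource,
} from "@azure/core-http";

// Adds `X-Api-Key: <api-key>` to every signed request (header name is an example only).
const apiKeyCredentials = new ApiKeyCredentials({
  inHeader: { "X-Api-Key": "<api-key>" },
});

// Adds `Authorization: Basic <base64(user:password)>`.
const basicCredentials = new BasicAuthenticationCredentials("<user>", "<password>");

async function signExamples(): Promise<void> {
  const request = new WebResource("https://example.invalid/items", "GET");
  await apiKeyCredentials.signRequest(request); // resolves with the mutated, signed request
  await basicCredentials.signRequest(request);
}

signExamples().catch(console.error);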
+ * @returns The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=serviceClientCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts new file mode 100644 index 00000000..a96bb0f0 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts @@ -0,0 +1,13 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +export declare class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey: string); +} +//# sourceMappingURL=topicCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts new file mode 100644 index 00000000..26fcaefa --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts @@ -0,0 +1,2 @@ +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts new file mode 100644 index 00000000..c4668588 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts @@ -0,0 +1,2 @@ +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts new file mode 100644 index 00000000..6f853022 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts @@ -0,0 +1,7 @@ +import { RequestPolicy } from "./policies/requestPolicy"; +/** + * An interface that can send HttpRequests and receive promised HttpResponses. + */ +export interface HttpClient extends RequestPolicy { +} +//# sourceMappingURL=httpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts new file mode 100644 index 00000000..40c4cf13 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts @@ -0,0 +1,3 @@ +import { HttpClient } from "./httpClient"; +export declare function getCachedDefaultHttpClient(): HttpClient; +//# sourceMappingURL=httpClientCache.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts new file mode 100644 index 00000000..1c033c65 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts @@ -0,0 +1,136 @@ +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + /** + * The value of the header. + */ + value: string; +} +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export declare type RawHttpHeaders = { + [headerName: string]: string; +}; +/** + * A collection of HTTP header key/value pairs. 
+ */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; +} +export declare function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike; +/** + * A collection of HTTP header key/value pairs. + */ +export declare class HttpHeaders implements HttpHeadersLike { + private readonly _headersMap; + constructor(rawHeaders?: RawHttpHeaders); + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; + /** + * Get the string representation of this HTTP header collection. + */ + toString(): string; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeaders; +} +//# sourceMappingURL=httpHeaders.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts new file mode 100644 index 00000000..70acd559 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts @@ -0,0 +1,78 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + /** + * The HTTP response status (e.g. 200) + */ + status: number; + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob { + } +} +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { + [key: string]: any; + }; + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; +} +/** + * The flattened response to a REST call. + * Contains the underlying {@link HttpOperationResponse} as well as + * the merged properties of the `parsedBody`, `parsedHeaders`, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. + */ + _response: HttpOperationResponse; + /** + * The flattened properties described by the `OperationSpec`, deserialized from headers and the HTTP body. + */ + [key: string]: any; +} +//# sourceMappingURL=httpOperationResponse.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts new file mode 100644 index 00000000..26520c46 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts @@ -0,0 +1,22 @@ +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export declare enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF = 0, + /** + * An error log. + */ + ERROR = 1, + /** + * A warning log. + */ + WARNING = 2, + /** + * An information log. 
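// A minimal usage sketch, added for illustration (not part of the npm-installed files),
// showing the HttpHeaders collection declared above. Header names and values are examples.
import { HttpHeaders } from "@azure/core-http";

const headers = new HttpHeaders({ "Content-Type": "application/json" });
headers.set("Retry-After", 5); // numbers are accepted as header values

console.log(headers.get("content-type"));     // lookups are case-insensitive
console.log(headers.contains("retry-after")); // true
console.log(headers.rawHeaders());            // plain { headerName: value } object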
+ */ + INFO = 3 +} +//# sourceMappingURL=httpPipelineLogLevel.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts new file mode 100644 index 00000000..1cf52847 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts @@ -0,0 +1,35 @@ +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export declare class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + minimumLogLevel: HttpPipelineLogLevel; + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel - The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel: HttpPipelineLogLevel); + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=httpPipelineLogger.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/log.d.ts b/node_modules/@azure/core-http/types/3.1/src/log.d.ts new file mode 100644 index 00000000..c757fef4 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/log.d.ts @@ -0,0 +1,2 @@ +export declare const logger: import("@azure/logger").AzureLogger; +//# sourceMappingURL=log.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts new file mode 100644 index 00000000..40bef940 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts @@ -0,0 +1,64 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { Transform } from "stream"; +import { TransferProgressEvent, WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * String URLs used when calling to `fetch()`. + */ +export declare type CommonRequestInfo = string; +/** + * An object containing information about the outgoing HTTP request. + */ +export declare type CommonRequestInit = Pick> & { + body?: any; + headers?: any; + signal?: any; +}; +/** + * An object containing information about the incoming HTTP response. + */ +export declare type CommonResponse = Pick> & { + body: any; + trailer: any; + formData: any; +}; +export declare class ReportTransform extends Transform { + private progressCallback; + private loadedBytes; + _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void; + constructor(progressCallback: (progress: TransferProgressEvent) => void); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +export declare function parseHeaders(headers: Headers): HttpHeadersLike; +/** + * An HTTP client that uses `node-fetch`. 
+ */ +export declare class NodeFetchHttpClient implements HttpClient { + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ + sendRequest(httpRequest: WebResourceLike): Promise; + private proxyAgentMap; + private keepAliveAgents; + private readonly cookieJar; + private getOrCreateAgent; + /** + * Uses `node-fetch` to perform the request. + */ + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; + /** + * Prepares a request based on the provided web resource. + */ + prepareRequest(httpRequest: WebResourceLike): Promise>; + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ + processRequest(operationResponse: HttpOperationResponse): Promise; +} +//# sourceMappingURL=nodeFetchHttpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts new file mode 100644 index 00000000..88327d29 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts @@ -0,0 +1,15 @@ +import { RequestOptionsBase } from "./webResource"; +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} +//# sourceMappingURL=operationArguments.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts new file mode 100644 index 00000000..d193c984 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts @@ -0,0 +1,56 @@ +import { RequestOptionsBase, TransferProgressEvent } from "./webResource"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationTracingOptions } from "@azure/core-tracing"; +/** + * The base options type for all operations. + */ +export interface OperationOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: OperationRequestOptions; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} +/** + * Options that allow configuring the handling of HTTP requests made by an SDK operation. + */ +export interface OperationRequestOptions { + /** + * User defined custom request headers that will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. 
+ */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); +} +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +export declare function operationOptionsToRequestOptionsBase(opts: T): RequestOptionsBase; +//# sourceMappingURL=operationOptions.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts new file mode 100644 index 00000000..4b79aaee --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts @@ -0,0 +1,54 @@ +import { Mapper } from "./serializer"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +/** + * A path which describes how to access a particular property in a given object data source. May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values. + */ +export declare type ParameterPath = string | string[] | { + [propertyName: string]: ParameterPath; +}; +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export declare function getPathStringFromParameter(parameter: OperationParameter): string; +export declare function getPathStringFromParameterPath(parameterPath: ParameterPath, mapper: Mapper): string; +//# sourceMappingURL=operationParameter.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts new file mode 100644 index 00000000..bfe4a1f6 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts @@ -0,0 +1,19 @@ +import { Mapper } from "./serializer"; +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + /** + * The mapper that will be used to deserialize the response body. 
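// A minimal usage sketch, added for illustration (not part of the npm-installed files). It
// builds the OperationOptions shape declared above and flattens it with
// operationOptionsToRequestOptionsBase; the timeout, header, and logging are arbitrary examples.
import {
  OperationOptions,
  operationOptionsToRequestOptionsBase,
} from "@azure/core-http";

const operationOptions: OperationOptions = {
  requestOptions: {
    timeout: 30_000,                               // give up after 30 seconds
    customHeaders: { "x-example-header": "demo" }, // placeholder header
    onDownloadProgress: (ev) => console.log(`received ${ev.loadedBytes} bytes`),
  },
};

// Merges abortSignal, tracingOptions, and requestOptions into a single RequestOptionsBase.
const requestOptionsBase = operationOptionsToRequestOptionsBase(operationOptions);
console.log(requestOptionsBase.timeout); // 30000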
+ */ + bodyMapper?: Mapper; + /** + * Indicates if this is an error response + */ + isError?: boolean; +} +//# sourceMappingURL=operationResponse.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts new file mode 100644 index 00000000..ff8bd2a2 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts @@ -0,0 +1,77 @@ +import { Serializer } from "./serializer"; +import { OperationParameter, OperationQueryParameter, OperationURLParameter } from "./operationParameter"; +import { HttpMethods } from "./webResource"; +import { OperationResponse } from "./operationResponse"; +/** + * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the {@link ServiceClient}. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + /** + * The media type of the request body. + * This value can be used to aide in serialization if it is provided. + */ + readonly mediaType?: "json" | "xml" | "form" | "binary" | "multipart" | "text" | "unknown" | string; + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. + */ + readonly headerParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + /** + * The different types of responses that this operation can return based on what status code is + * returned. + */ + readonly responses: { + [responseCode: string]: OperationResponse; + }; +} +/** + * Gets the list of status codes for streaming responses. 
+ * @internal + */ +export declare function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set; +//# sourceMappingURL=operationSpec.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts b/node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts new file mode 100644 index 00000000..edc81c5d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts @@ -0,0 +1,63 @@ +import { DeserializationOptions } from "./policies/deserializationPolicy"; +import { HttpClient } from "./httpClient"; +import { KeepAliveOptions } from "./policies/keepAlivePolicy"; +import { LogPolicyOptions } from "./policies/logPolicy"; +import { ProxyOptions } from "./serviceClient"; +import { RedirectOptions } from "./policies/redirectPolicy"; +import { RetryOptions } from "./policies/exponentialRetryPolicy"; +import { UserAgentOptions } from "./policies/userAgentPolicy"; +/** + * Defines options that are used to configure the HTTP pipeline for + * an SDK client. + */ +export interface PipelineOptions { + /** + * The HttpClient implementation to use for outgoing HTTP requests. Defaults + * to DefaultHttpClient. + */ + httpClient?: HttpClient; + /** + * Options that control how to retry failed requests. + */ + retryOptions?: RetryOptions; + /** + * Options to configure a proxy for outgoing requests. + */ + proxyOptions?: ProxyOptions; + /** + * Options for how HTTP connections should be maintained for future + * requests. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; +} +/** + * Defines options that are used to configure internal options of + * the HTTP pipeline for an SDK client. + */ +export interface InternalPipelineOptions extends PipelineOptions { + /** + * Options to configure API response deserialization. + */ + deserializationOptions?: DeserializationOptions; + /** + * Options to configure request/response logging. + */ + loggingOptions?: LogPolicyOptions; + /** + * Configure whether to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Send JSON Array payloads as NDJSON. + */ + sendStreamingJson?: boolean; +} +//# sourceMappingURL=pipelineOptions.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts new file mode 100644 index 00000000..2f06728a --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts @@ -0,0 +1,33 @@ +import { TokenCredential } from "@azure/core-auth"; +import { RequestPolicyFactory } from "../policies/requestPolicy"; +interface TokenCyclerOptions { + /** + * The window of time before token expiration during which the token will be + * considered unusable due to risk of the token expiring before sending the + * request. + * + * This will only become meaningful if the refresh fails for over + * (refreshWindow - forcedRefreshWindow) milliseconds. + */ + forcedRefreshWindowInMs: number; + /** + * Interval in milliseconds to retry failed token refreshes. + */ + retryIntervalInMs: number; + /** + * The window of time before token expiration during which + * we will attempt to refresh the token. 
+ */ + refreshWindowInMs: number; +} +export declare const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions; +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +export declare function bearerTokenAuthenticationPolicy(credential: TokenCredential, scopes: string | string[]): RequestPolicyFactory; +export {}; +//# sourceMappingURL=bearerTokenAuthenticationPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts new file mode 100644 index 00000000..e088109e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts @@ -0,0 +1,59 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { SerializerOptions } from "../util/serializer.common"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to configure API response deserialization. + */ +export interface DeserializationOptions { + /** + * Configures the expected content types for the deserialization of + * JSON and XML response bodies. + */ + expectedContentTypes: DeserializationContentTypes; +} +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export declare function deserializationPolicy(deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions): RequestPolicyFactory; +export declare const defaultJsonContentTypes: string[]; +export declare const defaultXmlContentTypes: string[]; +export declare const DefaultDeserializationOptions: DeserializationOptions; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export declare class DeserializationPolicy extends BaseRequestPolicy { + readonly jsonContentTypes: string[]; + readonly xmlContentTypes: string[]; + readonly xmlCharKey: string; + constructor(nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions, deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions); + sendRequest(request: WebResourceLike): Promise; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. 
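// A minimal usage sketch, added for illustration (not part of the npm-installed files). It
// creates the two policy factories declared above; the TokenCredential is a hard-coded stub
// and the scope URL is a placeholder.
import {
  AccessToken,
  GetTokenOptions,
  TokenCredential,
  bearerTokenAuthenticationPolicy,
  deserializationPolicy,
} from "@azure/core-http";

const stubCredential: TokenCredential = {
  async getToken(
    _scopes: string | string[],
    _options?: GetTokenOptions
  ): Promise<AccessToken | null> {
    // A real credential would call an identity service here.
    return { token: "<opaque-token>", expiresOnTimestamp: Date.now() + 3_600_000 };
  },
};

// Each factory can be handed to a request pipeline, which instantiates the policy per request.
const authPolicyFactory = bearerTokenAuthenticationPolicy(
  stubCredential,
  "https://example.invalid/.default"
);
const deserializationPolicyFactory = deserializationPolicy();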
+ * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +export declare function deserializeResponseBody(jsonContentTypes: string[], xmlContentTypes: string[], response: HttpOperationResponse, options?: SerializerOptions): Promise; +//# sourceMappingURL=deserializationPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts new file mode 100644 index 00000000..72def6e7 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts @@ -0,0 +1,13 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting + * to use it will results in error being thrown. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts new file mode 100644 index 00000000..73710e65 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts @@ -0,0 +1,29 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts new file mode 100644 index 00000000..2662d917 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts @@ -0,0 +1,73 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +export declare function exponentialRetryPolicy(retryCount?: number, retryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +export declare enum RetryMode { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + Exponential = 0 +} +/** + * Options that control how to retry failed requests. + */ +export interface RetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. + */ + maxRetries?: number; + /** + * The amount of delay in milliseconds between retry attempts. Defaults to 30000 + * (30 seconds). The delay increases exponentially with each retry up to a maximum + * specified by maxRetryDelayInMs. + */ + retryDelayInMs?: number; + /** + * The maximum delay in milliseconds allowed before retrying an operation. Defaults + * to 90000 (90 seconds). + */ + maxRetryDelayInMs?: number; + /** + * Currently supporting only Exponential mode. + */ + mode?: RetryMode; +} +export declare const DefaultRetryOptions: RetryOptions; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export declare class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. + */ + retryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. 
+ */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=exponentialRetryPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts new file mode 100644 index 00000000..62203ad7 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts @@ -0,0 +1,14 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ +export declare function generateClientRequestIdPolicy(requestIdHeaderName?: string): RequestPolicyFactory; +export declare class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + private _requestIdHeaderName; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _requestIdHeaderName: string); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=generateClientRequestIdPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts new file mode 100644 index 00000000..dd195c48 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts @@ -0,0 +1,46 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how HTTP connections should be maintained for future + * requests. + */ +export interface KeepAliveOptions { + /** + * When true, connections will be kept alive for multiple requests. + * Defaults to true. + */ + enable: boolean; +} +/** + * By default, HTTP connections are maintained for future requests. + */ +export declare const DefaultKeepAliveOptions: KeepAliveOptions; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +export declare function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory; +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +export declare class KeepAlivePolicy extends BaseRequestPolicy { + private readonly keepAliveOptions; + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, keepAliveOptions: KeepAliveOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=keepAlivePolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts new file mode 100644 index 00000000..c6561afa --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts @@ -0,0 +1,77 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Debugger } from "@azure/logger"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Sanitizer } from "../util/sanitizer"; +import { WebResourceLike } from "../webResource"; +/** + * Options to pass to the {@link logPolicy}. + * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged. + */ +export interface LogPolicyOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to: + * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id, + * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials, + * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers, + * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding, + * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match, + * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent. + * + * Any headers specified in this field will be added to that list. + * Any other values will be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; + /** + * The Debugger (logger) instance to use for writing pipeline logs. + */ + logger?: Debugger; +} +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +export declare function logPolicy(loggingOptions?: LogPolicyOptions): RequestPolicyFactory; +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +export declare class LogPolicy extends BaseRequestPolicy { + logger: Debugger; + sanitizer: Sanitizer; + /* + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + + + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + allowedHeaderNames: Set; + /* + * Query string names whose values will be logged when logging is enabled. 
By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + + + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + allowedQueryParameters: Set; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, { logger, allowedHeaderNames, allowedQueryParameters, }?: LogPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + private logRequest; + private logResponse; +} +//# sourceMappingURL=logPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts new file mode 100644 index 00000000..09a257fb --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts new file mode 100644 index 00000000..2e50b8b4 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts new file mode 100644 index 00000000..57c6b438 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.native.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts new file mode 100644 index 00000000..72e4ed6d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts @@ -0,0 +1,3 @@ +import { RequestPolicyFactory } from "./requestPolicy"; +export declare function ndJsonPolicy(): RequestPolicyFactory; +//# sourceMappingURL=ndJsonPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts new file mode 100644 index 00000000..963560df --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts @@ -0,0 +1,11 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +export declare function getDefaultProxySettings(_proxyUrl?: string): 
ProxySettings | undefined; +export declare function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts new file mode 100644 index 00000000..f202890e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts @@ -0,0 +1,37 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export declare const globalNoProxyList: string[]; +/** + * @internal + */ +export declare function loadNoProxy(): string[]; +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +export declare function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined; +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export declare function proxyPolicy(proxySettings?: ProxySettings, options?: { + /** a list of patterns to override those loaded from NO_PROXY environment variable. */ + customNoProxyList?: string[]; +}): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + private customNoProxyList?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, proxySettings: ProxySettings, customNoProxyList?: string[] | undefined); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts new file mode 100644 index 00000000..dcf0aa94 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts @@ -0,0 +1,33 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how redirect responses are handled. + */ +export interface RedirectOptions { + /** + * When true, redirect responses are followed. Defaults to true. + */ + handleRedirects: boolean; + /** + * The maximum number of times the redirect URL will be tried before + * failing. Defaults to 20. 
+ */ + maxRetries?: number; +} +export declare const DefaultRedirectOptions: RedirectOptions; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +export declare function redirectPolicy(maximumRetries?: number): RequestPolicyFactory; +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +export declare class RedirectPolicy extends BaseRequestPolicy { + readonly maxRetries: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, maxRetries?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=redirectPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts new file mode 100644 index 00000000..3fb5d15e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts @@ -0,0 +1,102 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. + */ +export declare type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; +/** + * The underlying structure of a request policy. + */ +export interface RequestPolicy { + /** + * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made. + * @param httpRequest - {@link WebResourceLike} describing the request to be made. + */ + sendRequest(httpRequest: WebResourceLike): Promise; +} +/** + * The base class from which all request policies derive. + */ +export declare abstract class BaseRequestPolicy implements RequestPolicy { + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + readonly _nextPolicy: RequestPolicy; + /** + * The options that can be passed to a given request policy. + */ + readonly _options: RequestPolicyOptionsLike; + /** + * The main method to implement that manipulates a request/response. + */ + protected constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy: RequestPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options: RequestPolicyOptionsLike); + /** + * Sends a network request based on the given web resource. + * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made. + */ + abstract sendRequest(webResource: WebResourceLike): Promise; + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. 
If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export declare class RequestPolicyOptions { + private _logger?; + constructor(_logger?: HttpPipelineLogger | undefined); + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. 
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=requestPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts new file mode 100644 index 00000000..f17bc67d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts @@ -0,0 +1,10 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function rpRegistrationPolicy(retryTimeout?: number): RequestPolicyFactory; +export declare class RPRegistrationPolicy extends BaseRequestPolicy { + readonly _retryTimeout: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _retryTimeout?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=rpRegistrationPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts new file mode 100644 index 00000000..577d72f0 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts @@ -0,0 +1,20 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +export declare function signingPolicy(authenticationProvider: ServiceClientCredentials): RequestPolicyFactory; +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + */ +export declare class SigningPolicy extends BaseRequestPolicy { + authenticationProvider: ServiceClientCredentials; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, authenticationProvider: ServiceClientCredentials); + signRequest(request: WebResourceLike): Promise; + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=signingPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts new file mode 100644 index 00000000..cdb8a549 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts @@ -0,0 +1,28 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. 
+ * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +export declare function systemErrorRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +export declare class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=systemErrorRetryPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts new file mode 100644 index 00000000..193dc257 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts @@ -0,0 +1,35 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +declare type ResponseHandler = (httpRequest: WebResourceLike, response: HttpOperationResponse) => Promise; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +export declare function throttlingRetryPolicy(): RequestPolicyFactory; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export declare class ThrottlingRetryPolicy extends BaseRequestPolicy { + private _handleResponse; + private numberOfRetries; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _handleResponse?: ResponseHandler); + sendRequest(httpRequest: WebResourceLike): Promise; + private _defaultResponseHandler; + static parseRetryAfterHeader(headerValue: string): number | undefined; + static parseDateRetryAfterHeader(headerValue: string): number | undefined; +} +export {}; +//# sourceMappingURL=throttlingRetryPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts new file mode 100644 index 00000000..793ef1c5 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts @@ -0,0 +1,31 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Span } from "@azure/core-tracing"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to customize the tracing policy. + */ +export interface TracingPolicyOptions { + /** + * User agent used to better identify the outgoing requests traced by the tracing policy. + */ + userAgent?: string; +} +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +export declare function tracingPolicy(tracingOptions?: TracingPolicyOptions): RequestPolicyFactory; +/** + * A policy that wraps outgoing requests with a tracing span. + */ +export declare class TracingPolicy extends BaseRequestPolicy { + private userAgent?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, tracingOptions: TracingPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + tryCreateSpan(request: WebResourceLike): Span | undefined; + private tryProcessError; + private tryProcessResponse; +} +//# sourceMappingURL=tracingPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts new file mode 100644 index 00000000..6a4b2f7e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts @@ -0,0 +1,49 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { getDefaultUserAgentKey } from "./msRestUserAgentPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Telemetry information. Key/value pairs to include inside the User-Agent string. + */ +export declare type TelemetryInfo = { + key?: string; + value?: string; +}; +/** + * Options for adding user agent details to outgoing requests. + */ +export interface UserAgentOptions { + /** + * String prefix to add to the user agent for outgoing requests. + * Defaults to an empty string. 
+ */ + userAgentPrefix?: string; +} +export declare const getDefaultUserAgentHeaderName: typeof getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +export declare function getDefaultUserAgentValue(): string; +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +export declare function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory; +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +export declare class UserAgentPolicy extends BaseRequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptions; + protected headerKey: string; + protected headerValue: string; + constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions, headerKey: string, headerValue: string); + sendRequest(request: WebResourceLike): Promise; + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request: WebResourceLike): void; +} +//# sourceMappingURL=userAgentPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts b/node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts new file mode 100644 index 00000000..0d1cf99c --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts @@ -0,0 +1,14 @@ +/// +import * as http from "http"; +import * as https from "https"; +import * as tunnel from "tunnel"; +import { HttpHeadersLike } from "./httpHeaders"; +import { ProxySettings } from "./serviceClient"; +export declare type ProxyAgent = { + isHttps: boolean; + agent: http.Agent | https.Agent; +}; +export declare function createProxyAgent(requestUrl: string, proxySettings: ProxySettings, headers?: HttpHeadersLike): ProxyAgent; +export declare function isUrlHttps(url: string): boolean; +export declare function createTunnel(isRequestHttps: boolean, isProxyHttps: boolean, tunnelOptions: tunnel.HttpsOverHttpsOptions): http.Agent | https.Agent; +//# sourceMappingURL=proxyAgent.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts b/node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts new file mode 100644 index 00000000..b68b15d9 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts @@ -0,0 +1,26 @@ +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export declare enum QueryCollectionFormat { + /** + * CSV: Each pair of segments joined by a single comma. + */ + Csv = ",", + /** + * SSV: Each pair of segments joined by a single space character. + */ + Ssv = " ", + /** + * TSV: Each pair of segments joined by a single tab character. + */ + Tsv = "\t", + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + Pipes = "|", + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. 
`?queryParam=value1&queryParam=value2` + */ + Multi = "Multi" +} +//# sourceMappingURL=queryCollectionFormat.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/restError.d.ts b/node_modules/@azure/core-http/types/3.1/src/restError.d.ts new file mode 100644 index 00000000..960d2ff8 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/restError.d.ts @@ -0,0 +1,37 @@ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +/** + * An error resulting from an HTTP request to a service endpoint. + */ +export declare class RestError extends Error { + /** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ + static readonly REQUEST_SEND_ERROR: string; + /** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ + static readonly PARSE_ERROR: string; + /** + * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT). + */ + code?: string; + /** + * The HTTP status code of the response, if one was returned. + */ + statusCode?: number; + /** + * Outgoing request. + */ + request?: WebResourceLike; + /** + * Incoming response. + */ + response?: HttpOperationResponse; + /** + * Any additional details. In the case of deserialization errors, can be the processed response. + */ + details?: unknown; + constructor(message: string, code?: string, statusCode?: number, request?: WebResourceLike, response?: HttpOperationResponse); +} +//# sourceMappingURL=restError.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/serializer.d.ts b/node_modules/@azure/core-http/types/3.1/src/serializer.d.ts new file mode 100644 index 00000000..30eb1b59 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/serializer.d.ts @@ -0,0 +1,356 @@ +import { SerializerOptions } from "./util/serializer.common"; +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +export declare class Serializer { + /** + * The provided model mapper. + */ + readonly modelMappers: { + [key: string]: any; + }; + /** + * Whether the contents are XML or not. + */ + readonly isXML?: boolean | undefined; + constructor( + /** + * The provided model mapper. + */ + modelMappers?: { + [key: string]: any; + }, + /** + * Whether the contents are XML or not. + */ + isXML?: boolean | undefined); + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. + */ + validateConstraints(mapper: Mapper, value: unknown, objectName: string): void; + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. 
+ * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper: Mapper, object: unknown, objectName?: string, options?: SerializerOptions): any; + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper: Mapper, responseBody: unknown, objectName: string, options?: SerializerOptions): any; +} +/** + * Description of various value constraints such as integer ranges and string regex. + */ +export interface MapperConstraints { + /** + * The value should be less than or equal to the `InclusiveMaximum` value. + */ + InclusiveMaximum?: number; + /** + * The value should be less than the `ExclusiveMaximum` value. + */ + ExclusiveMaximum?: number; + /** + * The value should be greater than or equal to the `InclusiveMinimum` value. + */ + InclusiveMinimum?: number; + /** + * The value should be greater than the `InclusiveMinimum` value. + */ + ExclusiveMinimum?: number; + /** + * The length should be smaller than the `MaxLength`. + */ + MaxLength?: number; + /** + * The length should be bigger than the `MinLength`. + */ + MinLength?: number; + /** + * The value must match the pattern. + */ + Pattern?: RegExp; + /** + * The value must contain fewer items than the MaxItems value. + */ + MaxItems?: number; + /** + * The value must contain more items than the `MinItems` value. + */ + MinItems?: number; + /** + * The value must contain only unique items. + */ + UniqueItems?: true; + /** + * The value should be exactly divisible by the `MultipleOf` value. + */ + MultipleOf?: number; +} +/** + * Type of the mapper. Includes known mappers. + */ +export declare type MapperType = SimpleMapperType | CompositeMapperType | SequenceMapperType | DictionaryMapperType | EnumMapperType; +/** + * The type of a simple mapper. + */ +export interface SimpleMapperType { + /** + * Name of the type of the property. + */ + name: "Base64Url" | "Boolean" | "ByteArray" | "Date" | "DateTime" | "DateTimeRfc1123" | "Object" | "Stream" | "String" | "TimeSpan" | "UnixTime" | "Uuid" | "Number" | "any"; +} +/** + * Helps build a mapper that describes how to map a set of properties of an object based on other mappers. + * + * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`. + */ +export interface CompositeMapperType { + /** + * Name of the composite mapper type. + */ + name: "Composite"; + /** + * Use `className` to reference another type definition. + */ + className?: string; + /** + * Use `modelProperties` when the reference to the other type has been resolved. + */ + modelProperties?: { + [propertyName: string]: Mapper; + }; + /** + * Used when a model has `additionalProperties: true`. Allows the generic processing of unnamed model properties on the response object. + */ + additionalProperties?: Mapper; + /** + * The name of the top-most parent scheme, the one that has no parents. + */ + uberParent?: string; + /** + * A polymorphic discriminator. + */ + polymorphicDiscriminator?: PolymorphicDiscriminator; +} +/** + * Helps build a mapper that describes how to parse a sequence of mapped values. 
+ */ +export interface SequenceMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Sequence"; + /** + * The mapper to use to map each one of the properties of the sequence. + */ + element: Mapper; +} +/** + * Helps build a mapper that describes how to parse a dictionary of mapped values. + */ +export interface DictionaryMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Dictionary"; + /** + * The mapper to use to map the value of each property in the dictionary. + */ + value: Mapper; +} +/** + * Helps build a mapper that describes how to parse an enum value. + */ +export interface EnumMapperType { + /** + * Name of the enum type mapper. + */ + name: "Enum"; + /** + * Values allowed by this mapper. + */ + allowedValues: any[]; +} +/** + * The base definition of a mapper. Can be used for XML and plain JavaScript objects. + */ +export interface BaseMapper { + /** + * Name for the xml element + */ + xmlName?: string; + /** + * Xml element namespace + */ + xmlNamespace?: string; + /** + * Xml element namespace prefix + */ + xmlNamespacePrefix?: string; + /** + * Determines if the current property should be serialized as an attribute of the parent xml element + */ + xmlIsAttribute?: boolean; + /** + * Name for the xml elements when serializing an array + */ + xmlElementName?: string; + /** + * Whether or not the current property should have a wrapping XML element + */ + xmlIsWrapped?: boolean; + /** + * Whether or not the current property is readonly + */ + readOnly?: boolean; + /** + * Whether or not the current property is a constant + */ + isConstant?: boolean; + /** + * Whether or not the current property is required + */ + required?: boolean; + /** + * Whether or not the current property allows mull as a value + */ + nullable?: boolean; + /** + * The name to use when serializing + */ + serializedName?: string; + /** + * Type of the mapper + */ + type: MapperType; + /** + * Default value when one is not explicitly provided + */ + defaultValue?: any; + /** + * Constraints to test the current value against + */ + constraints?: MapperConstraints; +} +/** + * Mappers are definitions of the data models used in the library. + * These data models are part of the Operation or Client definitions in the responses or parameters. + */ +export declare type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; +/** + * Used to disambiguate discriminated type unions. + * For example, if response can have many shapes but also includes a 'kind' field (or similar), + * that field can be used to determine how to deserialize the response to the correct type. + */ +export interface PolymorphicDiscriminator { + /** + * Name of the discriminant property in the original JSON payload, e.g. `@odata.kind`. + */ + serializedName: string; + /** + * Name to use on the resulting object instead of the original property name. + * Useful since the JSON property could be difficult to work with. + * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`. + */ + clientName: string; + /** + * It may contain any other property. + */ + [key: string]: string; +} +/** + * A mapper composed of other mappers. + */ +export interface CompositeMapper extends BaseMapper { + /** + * The type descriptor of the `CompositeMapper`. + */ + type: CompositeMapperType; +} +/** + * A mapper describing arrays. 
+ */ +export interface SequenceMapper extends BaseMapper { + /** + * The type descriptor of the `SequenceMapper`. + */ + type: SequenceMapperType; +} +/** + * A mapper describing plain JavaScript objects used as key/value pairs. + */ +export interface DictionaryMapper extends BaseMapper { + /** + * The type descriptor of the `DictionaryMapper`. + */ + type: DictionaryMapperType; + /** + * Optionally, a prefix to add to the header collection. + */ + headerCollectionPrefix?: string; +} +/** + * A mapper describing an enum value. + */ +export interface EnumMapper extends BaseMapper { + /** + * The type descriptor of the `EnumMapper`. + */ + type: EnumMapperType; +} +/** + * An interface representing an URL parameter value. + */ +export interface UrlParameterValue { + /** + * The URL value. + */ + value: string; + /** + * Whether to keep or skip URL encoding. + */ + skipUrlEncoding: boolean; +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +export declare function serializeObject(toSerialize: unknown): any; +/** + * String enum containing the string types of property mappers. + */ +export declare const MapperType: { + Date: "Date"; + Base64Url: "Base64Url"; + Boolean: "Boolean"; + ByteArray: "ByteArray"; + DateTime: "DateTime"; + DateTimeRfc1123: "DateTimeRfc1123"; + Object: "Object"; + Stream: "Stream"; + String: "String"; + TimeSpan: "TimeSpan"; + UnixTime: "UnixTime"; + Number: "Number"; + Composite: "Composite"; + Sequence: "Sequence"; + Dictionary: "Dictionary"; + Enum: "Enum"; +}; +//# sourceMappingURL=serializer.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts new file mode 100644 index 00000000..f77862cd --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts @@ -0,0 +1,168 @@ +import { Mapper, Serializer } from "./serializer"; +import { DeserializationContentTypes } from "./policies/deserializationPolicy"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { ParameterPath } from "./operationParameter"; +import { OperationSpec } from "./operationSpec"; +import { RequestPrepareOptions, WebResourceLike } from "./webResource"; +import { RequestPolicyFactory } from "./policies/requestPolicy"; +import { ServiceCallback } from "./util/utils"; +import { TokenCredential } from "@azure/core-auth"; +import { HttpClient } from "./httpClient"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { InternalPipelineOptions } from "./pipelineOptions"; +import { OperationArguments } from "./operationArguments"; +import { OperationResponse } from "./operationResponse"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +/** + * Options to configure a proxy for outgoing requests (Node.js only). + */ +export interface ProxySettings { + /** + * The proxy's host address. + */ + host: string; + /** + * The proxy host's port. + */ + port: number; + /** + * The user name to authenticate with the proxy, if required. + */ + username?: string; + /** + * The password to authenticate with the proxy, if required. + */ + password?: string; +} +/** + * An alias of {@link ProxySettings} for future use. + */ +export declare type ProxyOptions = ProxySettings; +/** + * Options to be provided while creating the client. 
+ */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: RequestPolicyFactory[] | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. + */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. + */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-useragent" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * If specified, will be used to build the BearerTokenAuthenticationPolicy. + */ + credentialScopes?: string | string[]; +} +/** + * ServiceClient sends service requests and receives responses. + */ +export declare class ServiceClient { + /** + * If specified, this is the base URI that requests will be made against for this ServiceClient. + * If it is not specified, then all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + protected requestContentType?: string; + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient; + private readonly _requestPolicyOptions; + private readonly _requestPolicyFactories; + private readonly _withCredentials; + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. 
+ */ + constructor(credentials?: TokenCredential | ServiceClientCredentials, options?: ServiceClientOptions); + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. + */ + sendOperationRequest(operationArguments: OperationArguments, operationSpec: OperationSpec, callback?: ServiceCallback): Promise; +} +export declare function serializeRequestBody(serviceClient: ServiceClient, httpRequest: WebResourceLike, operationArguments: OperationArguments, operationSpec: OperationSpec): void; +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +export declare function createPipelineFromOptions(pipelineOptions: InternalPipelineOptions, authPolicyFactory?: RequestPolicyFactory): ServiceClientOptions; +export declare type PropertyParent = { + [propertyName: string]: any; +}; +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export declare function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent; +export declare function getOperationArgumentValueFromParameterPath(serviceClient: ServiceClient, operationArguments: OperationArguments, parameterPath: ParameterPath, parameterMapper: Mapper, serializer: Serializer): any; +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. + */ +export declare function flattenResponse(_response: HttpOperationResponse, responseSpec: OperationResponse | undefined): RestResponse; +//# sourceMappingURL=serviceClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/url.d.ts b/node_modules/@azure/core-http/types/3.1/src/url.d.ts new file mode 100644 index 00000000..986a955d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/url.d.ts @@ -0,0 +1,160 @@ +/** + * A class that handles the query portion of a URLBuilder. + */ +export declare class URLQuery { + private readonly _rawQuery; + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any(): boolean; + /** + * Get the keys of the query string. + */ + keys(): string[]; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName: string, parameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. 
+ */ + get(parameterName: string): string | string[] | undefined; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString(): string; + /** + * Parse a URLQuery from the provided text. + */ + static parse(text: string): URLQuery; +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export declare class URLBuilder { + private _scheme; + private _host; + private _port; + private _path; + private _query; + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme: string | undefined): void; + /** + * Get the scheme that has been set in this URL. + */ + getScheme(): string | undefined; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host: string | undefined): void; + /** + * Get the host that has been set in this URL. + */ + getHost(): string | undefined; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port: number | string | undefined): void; + /** + * Get the port that has been set in this URL. + */ + getPort(): string | undefined; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path: string | undefined): void; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path: string | undefined): void; + /** + * Get the path that has been set in this URL. + */ + getPath(): string | undefined; + /** + * Set the query in this URL. + */ + setQuery(query: string | undefined): void; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName: string): string | string[] | undefined; + /** + * Get the query in this URL. + */ + getQuery(): string | undefined; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set; + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString(): string; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue: string, replaceValue: string): void; + /** + * Parses a given string URL into a new {@link URLBuilder}. 
+ */ + static parse(text: string): URLBuilder; +} +declare type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; +declare type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; +export declare class URLToken { + readonly text: string; + readonly type: URLTokenType; + constructor(text: string, type: URLTokenType); + static scheme(text: string): URLToken; + static host(text: string): URLToken; + static port(text: string): URLToken; + static path(text: string): URLToken; + static query(text: string): URLToken; +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export declare function isAlphaNumericCharacter(character: string): boolean; +/** + * A class that tokenizes URL strings. + */ +export declare class URLTokenizer { + readonly _text: string; + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + constructor(_text: string, state?: URLTokenizerState); + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current(): URLToken | undefined; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next(): boolean; +} +export {}; +//# sourceMappingURL=url.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts new file mode 100644 index 00000000..94d313ee --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts new file mode 100644 index 00000000..1b7a4de7 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts new file mode 100644 index 00000000..cfa1b24c --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts @@ -0,0 +1,72 @@ +/** + * A set of constants used internally when processing requests. 
+ */ +export declare const Constants: { + /** + * The core-http version + */ + coreHttpVersion: string; + /** + * Specifies HTTP. + */ + HTTP: string; + /** + * Specifies HTTPS. + */ + HTTPS: string; + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: string; + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: string; + /** + * Specifies NO Proxy. + */ + NO_PROXY: string; + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: string; + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: string; + GET: string; + DELETE: string; + POST: string; + MERGE: string; + HEAD: string; + PATCH: string; + }; + StatusCodes: { + TooManyRequests: number; + ServiceUnavailable: number; + }; + }; + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: string; + AUTHORIZATION_SCHEME: string; + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: string; + /** + * The UserAgent header. + */ + USER_AGENT: string; + }; +}; +//# sourceMappingURL=constants.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/delay.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/delay.d.ts new file mode 100644 index 00000000..fd55daec --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/delay.d.ts @@ -0,0 +1,15 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * @param abortSignal - The abortSignal associated with containing operation. + * @param abortErrorMsg - The abort error message associated with containing operation. + * @returns - Resolved promise + */ +export declare function delay<T>(delayInMs: number, value?: T, options?: { + abortSignal?: AbortSignalLike; + abortErrorMsg?: string; +}): Promise<T | void>; +//# sourceMappingURL=delay.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts new file mode 100644 index 00000000..f683083a --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts @@ -0,0 +1,40 @@ +import { HttpOperationResponse } from "../coreHttp"; +export declare const DEFAULT_CLIENT_RETRY_COUNT = 3; +export declare const DEFAULT_CLIENT_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MAX_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MIN_RETRY_INTERVAL: number; +export declare function isNumber(n: unknown): n is number; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial check on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise.
+ */ +export declare function shouldRetry(retryLimit: number, predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean, retryData: RetryData, response?: HttpOperationResponse, error?: RetryError): boolean; +/** + * @internal + * Updates the retry data for the next attempt. + * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation's error, if any. + */ +export declare function updateRetryData(retryOptions: { + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; +}, retryData?: RetryData, err?: RetryError): RetryData; +//# sourceMappingURL=exponentialBackoffStrategy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts new file mode 100644 index 00000000..9f3d2110 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts @@ -0,0 +1,2 @@ +export declare const custom: {}; +//# sourceMappingURL=inspect.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts new file mode 100644 index 00000000..d203786d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts @@ -0,0 +1,2 @@ +export declare const custom: symbol; +//# sourceMappingURL=inspect.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts new file mode 100644 index 00000000..972d5452 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts @@ -0,0 +1,25 @@ +export interface SanitizerOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; +} +export declare class Sanitizer { + allowedHeaderNames: Set<string>; + allowedQueryParameters: Set<string>; + constructor({ allowedHeaderNames, allowedQueryParameters }?: SanitizerOptions); + sanitize(obj: unknown): string; + private sanitizeHeaders; + private sanitizeQuery; + private sanitizeObject; + private sanitizeUrl; +} +//# sourceMappingURL=sanitizer.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts new file mode 100644 index 00000000..e3e4e2d2 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts @@ -0,0 +1,26 @@ +/** + * Default key used to access the XML attributes. + */ +export declare const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +export declare const XML_CHARKEY = "_"; +/** + * Options to govern behavior of xml parser and builder. + */ +export interface SerializerOptions { + /** + * indicates the name of the root element in the resulting XML when building XML. + */ + rootName?: string; + /** + * indicates whether the root element is to be included or not in the output when parsing XML.
+ */ + includeRoot?: boolean; + /** + * key used to access the XML value content when parsing XML. + */ + xmlCharKey?: string; +} +//# sourceMappingURL=serializer.common.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts new file mode 100644 index 00000000..544c3a3d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts @@ -0,0 +1,5 @@ +/** + * Maximum number of retries for the throttling retry policy + */ +export declare const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +//# sourceMappingURL=throttlingRetryStrategy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/typeguards.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/typeguards.d.ts new file mode 100644 index 00000000..f6b32c66 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/typeguards.d.ts @@ -0,0 +1,7 @@ +/** + * Helper TypeGuard that checks if the value is not null or undefined. + * @param thing - Anything + * @internal + */ +export declare function isDefined(thing: T | undefined | null): thing is T; +//# sourceMappingURL=typeguards.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts new file mode 100644 index 00000000..abffcd57 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts @@ -0,0 +1,131 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export declare const isNode: boolean; +/** + * Checks if a parsed URL is HTTPS + * + * @param urlToCheck - The url to check + * @returns True if the URL is HTTPS; false otherwise. + */ +export declare function urlIsHTTPS(urlToCheck: { + protocol: string; +}): boolean; +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +export declare function encodeUri(uri: string): string; +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +export declare function stripResponse(response: HttpOperationResponse): any; +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +export declare function stripRequest(request: WebResourceLike): WebResourceLike; +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +export declare function isValidUuid(uuid: string): boolean; +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUuid(): string; +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. 
+ * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +export declare function executePromisesSequentially(promiseFactories: Array<any>, kickstart: unknown): Promise<any>; +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback<TResult> { + /** + * A method that will be invoked as a callback to a service function. + * @param err - The error occurred if any, while executing the request; otherwise null. + * @param result - The deserialized response body if an error did not occur. + * @param request - The raw/actual request sent to the server if an error did not occur. + * @param response - The raw/actual response from the server if an error did not occur. + */ + (err: Error | RestError | null, result?: TResult, request?: WebResourceLike, response?: HttpOperationResponse): void; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +export declare function promiseToCallback(promise: Promise<any>): (cb: Function) => void; +/** + * Converts a Promise to a service callback. + * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +export declare function promiseToServiceCallback<T>(promise: Promise<HttpOperationResponse>): (cb: ServiceCallback<T>) => void; +export declare function prepareXMLRootList(obj: unknown, elementName: string, xmlNamespaceKey?: string, xmlNamespace?: string): { + [s: string]: any; +}; +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +export declare function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +export declare function isDuration(value: string): boolean; +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +export declare function replaceAll(value: string | undefined, searchValue: string, replaceValue: string): string | undefined; +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true if it is primitive type, false otherwise. + */ +export declare function isPrimitiveType(value: unknown): boolean; +export declare function getEnvironmentValue(name: string): string | undefined; +/** + * @internal + */ +export declare type UnknownObject = { + [s: string]: unknown; +}; +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date.
+ */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=utils.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts new file mode 100644 index 00000000..dfba9cd3 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts @@ -0,0 +1,4 @@ +import { SerializerOptions } from "./serializer.common"; +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +export declare function stringifyXML(content: unknown, opts?: SerializerOptions): string; +//# sourceMappingURL=xml.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts new file mode 100644 index 00000000..42c90568 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts @@ -0,0 +1,14 @@ +import { SerializerOptions } from "./serializer.common"; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +export declare function stringifyXML(obj: unknown, opts?: SerializerOptions): string; +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +//# sourceMappingURL=xml.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/webResource.d.ts b/node_modules/@azure/core-http/types/3.1/src/webResource.d.ts new file mode 100644 index 00000000..d5bb5292 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/webResource.d.ts @@ -0,0 +1,445 @@ +/// +import { Context, SpanOptions } from "@azure/core-tracing"; +import { HttpHeadersLike } from "./httpHeaders"; +import { Mapper } from "./serializer"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { OperationSpec } from "./operationSpec"; +import { ProxySettings } from "./serviceClient"; +import { SerializerOptions } from "./util/serializer.common"; +/** + * List of supported HTTP methods. + */ +export declare type HttpMethods = "GET" | "PUT" | "POST" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS" | "TRACE"; +/** + * Possible HTTP request body types + */ +export declare type HttpRequestBody = Blob | string | ArrayBuffer | ArrayBufferView | (() => NodeJS.ReadableStream); +/** + * Fired in response to upload or download progress. + */ +export declare type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; +/** + * A description of a HTTP request to be made to a remote server. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. 
+ */ + streamResponseBody?: boolean; + /** + * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream. + */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * A query string represented as an object. + */ + query?: { + [key: string]: any; + }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * A unique identifier for the request. Used for logging and tracing. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} +export declare function isWebResourceLike(object: unknown): object is WebResourceLike; +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +export declare class WebResource implements WebResourceLike { + /** + * URL of the outgoing request. + */ + url: string; + /** + * HTTP method to use. + */ + method: HttpMethods; + /** + * Request body. + */ + body?: any; + /** + * HTTP headers. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. + */ + streamResponseBody?: boolean; + /** + * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream. 
+ */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * Query added to the URL. + */ + query?: { + [key: string]: any; + }; + /** + * Specification of the HTTP request. + */ + operationSpec?: OperationSpec; + /** + * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination. + */ + withCredentials: boolean; + /** + * How long to wait in milliseconds before aborting the request. + */ + timeout: number; + /** + * What proxy to use, if necessary. + */ + proxySettings?: ProxySettings; + /** + * Whether to keep the HTTP connections alive throughout requests. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Unique identifier of the outgoing request. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating Spans. + */ + tracingContext?: Context; + constructor(url?: string, method?: HttpMethods, body?: unknown, query?: { + [key: string]: any; + }, headers?: { + [key: string]: any; + } | HttpHeadersLike, streamResponseBody?: boolean, withCredentials?: boolean, abortSignal?: AbortSignalLike, timeout?: number, onUploadProgress?: (progress: TransferProgressEvent) => void, onDownloadProgress?: (progress: TransferProgressEvent) => void, proxySettings?: ProxySettings, keepAlive?: boolean, decompressResponse?: boolean, streamResponseStatusCodes?: Set); + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource; + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. + */ + clone(): WebResource; +} +/** + * Options to prepare an outgoing HTTP request. + */ +export interface RequestPrepareOptions { + /** + * The HTTP request method. 
Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: `{ "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } }` + * - query-parameter-value in "string" format: `{ "query-parameter-name": "query-parameter-value"}`. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}` + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: `{ "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } }` + * - path-parameter-value in "string" format: `{ "path-parameter-name": "path-parameter-value" }`. + */ + pathParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * Form data, used to build the request body. + */ + formData?: { + [key: string]: any; + }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. 
To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { + [key: string]: any; + }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { + [x: string]: any; + }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: Record; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Allows keeping track of the progress of uploading the outgoing request. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Allows keeping track of the progress of downloading the incoming response. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; +} +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + /** + * Value of the parameter. + */ + value: any; + /** + * Disables URL encoding if set to true. + */ + skipUrlEncoding: boolean; + /** + * Parameter values may contain any other property. + */ + [key: string]: any; +} +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * May contain other properties. + */ + [key: string]: any; + /** + * Options to override XML parsing/building behavior. 
+ */ + serializerOptions?: SerializerOptions; +} +//# sourceMappingURL=webResource.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts new file mode 100644 index 00000000..f08fd80f --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts @@ -0,0 +1,12 @@ +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +export declare class XhrHttpClient implements HttpClient { + sendRequest(request: WebResourceLike): Promise; +} +export declare function parseHeaders(xhr: XMLHttpRequest): HttpHeadersLike; +//# sourceMappingURL=xhrHttpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts b/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts new file mode 100644 index 00000000..b1aeeffb --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts @@ -0,0 +1,51 @@ +/// +export { WebResource, WebResourceLike, HttpRequestBody, RequestPrepareOptions, HttpMethods, ParameterValue, RequestOptionsBase, TransferProgressEvent, } from "./webResource"; +export { CommonResponse, CommonRequestInit, CommonRequestInfo } from "./nodeFetchHttpClient"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { OperationOptions, OperationRequestOptions, operationOptionsToRequestOptionsBase, } from "./operationOptions"; +export { OperationParameter, OperationQueryParameter, OperationURLParameter, ParameterPath, } from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { ServiceClient, ServiceClientOptions, flattenResponse, createPipelineFromOptions, ProxySettings, ProxyOptions, } from "./serviceClient"; +export { PipelineOptions, InternalPipelineOptions } from "./pipelineOptions"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +export { LogPolicyOptions, logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions, RequestPolicyOptionsLike, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy, RetryOptions, RetryMode } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy, RedirectOptions } from "./policies/redirectPolicy"; +export { keepAlivePolicy, KeepAliveOptions } from 
"./policies/keepAlivePolicy"; +export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, UserAgentOptions, TelemetryInfo, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, DeserializationOptions, deserializeResponseBody, DeserializationContentTypes, } from "./policies/deserializationPolicy"; +export { tracingPolicy, TracingPolicyOptions } from "./policies/tracingPolicy"; +export { MapperType, SimpleMapperType, CompositeMapperType, DictionaryMapperType, SequenceMapperType, EnumMapperType, Mapper, BaseMapper, CompositeMapper, SequenceMapper, DictionaryMapper, EnumMapper, MapperConstraints, PolymorphicDiscriminator, Serializer, UrlParameterValue, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, ServiceCallback, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +export { AbortSignalLike } from "@azure/abort-controller"; +export { delay } from "./util/delay"; +export { createSpanFunction, SpanConfig } from "./createSpanLegacy"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential } from "@azure/core-auth"; +export { AccessTokenCache, ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; +export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { parseXML, stringifyXML } from "./util/xml"; +export { XML_ATTRKEY, XML_CHARKEY, SerializerOptions } from "./util/serializer.common"; +//# sourceMappingURL=coreHttp.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts.map b/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts.map new file mode 100644 index 00000000..4f169704 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"coreHttp.d.ts","sourceRoot":"","sources":["../../../src/coreHttp.ts"],"names":[],"mappings":";AAMA,OAAO,EACL,WAAW,EACX,eAAe,EACf,eAAe,EACf,qBAAqB,EACrB,WAAW,EACX,cAAc,EACd,kBAAkB,EAClB,qBAAqB,GACtB,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,cAAc,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,uBAAuB,CAAC;AAC7F,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AACzF,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAC5F,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EACL,gBAAgB,EAChB,uBAAuB,EACvB,oCAAoC,GACrC,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACrB,aAAa,GACd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EACL,aAAa,EACb,oBAAoB,EACpB,eAAe,EACf,yBAAyB,EACzB,aAAa,EACb,YAAY,GACb,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,MAAM,mBAAmB,CAAC;AAC7E,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,+BAA+B,EAAE,MAAM,4CAA4C,CAAC;AAC7F,OAAO,EAAE,gBAAgB,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACpB,wBAAwB,GACzB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAE,YAAY,EAAE,SAAS,EAAE,MAAM,mCAAmC,CAAC;AACpG,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,eAAe,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAC/E,OAAO,EAAE,kCAAkC,EAAE,MAAM,+CAA+C,CAAC;AACnG,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EACL,eAAe,EACf,wBAAwB,EACxB,gBAAgB,EAChB,aAAa,GACd,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,qBAAqB,EACrB,sBAAsB,EACtB,uBAAuB,EACvB,2BAA2B,GAC5B,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAC/E,OAAO,EACL,UAAU,EACV,gBAAgB,EAChB,mBAAmB,EACnB,oBAAoB,EACpB,kBAAkB,EAClB,cAAc,EACd,MAAM,EACN,UAAU,EACV,eAAe,EACf,cAAc,EACd,gBAAgB,EAChB,UAAU,EACV,iBAAiB,EACjB,wBAAwB,EACxB,UAAU,EACV,iBAAiB,EACjB,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EACT,eAAe,EACf,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,KAAK,EAAE,MAAM,cAAc,CAAC;AAErC,OAAO,EAAE,kBAAkB,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AAGpE,OAAO,EAAE,eAAe,EAAE,eAAe,EAAE,WAAW,EAAE,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACpG,OAAO,EAAE,gBAAgB,EAAE,wBAAwB,EAAE,MAAM,gCAAgC,CAAC;AAC5F,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC1E,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAAE,uBAAuB,EAAE,MAAM,iCAAiC,CAAC;AAC7F,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAClF,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAE1D,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,WAAW,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts new file mode 100644 index 00000000..9b87f7cf --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts @@ -0,0 +1,32 @@ +import { Span } from "@azure/core-tracing"; +import { 
OperationOptions } from "./operationOptions"; +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use core-tracing instead. + * @hidden + */ +export interface SpanConfig { + /** + * Package name prefix + */ + packagePrefix: string; + /** + * Service namespace + */ + namespace: string; +} +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +export declare function createSpanFunction(args: SpanConfig): (operationName: string, operationOptions: T) => { + span: Span; + updatedOptions: T; +}; +//# sourceMappingURL=createSpanLegacy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map new file mode 100644 index 00000000..3e83ac3b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"createSpanLegacy.d.ts","sourceRoot":"","sources":["../../../src/createSpanLegacy.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,IAAI,EAAuD,MAAM,qBAAqB,CAAC;AAChG,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;;;;GAKG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;CACnB;AAED;;;;;;;;GAQG;AACH,wBAAgB,kBAAkB,CAChC,IAAI,EAAE,UAAU,GACf,CAAC,CAAC,SAAS,gBAAgB,EAC5B,aAAa,EAAE,MAAM,EACrB,gBAAgB,EAAE,CAAC,KAChB;IAAE,IAAI,EAAE,IAAI,CAAC;IAAC,cAAc,EAAE,CAAC,CAAA;CAAE,CAErC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts new file mode 100644 index 00000000..f57bdba3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts @@ -0,0 +1,48 @@ +import { AccessToken } from "@azure/core-auth"; +/** + * Defines the default token refresh buffer duration. + */ +export declare const TokenRefreshBufferMs: number; +/** + * Provides a cache for an AccessToken that was that + * was returned from a TokenCredential. + */ +export interface AccessTokenCache { + /** + * Sets the cached token. + * + * @param accessToken - The {@link AccessToken} to be cached or null to + * clear the cached token. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached {@link AccessToken} or undefined if nothing is cached. + */ + getCachedToken(): AccessToken | undefined; +} +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class ExpiringAccessTokenCache implements AccessTokenCache { + private tokenRefreshBufferMs; + private cachedToken?; + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs?: number); + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. 
+ */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. + */ + getCachedToken(): AccessToken | undefined; +} +//# sourceMappingURL=accessTokenCache.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map new file mode 100644 index 00000000..15d69fb3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenCache.d.ts","sourceRoot":"","sources":["../../../../src/credentials/accessTokenCache.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAE/C;;GAEG;AACH,eAAO,MAAM,oBAAoB,QAAgB,CAAC;AAElD;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;;;OAKG;IACH,cAAc,CAAC,WAAW,EAAE,WAAW,GAAG,SAAS,GAAG,IAAI,CAAC;IAE3D;;OAEG;IACH,cAAc,IAAI,WAAW,GAAG,SAAS,CAAC;CAC3C;AAED;;;;;;GAMG;AACH,qBAAa,wBAAyB,YAAW,gBAAgB;IAC/D,OAAO,CAAC,oBAAoB,CAAS;IACrC,OAAO,CAAC,WAAW,CAAC,CAA0B;IAE9C;;;OAGG;gBACS,oBAAoB,GAAE,MAA6B;IAI/D;;;OAGG;IACH,cAAc,CAAC,WAAW,EAAE,WAAW,GAAG,SAAS,GAAG,IAAI;IAI1D;;OAEG;IACH,cAAc,IAAI,WAAW,GAAG,SAAS;CAU1C"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts new file mode 100644 index 00000000..e3c8d387 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts @@ -0,0 +1,32 @@ +import { AccessToken, GetTokenOptions, TokenCredential } from "@azure/core-auth"; +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class AccessTokenRefresher { + private credential; + private scopes; + private requiredMillisecondsBeforeNewRefresh; + private promise; + private lastCalled; + constructor(credential: TokenCredential, scopes: string | string[], requiredMillisecondsBeforeNewRefresh?: number); + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady(): boolean; + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + private getToken; + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. 
+ */ + refresh(options: GetTokenOptions): Promise; +} +//# sourceMappingURL=accessTokenRefresher.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map new file mode 100644 index 00000000..5cf3f0f0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenRefresher.d.ts","sourceRoot":"","sources":["../../../../src/credentials/accessTokenRefresher.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEjF;;;;GAIG;AACH,qBAAa,oBAAoB;IAK7B,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,oCAAoC;IAN9C,OAAO,CAAC,OAAO,CAA+C;IAC9D,OAAO,CAAC,UAAU,CAAK;gBAGb,UAAU,EAAE,eAAe,EAC3B,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,EACzB,oCAAoC,GAAE,MAAc;IAG9D;;;OAGG;IACI,OAAO,IAAI,OAAO;IAOzB;;;;;OAKG;YACW,QAAQ;IAOtB;;;OAGG;IACI,OAAO,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC;CAO3E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts new file mode 100644 index 00000000..a1b9a640 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts @@ -0,0 +1,44 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { + [x: string]: any; + }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + inQuery?: { + [x: string]: any; + }; +} +/** + * Authenticates to a service using an API key. + */ +export declare class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?; + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions); + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=apiKeyCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map new file mode 100644 index 00000000..e32be598 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AACtE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,QAAQ,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAChC;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;CAChC;AAED;;GAEG;AACH,qBAAa,iBAAkB,YAAW,wBAAwB;IAChE;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAuB;IACjD;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAuB;IAEhD;;OAEG;gBACS,OAAO,EAAE,uBAAuB;IAU5C;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;CAiCpE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts new file mode 100644 index 00000000..12105982 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts @@ -0,0 +1,36 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +export declare class BasicAuthenticationCredentials implements ServiceClientCredentials { + /** + * Username + */ + userName: string; + /** + * Password + */ + password: string; + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + authorizationScheme: string; + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName: string, password: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=basicAuthenticationCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map new file mode 100644 index 00000000..c2c5c3ef --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AACtE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAKjD;;GAEG;AACH,qBAAa,8BAA+B,YAAW,wBAAwB;IAC7E;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;;OAGG;IACH,mBAAmB,EAAE,MAAM,CAAgC;IAE3D;;;;;;OAMG;gBAED,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,MAAM,EAChB,mBAAmB,GAAE,MAAqC;IAa5D;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;CAOpE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts new file mode 100644 index 00000000..d8d1c5c1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts @@ -0,0 +1,6 @@ +/** + * A function that receives a challenge and resolves a promise with a string token. + * @deprecated The Authenticator type is not currently in use. + */ +export declare type Authenticator = (challenge: unknown) => Promise; +//# sourceMappingURL=credentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map new file mode 100644 index 00000000..8617d1b5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/credentials.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,oBAAY,aAAa,GAAG,CAAC,SAAS,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts new file mode 100644 index 00000000..a65d0470 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts @@ -0,0 +1,14 @@ +import { WebResourceLike } from "../webResource"; +/** + * Represents an object or class with a `signRequest` method which will sign outgoing requests (for example, by setting the `Authorization` header). + */ +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike/request to be signed. 
+ * @returns The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=serviceClientCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map new file mode 100644 index 00000000..8a9416e1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;CACrE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts new file mode 100644 index 00000000..5e939fcc --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts @@ -0,0 +1,13 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +export declare class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey: string); +} +//# sourceMappingURL=topicCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map new file mode 100644 index 00000000..3adcd9d2 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/topicCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAA2B,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAEjF;;GAEG;AACH,qBAAa,gBAAiB,SAAQ,iBAAiB;IACrD;;;;OAIG;gBACS,QAAQ,EAAE,MAAM;CAW7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts new file mode 100644 index 00000000..883145a4 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts @@ -0,0 +1,2 @@ +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map new file mode 100644 index 00000000..2613c905 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.d.ts","sourceRoot":"","sources":["../../../src/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts new file 
mode 100644 index 00000000..a7381a24 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts @@ -0,0 +1,2 @@ +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map new file mode 100644 index 00000000..c15b8dd5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.d.ts","sourceRoot":"","sources":["../../../src/defaultHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts new file mode 100644 index 00000000..5f5f00e9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts @@ -0,0 +1,7 @@ +import { RequestPolicy } from "./policies/requestPolicy"; +/** + * An interface that can send HttpRequests and receive promised HttpResponses. + */ +export interface HttpClient extends RequestPolicy { +} +//# sourceMappingURL=httpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map new file mode 100644 index 00000000..060dbe01 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.d.ts","sourceRoot":"","sources":["../../../src/httpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAEzD;;GAEG;AACH,MAAM,WAAW,UAAW,SAAQ,aAAa;CAAG"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts new file mode 100644 index 00000000..67ac61be --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts @@ -0,0 +1,3 @@ +import { HttpClient } from "./httpClient"; +export declare function getCachedDefaultHttpClient(): HttpClient; +//# sourceMappingURL=httpClientCache.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map new file mode 100644 index 00000000..09a6518b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClientCache.d.ts","sourceRoot":"","sources":["../../../src/httpClientCache.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAI1C,wBAAgB,0BAA0B,IAAI,UAAU,CAMvD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts new file mode 100644 index 00000000..7e487d94 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts @@ -0,0 +1,136 @@ +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + /** + * The value of the header. + */ + value: string; +} +/** + * A HttpHeaders collection represented as a simple JSON object. 
+ */ +export declare type RawHttpHeaders = { + [headerName: string]: string; +}; +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; +} +export declare function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike; +/** + * A collection of HTTP header key/value pairs. + */ +export declare class HttpHeaders implements HttpHeadersLike { + private readonly _headersMap; + constructor(rawHeaders?: RawHttpHeaders); + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. 
+ */ + headerValues(): string[]; + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; + /** + * Get the string representation of this HTTP header collection. + */ + toString(): string; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeaders; +} +//# sourceMappingURL=httpHeaders.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map new file mode 100644 index 00000000..e0631334 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.d.ts","sourceRoot":"","sources":["../../../src/httpHeaders.ts"],"names":[],"mappings":"AAUA;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,oBAAY,cAAc,GAAG;IAAE,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAA;CAAE,CAAC;AAE9D;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;OAKG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAC5D;;;;OAIG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;IAC5C;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACtC;;;;OAIG;IACH,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACpC;;OAEG;IACH,UAAU,IAAI,cAAc,CAAC;IAC7B;;OAEG;IACH,YAAY,IAAI,UAAU,EAAE,CAAC;IAC7B;;OAEG;IACH,WAAW,IAAI,MAAM,EAAE,CAAC;IACxB;;OAEG;IACH,YAAY,IAAI,MAAM,EAAE,CAAC;IACzB;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;IACzB;;;OAGG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,YAAY,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,cAAc,CAAC;CAC9D;AAED,wBAAgB,iBAAiB,CAAC,MAAM,CAAC,EAAE,OAAO,GAAG,MAAM,IAAI,eAAe,CA+B7E;AAED;;GAEG;AACH,qBAAa,WAAY,YAAW,eAAe;IACjD,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAsC;gBAEtD,UAAU,CAAC,EAAE,cAAc;IASvC;;;;;OAKG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAOlE;;;;OAIG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAKlD;;OAEG;IACI,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAI5C;;;;OAIG;IACI,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAM1C;;OAEG;IACI,UAAU,IAAI,cAAc;IAInC;;OAEG;IACI,YAAY,IAAI,UAAU,EAAE;IAQnC;;OAEG;IACI,WAAW,IAAI,MAAM,EAAE;IAS9B;;OAEG;IACI,YAAY,IAAI,MAAM,EAAE;IAS/B;;OAEG;IACI,MAAM,CAAC,OAAO,GAAE;QAAE,YAAY,CAAC,EAAE,OAAO,CAAA;KAAO,GAAG,cAAc;IAgBvE;;OAEG;IACI,QAAQ,IAAI,MAAM;IAIzB;;OAEG;IACI,KAAK,IAAI,WAAW;CAQ5B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts new file mode 100644 index 00000000..5e3613d0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts @@ -0,0 +1,78 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + /** + * The HTTP response status (e.g. 200) + */ + status: number; + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob { + } +} +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. 
+ */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { + [key: string]: any; + }; + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; +} +/** + * The flattened response to a REST call. + * Contains the underlying {@link HttpOperationResponse} as well as + * the merged properties of the `parsedBody`, `parsedHeaders`, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. + */ + _response: HttpOperationResponse; + /** + * The flattened properties described by the `OperationSpec`, deserialized from headers and the HTTP body. + */ + [key: string]: any; +} +//# sourceMappingURL=httpOperationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map new file mode 100644 index 00000000..96af4ccf --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.d.ts","sourceRoot":"","sources":["../../../src/httpOperationResponse.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IAEzB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;CAC1B;AAED,OAAO,CAAC,MAAM,CAAC;IACb;;;OAGG;IACH,UAAU,IAAI;KAAG;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,qBAAsB,SAAQ,YAAY;IACzD;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAEvC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;OAEG;IACH,UAAU,CAAC,EAAE,GAAG,CAAC;IAEjB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC;IAEzB;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;CAC5C;AAED;;;;GAIG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,SAAS,EAAE,qBAAqB,CAAC;IAEjC;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts new file mode 100644 index 00000000..7a13ab29 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts @@ -0,0 +1,22 @@ +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export declare enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF = 0, + /** + * An error log. + */ + ERROR = 1, + /** + * A warning log. + */ + WARNING = 2, + /** + * An information log. 
+ */ + INFO = 3 +} +//# sourceMappingURL=httpPipelineLogLevel.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map new file mode 100644 index 00000000..879719da --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.d.ts","sourceRoot":"","sources":["../../../src/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,oBAAoB;IAC9B;;OAEG;IACH,GAAG,IAAA;IAEH;;OAEG;IACH,KAAK,IAAA;IAEL;;OAEG;IACH,OAAO,IAAA;IAEP;;OAEG;IACH,IAAI,IAAA;CACL"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts new file mode 100644 index 00000000..cd26a555 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts @@ -0,0 +1,35 @@ +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export declare class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + minimumLogLevel: HttpPipelineLogLevel; + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel - The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel: HttpPipelineLogLevel); + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. 
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=httpPipelineLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map new file mode 100644 index 00000000..22fa8858 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.d.ts","sourceRoot":"","sources":["../../../src/httpPipelineLogger.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAE9D;;;GAGG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,eAAe,EAAE,oBAAoB,CAAC;IAEtC;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,yBAA0B,YAAW,kBAAkB;IAK/C,eAAe,EAAE,oBAAoB;IAJxD;;;OAGG;gBACgB,eAAe,EAAE,oBAAoB;IAExD;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAgB3D"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/log.d.ts b/node_modules/@azure/core-http/types/latest/src/log.d.ts new file mode 100644 index 00000000..d9ad771b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/log.d.ts @@ -0,0 +1,2 @@ +export declare const logger: import("@azure/logger").AzureLogger; +//# sourceMappingURL=log.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/log.d.ts.map b/node_modules/@azure/core-http/types/latest/src/log.d.ts.map new file mode 100644 index 00000000..0a002fa3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/log.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"log.d.ts","sourceRoot":"","sources":["../../../src/log.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,MAAM,qCAAkC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts new file mode 100644 index 00000000..4a844e86 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts @@ -0,0 +1,64 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { Transform } from "stream"; +import { TransferProgressEvent, WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * String URLs used when calling to `fetch()`. + */ +export declare type CommonRequestInfo = string; +/** + * An object containing information about the outgoing HTTP request. + */ +export declare type CommonRequestInit = Omit & { + body?: any; + headers?: any; + signal?: any; +}; +/** + * An object containing information about the incoming HTTP response. + */ +export declare type CommonResponse = Omit & { + body: any; + trailer: any; + formData: any; +}; +export declare class ReportTransform extends Transform { + private progressCallback; + private loadedBytes; + _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void; + constructor(progressCallback: (progress: TransferProgressEvent) => void); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +export declare function parseHeaders(headers: Headers): HttpHeadersLike; +/** + * An HTTP client that uses `node-fetch`. 
+ */ +export declare class NodeFetchHttpClient implements HttpClient { + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ + sendRequest(httpRequest: WebResourceLike): Promise; + private proxyAgentMap; + private keepAliveAgents; + private readonly cookieJar; + private getOrCreateAgent; + /** + * Uses `node-fetch` to perform the request. + */ + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; + /** + * Prepares a request based on the provided web resource. + */ + prepareRequest(httpRequest: WebResourceLike): Promise>; + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ + processRequest(operationResponse: HttpOperationResponse): Promise; +} +//# sourceMappingURL=nodeFetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map new file mode 100644 index 00000000..28ab8ba3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.d.ts","sourceRoot":"","sources":["../../../src/nodeFetchHttpClient.ts"],"names":[],"mappings":";AAOA,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAE7D,OAAO,EAAY,SAAS,EAAE,MAAM,QAAQ,CAAC;AAC7C,OAAO,EAAE,qBAAqB,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEvE,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAuBhE;;GAEG;AACH,oBAAY,iBAAiB,GAAG,MAAM,CAAC;AAEvC;;GAEG;AACH,oBAAY,iBAAiB,GAAG,IAAI,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,GAAG,QAAQ,CAAC,GAAG;IACjF,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,GAAG,CAAC;CACd,CAAC;AAEF;;GAEG;AACH,oBAAY,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,CAAC,GAAG;IAC7E,IAAI,EAAE,GAAG,CAAC;IACV,OAAO,EAAE,GAAG,CAAC;IACb,QAAQ,EAAE,GAAG,CAAC;CACf,CAAC;AAEF,qBAAa,eAAgB,SAAQ,SAAS;IAShC,OAAO,CAAC,gBAAgB;IARpC,OAAO,CAAC,WAAW,CAAa;IAChC,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,GAAG,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;gBAOrE,gBAAgB,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI;CAGhF;AAiBD;;GAEG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,GAAG,eAAe,CAQ9D;AAED;;GAEG;AACH,qBAAa,mBAAoB,YAAW,UAAU;IACpD;;;;OAIG;IACG,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAyL/E,OAAO,CAAC,aAAa,CAAsC;IAC3D,OAAO,CAAC,eAAe,CAAkB;IAEzC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAuD;IAEjF,OAAO,CAAC,gBAAgB;IAqDxB;;OAEG;IAEG,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;IAIxF;;OAEG;IACG,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAyBjF;;OAEG;IACG,cAAc,CAAC,iBAAiB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;CAqB9E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts new file mode 100644 index 00000000..7eadcda9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts @@ -0,0 +1,15 @@ +import { RequestOptionsBase } from "./webResource"; +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. 
+ */ + [parameterName: string]: any; + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} +//# sourceMappingURL=operationArguments.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map new file mode 100644 index 00000000..341dc04c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.d.ts","sourceRoot":"","sources":["../../../src/operationArguments.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,CAAC,aAAa,EAAE,MAAM,GAAG,GAAG,CAAC;IAE7B;;OAEG;IACH,OAAO,CAAC,EAAE,kBAAkB,CAAC;CAC9B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts new file mode 100644 index 00000000..6ea66e62 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts @@ -0,0 +1,56 @@ +import { RequestOptionsBase, TransferProgressEvent } from "./webResource"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationTracingOptions } from "@azure/core-tracing"; +/** + * The base options type for all operations. + */ +export interface OperationOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: OperationRequestOptions; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} +/** + * Options that allow configuring the handling of HTTP requests made by an SDK operation. + */ +export interface OperationRequestOptions { + /** + * User defined custom request headers that will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. 
+ */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); +} +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +export declare function operationOptionsToRequestOptionsBase(opts: T): RequestOptionsBase; +//# sourceMappingURL=operationOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map new file mode 100644 index 00000000..31a30b9e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationOptions.d.ts","sourceRoot":"","sources":["../../../src/operationOptions.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAC;AAC1E,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,uBAAuB,EAAE,MAAM,qBAAqB,CAAC;AAE9D;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,cAAc,CAAC,EAAE,uBAAuB,CAAC;IACzC;;OAEG;IACH,cAAc,CAAC,EAAE,uBAAuB,CAAC;CAC1C;AAED;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAE1C;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC/D;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;CAC9E;AAED;;;;GAIG;AACH,wBAAgB,oCAAoC,CAAC,CAAC,SAAS,gBAAgB,EAC7E,IAAI,EAAE,CAAC,GACN,kBAAkB,CAgBpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts new file mode 100644 index 00000000..4f0aa660 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts @@ -0,0 +1,54 @@ +import { Mapper } from "./serializer"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +/** + * A path which describes how to access a particular property in a given object data source. May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values. + */ +export declare type ParameterPath = string | string[] | { + [propertyName: string]: ParameterPath; +}; +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. 
+ */ + skipEncoding?: boolean; + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export declare function getPathStringFromParameter(parameter: OperationParameter): string; +export declare function getPathStringFromParameterPath(parameterPath: ParameterPath, mapper: Mapper): string; +//# sourceMappingURL=operationParameter.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map new file mode 100644 index 00000000..70d5e72c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.d.ts","sourceRoot":"","sources":["../../../src/operationParameter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAEhE;;GAEG;AACH,oBAAY,aAAa,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,aAAa,CAAA;CAAE,CAAC;AAE1F;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,aAAa,EAAE,aAAa,CAAC;IAE7B;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAsB,SAAQ,kBAAkB;IAC/D;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAwB,SAAQ,kBAAkB;IACjE;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB;;;OAGG;IACH,gBAAgB,CAAC,EAAE,qBAAqB,CAAC;CAC1C;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,SAAS,EAAE,kBAAkB,GAAG,MAAM,CAEhF;AAED,wBAAgB,8BAA8B,CAC5C,aAAa,EAAE,aAAa,EAC5B,MAAM,EAAE,MAAM,GACb,MAAM,CAUR"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts new file mode 100644 index 00000000..7e3d06bd --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts @@ -0,0 +1,19 @@ +import { Mapper } from "./serializer"; +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + /** + * The mapper that will be used to deserialize the response body. 
+ */ + bodyMapper?: Mapper; + /** + * Indicates if this is an error response + */ + isError?: boolean; +} +//# sourceMappingURL=operationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map new file mode 100644 index 00000000..c6c21cfd --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.d.ts","sourceRoot":"","sources":["../../../src/operationResponse.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;CACnB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts new file mode 100644 index 00000000..3bc59281 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts @@ -0,0 +1,77 @@ +import { Serializer } from "./serializer"; +import { OperationParameter, OperationQueryParameter, OperationURLParameter } from "./operationParameter"; +import { HttpMethods } from "./webResource"; +import { OperationResponse } from "./operationResponse"; +/** + * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the {@link ServiceClient}. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + /** + * The media type of the request body. + * This value can be used to aide in serialization if it is provided. + */ + readonly mediaType?: "json" | "xml" | "form" | "binary" | "multipart" | "text" | "unknown" | string; + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. 
+ */ + readonly headerParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + /** + * The different types of responses that this operation can return based on what status code is + * returned. + */ + readonly responses: { + [responseCode: string]: OperationResponse; + }; +} +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +export declare function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set; +//# sourceMappingURL=operationSpec.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map new file mode 100644 index 00000000..45e86788 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.d.ts","sourceRoot":"","sources":["../../../src/operationSpec.ts"],"names":[],"mappings":"AAGA,OAAO,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AACtD,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACtB,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,UAAU,CAAC;IAEhC;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,WAAW,CAAC;IAEjC;;;;OAIG;IACH,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE1B;;;OAGG;IACH,QAAQ,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IAEvB;;;OAGG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;IAE9B;;;OAGG;IACH,QAAQ,CAAC,SAAS,CAAC,EACf,MAAM,GACN,KAAK,GACL,MAAM,GACN,QAAQ,GACR,WAAW,GACX,MAAM,GACN,SAAS,GACT,MAAM,CAAC;IACX;;OAEG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,kBAAkB,CAAC;IAE1C;;OAEG;IACH,QAAQ,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;IAE9D;;OAEG;IACH,QAAQ,CAAC,eAAe,CAAC,EAAE,aAAa,CAAC,uBAAuB,CAAC,CAAC;IAElE;;;OAGG;IACH,QAAQ,CAAC,gBAAgB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAE9D;;;OAGG;IACH,QAAQ,CAAC,kBAAkB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAEhE;;;OAGG;IACH,QAAQ,CAAC,SAAS,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,iBAAiB,CAAA;KAAE,CAAC;CACnE;AAED;;;GAGG;AACH,wBAAgB,4BAA4B,CAAC,aAAa,EAAE,aAAa,GAAG,GAAG,CAAC,MAAM,CAAC,CAYtF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts new file mode 100644 index 00000000..790b1b05 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts @@ -0,0 +1,63 @@ +import { DeserializationOptions } from "./policies/deserializationPolicy"; +import { HttpClient } from "./httpClient"; +import { KeepAliveOptions } from "./policies/keepAlivePolicy"; +import { LogPolicyOptions } from "./policies/logPolicy"; +import { ProxyOptions } from "./serviceClient"; +import { RedirectOptions } from "./policies/redirectPolicy"; +import { RetryOptions } from "./policies/exponentialRetryPolicy"; +import { UserAgentOptions } from "./policies/userAgentPolicy"; +/** + * Defines options that are used to configure the HTTP pipeline for + * an SDK client. + */ +export interface PipelineOptions { + /** + * The HttpClient implementation to use for outgoing HTTP requests. Defaults + * to DefaultHttpClient. + */ + httpClient?: HttpClient; + /** + * Options that control how to retry failed requests. + */ + retryOptions?: RetryOptions; + /** + * Options to configure a proxy for outgoing requests. 
+ */ + proxyOptions?: ProxyOptions; + /** + * Options for how HTTP connections should be maintained for future + * requests. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; +} +/** + * Defines options that are used to configure internal options of + * the HTTP pipeline for an SDK client. + */ +export interface InternalPipelineOptions extends PipelineOptions { + /** + * Options to configure API response deserialization. + */ + deserializationOptions?: DeserializationOptions; + /** + * Options to configure request/response logging. + */ + loggingOptions?: LogPolicyOptions; + /** + * Configure whether to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Send JSON Array payloads as NDJSON. + */ + sendStreamingJson?: boolean; +} +//# sourceMappingURL=pipelineOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map new file mode 100644 index 00000000..3a23655e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pipelineOptions.d.ts","sourceRoot":"","sources":["../../../src/pipelineOptions.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,sBAAsB,EAAE,MAAM,kCAAkC,CAAC;AAC1E,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAC9D,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AACxD,OAAO,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAC/C,OAAO,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAC5D,OAAO,EAAE,YAAY,EAAE,MAAM,mCAAmC,CAAC;AACjE,OAAO,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAE9D;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;;OAGG;IACH,UAAU,CAAC,EAAE,UAAU,CAAC;IAExB;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAE5B;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAE5B;;;OAGG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;IAEpC;;OAEG;IACH,eAAe,CAAC,EAAE,eAAe,CAAC;IAElC;;OAEG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;CACrC;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAwB,SAAQ,eAAe;IAC9D;;OAEG;IACH,sBAAsB,CAAC,EAAE,sBAAsB,CAAC;IAEhD;;OAEG;IACH,cAAc,CAAC,EAAE,gBAAgB,CAAC;IAElC;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAE7B;;OAEG;IACH,iBAAiB,CAAC,EAAE,OAAO,CAAC;CAC7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts new file mode 100644 index 00000000..d9c549a4 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts @@ -0,0 +1,33 @@ +import { TokenCredential } from "@azure/core-auth"; +import { RequestPolicyFactory } from "../policies/requestPolicy"; +interface TokenCyclerOptions { + /** + * The window of time before token expiration during which the token will be + * considered unusable due to risk of the token expiring before sending the + * request. + * + * This will only become meaningful if the refresh fails for over + * (refreshWindow - forcedRefreshWindow) milliseconds. + */ + forcedRefreshWindowInMs: number; + /** + * Interval in milliseconds to retry failed token refreshes. + */ + retryIntervalInMs: number; + /** + * The window of time before token expiration during which + * we will attempt to refresh the token. 
+ */ + refreshWindowInMs: number; +} +export declare const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions; +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +export declare function bearerTokenAuthenticationPolicy(credential: TokenCredential, scopes: string | string[]): RequestPolicyFactory; +export {}; +//# sourceMappingURL=bearerTokenAuthenticationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map new file mode 100644 index 00000000..9bdd1cdd --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bearerTokenAuthenticationPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/bearerTokenAuthenticationPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAgC,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACjF,OAAO,EAGL,oBAAoB,EAErB,MAAM,2BAA2B,CAAC;AAgBnC,UAAU,kBAAkB;IAC1B;;;;;;;OAOG;IACH,uBAAuB,EAAE,MAAM,CAAC;IAChC;;OAEG;IACH,iBAAiB,EAAE,MAAM,CAAC;IAC1B;;;OAGG;IACH,iBAAiB,EAAE,MAAM,CAAC;CAC3B;AAGD,eAAO,MAAM,sBAAsB,EAAE,kBAIpC,CAAC;AA2KF;;;;;;GAMG;AACH,wBAAgB,+BAA+B,CAC7C,UAAU,EAAE,eAAe,EAC3B,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,GACxB,oBAAoB,CAgCtB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts new file mode 100644 index 00000000..2290bed1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts @@ -0,0 +1,59 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { SerializerOptions } from "../util/serializer.common"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to configure API response deserialization. + */ +export interface DeserializationOptions { + /** + * Configures the expected content types for the deserialization of + * JSON and XML response bodies. + */ + expectedContentTypes: DeserializationContentTypes; +} +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. 
+ */ +export declare function deserializationPolicy(deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions): RequestPolicyFactory; +export declare const defaultJsonContentTypes: string[]; +export declare const defaultXmlContentTypes: string[]; +export declare const DefaultDeserializationOptions: DeserializationOptions; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export declare class DeserializationPolicy extends BaseRequestPolicy { + readonly jsonContentTypes: string[]; + readonly xmlContentTypes: string[]; + readonly xmlCharKey: string; + constructor(nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions, deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions); + sendRequest(request: WebResourceLike): Promise; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +export declare function deserializeResponseBody(jsonContentTypes: string[], xmlContentTypes: string[], response: HttpOperationResponse, options?: SerializerOptions): Promise; +//# sourceMappingURL=deserializationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map new file mode 100644 index 00000000..f026254d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"deserializationPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,iBAAiB,EAAe,MAAM,2BAA2B,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAKjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC;;;OAGG;IACH,oBAAoB,EAAE,2BAA2B,CAAC;CACnD;AAED;;;GAGG;AACH,MAAM,WAAW,2BAA2B;IAC1C;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC;IAEhB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,EAAE,CAAC;CAChB;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CACnC,2BAA2B,CAAC,EAAE,2BAA2B,EACzD,cAAc,CAAC,EAAE,iBAAiB,GACjC,oBAAoB,CAWtB;AAED,eAAO,MAAM,uBAAuB,UAAoC,CAAC;AACzE,eAAO,MAAM,sBAAsB,UAA8C,CAAC;AAElF,eAAO,MAAM,6BAA6B,EAAE,sBAK3C,CAAC;AAEF;;;GAGG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,SAAgB,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAC3C,SAAgB,eAAe,EAAE,MAAM,EAAE,CAAC;IAC1C,SAAgB,UAAU,EAAE,MAAM,CAAC;gBAGjC,UAAU,EAAE,aAAa,EACzB,oBAAoB,EAAE,oBAAoB,EAC1C,2BAA2B,CAAC,EAAE,2BAA2B,EACzD,cAAc,GAAE,iBAAsB;IAW3B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAOnF;AAsCD;;;;;;;GAOG;AACH,wBAAgB,uBAAuB,CACrC,gBAAgB,EAAE,MAAM,EAAE,EAC1B,eAAe,EAAE,MAAM,EAAE,EACzB,QAAQ,EAAE,qBAAqB,EAC/B,OAAO,GAAE,iBAAsB,GAC9B,OAAO,CAAC,qBAAqB,CAAC,CA4EhC"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts new file mode 100644 index 00000000..f071e6e0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts @@ -0,0 +1,13 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting + * to use it will results in error being thrown. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map new file mode 100644 index 00000000..45df97bb --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.browser.d.ts","sourceRoot":"","sources":["../../../../src/policies/disableResponseDecompressionPolicy.browser.ts"],"names":[],"mappings":"AAMA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAM7C;;;GAGG;AACH,wBAAgB,kCAAkC,IAAI,oBAAoB,CAMzE;AAED,qBAAa,kCAAmC,SAAQ,iBAAiB;gBAC3D,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB;IAKvD,WAAW,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAGhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts new file mode 100644 index 00000000..949f8e2f --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts @@ -0,0 +1,29 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map new file mode 100644 index 00000000..364460e0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/disableResponseDecompressionPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C;;;GAGG;AACH,wBAAgB,kCAAkC,IAAI,oBAAoB,CAMzE;AAED;;;GAGG;AACH,qBAAa,kCAAmC,SAAQ,iBAAiB;IACvE;;;;;OAKG;gBAGS,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB;IAIpE;;;;;OAKG;IACU,WAAW,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAI/E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts new file mode 100644 index 00000000..10fcb371 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts @@ -0,0 +1,73 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +export declare function exponentialRetryPolicy(retryCount?: number, retryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +export declare enum RetryMode { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + Exponential = 0 +} +/** + * Options that control how to retry failed requests. + */ +export interface RetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. + */ + maxRetries?: number; + /** + * The amount of delay in milliseconds between retry attempts. Defaults to 30000 + * (30 seconds). The delay increases exponentially with each retry up to a maximum + * specified by maxRetryDelayInMs. + */ + retryDelayInMs?: number; + /** + * The maximum delay in milliseconds allowed before retrying an operation. Defaults + * to 90000 (90 seconds). + */ + maxRetryDelayInMs?: number; + /** + * Currently supporting only Exponential mode. + */ + mode?: RetryMode; +} +export declare const DefaultRetryOptions: RetryOptions; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export declare class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. 
+ */ + retryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=exponentialRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map new file mode 100644 index 00000000..8d1cca62 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialRetryPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAYzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD;;;;;GAKG;AACH,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAYtB;AAED;;GAEG;AACH,oBAAY,SAAS;IACnB;;;OAGG;IACH,WAAW,IAAA;CACZ;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;;;OAIG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB;;;OAGG;IACH,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAE3B;;OAEG;IACH,IAAI,CAAC,EAAE,SAAS,CAAC;CAClB;AAED,eAAO,MAAM,mBAAmB,EAAE,YAIjC,CAAC;AAEF;;GAEG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IAEzB;;;;;;;OAOG;gBAED,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM;IAUpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts new file mode 100644 index 00000000..827fd046 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts @@ -0,0 +1,14 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. 
+ */ +export declare function generateClientRequestIdPolicy(requestIdHeaderName?: string): RequestPolicyFactory; +export declare class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + private _requestIdHeaderName; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _requestIdHeaderName: string); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=generateClientRequestIdPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map new file mode 100644 index 00000000..6c50dfe5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;;GAGG;AACH,wBAAgB,6BAA6B,CAC3C,mBAAmB,SAA2B,GAC7C,oBAAoB,CAMtB;AAED,qBAAa,6BAA8B,SAAQ,iBAAiB;IAIhE,OAAO,CAAC,oBAAoB;gBAF5B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACrB,oBAAoB,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts new file mode 100644 index 00000000..dc137ca5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts @@ -0,0 +1,46 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how HTTP connections should be maintained for future + * requests. + */ +export interface KeepAliveOptions { + /** + * When true, connections will be kept alive for multiple requests. + * Defaults to true. + */ + enable: boolean; +} +/** + * By default, HTTP connections are maintained for future requests. + */ +export declare const DefaultKeepAliveOptions: KeepAliveOptions; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +export declare function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory; +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +export declare class KeepAlivePolicy extends BaseRequestPolicy { + private readonly keepAliveOptions; + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, keepAliveOptions: KeepAliveOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=keepAlivePolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map new file mode 100644 index 00000000..0852850b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"keepAlivePolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/keepAlivePolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;OAGG;IACH,MAAM,EAAE,OAAO,CAAC;CACjB;AAED;;GAEG;AACH,eAAO,MAAM,uBAAuB,EAAE,gBAErC,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,gBAAgB,CAAC,EAAE,gBAAgB,GAAG,oBAAoB,CAMzF;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,iBAAiB;IAWlD,OAAO,CAAC,QAAQ,CAAC,gBAAgB;IAVnC;;;;;;OAMG;gBAED,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACZ,gBAAgB,EAAE,gBAAgB;IAKrD;;;;;OAKG;IACU,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAInF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts new file mode 100644 index 00000000..98130d58 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts @@ -0,0 +1,79 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Debugger } from "@azure/logger"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Sanitizer } from "../util/sanitizer"; +import { WebResourceLike } from "../webResource"; +/** + * Options to pass to the {@link logPolicy}. + * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged. + */ +export interface LogPolicyOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to: + * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id, + * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials, + * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers, + * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding, + * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match, + * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent. + * + * Any headers specified in this field will be added to that list. + * Any other values will be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; + /** + * The Debugger (logger) instance to use for writing pipeline logs. + */ + logger?: Debugger; +} +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. 
+ * @returns An instance of the {@link LogPolicy} + */ +export declare function logPolicy(loggingOptions?: LogPolicyOptions): RequestPolicyFactory; +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +export declare class LogPolicy extends BaseRequestPolicy { + logger: Debugger; + sanitizer: Sanitizer; + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames(): Set; + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames: Set); + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters(): Set; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set allowedQueryParameters(allowedQueryParameters: Set); + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, { logger, allowedHeaderNames, allowedQueryParameters, }?: LogPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + private logRequest; + private logResponse; +} +//# sourceMappingURL=logPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map new file mode 100644 index 00000000..c79f751d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/logPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,QAAQ,EAAE,MAAM,eAAe,CAAC;AACzC,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;;;;;;;;;OAWG;IACH,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;IAE9B;;;OAGG;IACH,sBAAsB,CAAC,EAAE,MAAM,EAAE,CAAC;IAElC;;OAEG;IACH,MAAM,CAAC,EAAE,QAAQ,CAAC;CACnB;AAED;;;;GAIG;AACH,wBAAgB,SAAS,CAAC,cAAc,GAAE,gBAAqB,GAAG,oBAAoB,CAMrF;AAED;;GAEG;AACH,qBAAa,SAAU,SAAQ,iBAAiB;IAC9C,MAAM,EAAE,QAAQ,CAAC;IACjB,SAAS,EAAE,SAAS,CAAC;IAErB;;;;;;OAMG;IACH,IAAW,kBAAkB,IAAI,GAAG,CAAC,MAAM,CAAC,CAE3C;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB,CAAC,kBAAkB,EAAE,GAAG,CAAC,MAAM,CAAC,EAE5D;IAED;;;;OAIG;IACH,IAAW,sBAAsB,IAAI,GAAG,CAAC,MAAM,CAAC,CAE/C;IAED;;;;OAIG;IACH,IAAW,sBAAsB,CAAC,sBAAsB,EAAE,GAAG,CAAC,MAAM,CAAC,EAEpE;gBAGC,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,EACE,MAAwB,EACxB,kBAAuB,EACvB,sBAA2B,GAC5B,GAAE,gBAAqB;IAOnB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAO5E,OAAO,CAAC,UAAU;IAIlB,OAAO,CAAC,WAAW;CAKpB"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts new file mode 100644 index 00000000..3ff8522d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map new file mode 100644 index 00000000..8cdd3381 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.d.ts","sourceRoot":"","sources":["../../../../src/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAOlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAQzD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts new file mode 100644 index 00000000..1fac8884 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map new file mode 100644 index 00000000..30330904 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAElD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAYzD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts new file mode 100644 index 00000000..83cec201 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.native.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map new file mode 100644 index 00000000..f1b906f7 --- /dev/null +++ 
b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.native.d.ts","sourceRoot":"","sources":["../../../../src/policies/msRestUserAgentPolicy.native.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAGlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAazD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts new file mode 100644 index 00000000..4993bca5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts @@ -0,0 +1,3 @@ +import { RequestPolicyFactory } from "./requestPolicy"; +export declare function ndJsonPolicy(): RequestPolicyFactory; +//# sourceMappingURL=ndJsonPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map new file mode 100644 index 00000000..cad74b01 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ndJsonPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/ndJsonPolicy.ts"],"names":[],"mappings":"AAMA,OAAO,EAGL,oBAAoB,EAErB,MAAM,iBAAiB,CAAC;AAIzB,wBAAgB,YAAY,IAAI,oBAAoB,CAMnD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts new file mode 100644 index 00000000..41dab709 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts @@ -0,0 +1,11 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +export declare function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map new file mode 100644 index 00000000..54a4ec00 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"proxyPolicy.browser.d.ts","sourceRoot":"","sources":["../../../../src/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD,wBAAgB,uBAAuB,CAAC,SAAS,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAErF;AAED,wBAAgB,WAAW,CAAC,cAAc,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAMhF;AAED,qBAAa,WAAY,SAAQ,iBAAiB;gBACpC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB;IAK7D,WAAW,CAAC,QAAQ,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG9E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts new file mode 100644 index 00000000..77d535e7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts @@ -0,0 +1,37 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export declare const globalNoProxyList: string[]; +/** + * @internal + */ +export declare function loadNoProxy(): string[]; +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +export declare function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined; +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export declare function proxyPolicy(proxySettings?: ProxySettings, options?: { + /** a list of patterns to override those loaded from NO_PROXY environment variable. 
*/ + customNoProxyList?: string[]; +}): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + private customNoProxyList?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, proxySettings: ProxySettings, customNoProxyList?: string[] | undefined); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map new file mode 100644 index 00000000..37f48a0a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/proxyPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AAEjD,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;;GAGG;AACH,eAAO,MAAM,iBAAiB,EAAE,MAAM,EAAO,CAAC;AAyD9C;;GAEG;AACH,wBAAgB,WAAW,IAAI,MAAM,EAAE,CAWtC;AAED;;;;GAIG;AACH,wBAAgB,uBAAuB,CAAC,QAAQ,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAiBpF;AAED;;;;;;GAMG;AACH,wBAAgB,WAAW,CACzB,aAAa,CAAC,EAAE,aAAa,EAC7B,OAAO,CAAC,EAAE;IACR,sFAAsF;IACtF,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;CAC9B,GACA,oBAAoB,CAiBtB;AA2BD,qBAAa,WAAY,SAAQ,iBAAiB;IAIvC,aAAa,EAAE,aAAa;IACnC,OAAO,CAAC,iBAAiB,CAAC;gBAH1B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACtB,aAAa,EAAE,aAAa,EAC3B,iBAAiB,CAAC,sBAAU;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAa7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts new file mode 100644 index 00000000..0ffbe375 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts @@ -0,0 +1,33 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how redirect responses are handled. + */ +export interface RedirectOptions { + /** + * When true, redirect responses are followed. Defaults to true. + */ + handleRedirects: boolean; + /** + * The maximum number of times the redirect URL will be tried before + * failing. Defaults to 20. + */ + maxRetries?: number; +} +export declare const DefaultRedirectOptions: RedirectOptions; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +export declare function redirectPolicy(maximumRetries?: number): RequestPolicyFactory; +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. 
+ */ +export declare class RedirectPolicy extends BaseRequestPolicy { + readonly maxRetries: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, maxRetries?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=redirectPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map new file mode 100644 index 00000000..86218f92 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"redirectPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/redirectPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAOjD;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IAEzB;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,eAAO,MAAM,sBAAsB,EAAE,eAGpC,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,cAAc,CAAC,cAAc,SAAK,GAAG,oBAAoB,CAMxE;AAED;;GAEG;AACH,qBAAa,cAAe,SAAQ,iBAAiB;IACmB,QAAQ,CAAC,UAAU;gBAA7E,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB,EAAW,UAAU,SAAK;IAIvF,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts new file mode 100644 index 00000000..71b4ac91 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts @@ -0,0 +1,102 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. + */ +export declare type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; +/** + * The underlying structure of a request policy. + */ +export interface RequestPolicy { + /** + * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made. + * @param httpRequest - {@link WebResourceLike} describing the request to be made. + */ + sendRequest(httpRequest: WebResourceLike): Promise; +} +/** + * The base class from which all request policies derive. + */ +export declare abstract class BaseRequestPolicy implements RequestPolicy { + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + readonly _nextPolicy: RequestPolicy; + /** + * The options that can be passed to a given request policy. + */ + readonly _options: RequestPolicyOptionsLike; + /** + * The main method to implement that manipulates a request/response. + */ + protected constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy: RequestPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options: RequestPolicyOptionsLike); + /** + * Sends a network request based on the given web resource. 
+ * @param webResource - A {@link WebResourceLike} that describes an HTTP request to be made. + */ + abstract sendRequest(webResource: WebResourceLike): Promise<HttpOperationResponse>; + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export declare class RequestPolicyOptions { + private _logger?; + constructor(_logger?: HttpPipelineLogger | undefined); + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log.
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=requestPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map new file mode 100644 index 00000000..80d5c392 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/requestPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAC/D,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,oBAAY,oBAAoB,GAAG;IACjC,MAAM,CAAC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB,GAAG,aAAa,CAAC;CACrF,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;;OAGG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC,CAAC;CAC3E;AAED;;GAEG;AACH,8BAAsB,iBAAkB,YAAW,aAAa;IAK5D;;OAEG;IACH,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC;;OAEG;IACH,QAAQ,CAAC,QAAQ,EAAE,wBAAwB;IAX7C;;OAEG;IACH,SAAS;IACP;;OAEG;IACM,WAAW,EAAE,aAAa;IACnC;;OAEG;IACM,QAAQ,EAAE,wBAAwB;IAG7C;;;OAGG;aACa,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAEzF;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAIzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAGlE;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;;OAIG;IACH,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO,CAAC;IAEnD;;;;;OAKG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,oBAAoB;IACnB,OAAO,CAAC,OAAO,CAAC;gBAAR,OAAO,CAAC,gCAAoB;IAEhD;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAQzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAKlE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts new file mode 100644 index 00000000..8c70c795 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts @@ -0,0 +1,10 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function rpRegistrationPolicy(retryTimeout?: number): RequestPolicyFactory; +export declare class RPRegistrationPolicy extends BaseRequestPolicy { + readonly _retryTimeout: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _retryTimeout?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=rpRegistrationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map new file mode 100644 index 00000000..ddd83eb3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rpRegistrationPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":"AAIA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD,wBAAgB,oBAAoB,CAAC,YAAY,SAAK,GAAG,oBAAoB,CAM5E;AAED,qBAAa,oBAAqB,SAAQ,iBAAiB;IAIvD,QAAQ,CAAC,aAAa;gBAFtB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACpB,aAAa,SAAK;IAKtB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts new file mode 100644 index 00000000..69b45319 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts @@ -0,0 +1,20 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +export declare function signingPolicy(authenticationProvider: ServiceClientCredentials): RequestPolicyFactory; +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + */ +export declare class SigningPolicy extends BaseRequestPolicy { + authenticationProvider: ServiceClientCredentials; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, authenticationProvider: ServiceClientCredentials); + signRequest(request: WebResourceLike): Promise; + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=signingPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map new file mode 100644 index 00000000..f6ba5f45 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"signingPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/signingPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,wBAAwB,EAAE,MAAM,yCAAyC,CAAC;AACnF,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;;;GAIG;AACH,wBAAgB,aAAa,CAC3B,sBAAsB,EAAE,wBAAwB,GAC/C,oBAAoB,CAMtB;AAED;;GAEG;AACH,qBAAa,aAAc,SAAQ,iBAAiB;IAIzC,sBAAsB,EAAE,wBAAwB;gBAFvD,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACtB,sBAAsB,EAAE,wBAAwB;IAKzD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAIxD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts new file mode 100644 index 00000000..11b68ec0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts @@ -0,0 +1,28 @@ +import { BaseRequestPolicy, 
RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +export declare function systemErrorRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +export declare class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=systemErrorRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map new file mode 100644 index 00000000..1644efb7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"systemErrorRetryPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAYzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;;;;;;GAOG;AACH,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAatB;AAED;;;;;;GAMG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;IACzB,gBAAgB,EAAE,MAAM,CAAC;gBAGvB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM;IAapB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts new file mode 100644 index 00000000..2b43ae1e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts @@ -0,0 +1,35 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from 
"../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +declare type ResponseHandler = (httpRequest: WebResourceLike, response: HttpOperationResponse) => Promise; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +export declare function throttlingRetryPolicy(): RequestPolicyFactory; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export declare class ThrottlingRetryPolicy extends BaseRequestPolicy { + private _handleResponse; + private numberOfRetries; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _handleResponse?: ResponseHandler); + sendRequest(httpRequest: WebResourceLike): Promise; + private _defaultResponseHandler; + static parseRetryAfterHeader(headerValue: string): number | undefined; + static parseDateRetryAfterHeader(headerValue: string): number | undefined; +} +export {}; +//# sourceMappingURL=throttlingRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map new file mode 100644 index 00000000..847b1ce9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAIzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD,aAAK,eAAe,GAAG,CACrB,WAAW,EAAE,eAAe,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,OAAO,CAAC,qBAAqB,CAAC,CAAC;AAGpC;;;;;;;;;GASG;AACH,wBAAgB,qBAAqB,IAAI,oBAAoB,CAM5D;AAID;;;;;;;;GAQG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,OAAO,CAAC,eAAe,CAAkB;IACzC,OAAO,CAAC,eAAe,CAAK;gBAG1B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,eAAe,CAAC,EAAE,eAAe;IAMtB,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;YAYxE,uBAAuB;WAkCvB,qBAAqB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;WAS9D,yBAAyB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;CAWjF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts new file mode 100644 index 00000000..717316ec --- /dev/null +++ 
b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts @@ -0,0 +1,31 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Span } from "@azure/core-tracing"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to customize the tracing policy. + */ +export interface TracingPolicyOptions { + /** + * User agent used to better identify the outgoing requests traced by the tracing policy. + */ + userAgent?: string; +} +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +export declare function tracingPolicy(tracingOptions?: TracingPolicyOptions): RequestPolicyFactory; +/** + * A policy that wraps outgoing requests with a tracing span. + */ +export declare class TracingPolicy extends BaseRequestPolicy { + private userAgent?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, tracingOptions: TracingPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + tryCreateSpan(request: WebResourceLike): Span | undefined; + private tryProcessError; + private tryProcessResponse; +} +//# sourceMappingURL=tracingPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map new file mode 100644 index 00000000..ee3c7967 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tracingPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/tracingPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,IAAI,EAML,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAQjD;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;;;GAIG;AACH,wBAAgB,aAAa,CAAC,cAAc,GAAE,oBAAyB,GAAG,oBAAoB,CAM7F;AAED;;GAEG;AACH,qBAAa,aAAc,SAAQ,iBAAiB;IAClD,OAAO,CAAC,SAAS,CAAC,CAAS;gBAGzB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,cAAc,EAAE,oBAAoB;IAMzB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAqBlF,aAAa,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI,GAAG,SAAS;IAsDzD,OAAO,CAAC,eAAe;IAgBvB,OAAO,CAAC,kBAAkB;CAe3B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts new file mode 100644 index 00000000..bb2c3708 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts @@ -0,0 +1,49 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { getDefaultUserAgentKey } from "./msRestUserAgentPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Telemetry information. Key/value pairs to include inside the User-Agent string. + */ +export declare type TelemetryInfo = { + key?: string; + value?: string; +}; +/** + * Options for adding user agent details to outgoing requests. + */ +export interface UserAgentOptions { + /** + * String prefix to add to the user agent for outgoing requests. 
+ * Defaults to an empty string. + */ + userAgentPrefix?: string; +} +export declare const getDefaultUserAgentHeaderName: typeof getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +export declare function getDefaultUserAgentValue(): string; +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +export declare function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory; +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +export declare class UserAgentPolicy extends BaseRequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptions; + protected headerKey: string; + protected headerValue: string; + constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions, headerKey: string, headerValue: string); + sendRequest(request: WebResourceLike): Promise; + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request: WebResourceLike): void; +} +//# sourceMappingURL=userAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map new file mode 100644 index 00000000..f5466df3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"userAgentPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,sBAAsB,EAA2B,MAAM,yBAAyB,CAAC;AAG1F,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,oBAAY,aAAa,GAAG;IAAE,GAAG,CAAC,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAE7D;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;OAGG;IACH,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B;AAwBD,eAAO,MAAM,6BAA6B,+BAAyB,CAAC;AAEpE;;;GAGG;AACH,wBAAgB,wBAAwB,IAAI,MAAM,CAKjD;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAenF;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,iBAAiB;IAElD,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC,QAAQ,CAAC,QAAQ,EAAE,oBAAoB;IACvC,SAAS,CAAC,SAAS,EAAE,MAAM;IAC3B,SAAS,CAAC,WAAW,EAAE,MAAM;gBAHpB,WAAW,EAAE,aAAa,EAC1B,QAAQ,EAAE,oBAAoB,EAC7B,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAKrE;;OAEG;IACH,kBAAkB,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI;CASnD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts new file mode 100644 index 00000000..6a1ee075 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts @@ -0,0 +1,14 @@ +/// +import * as http from "http"; +import * as https from "https"; +import * as tunnel from "tunnel"; +import { HttpHeadersLike } from "./httpHeaders"; +import { ProxySettings } from "./serviceClient"; +export declare type ProxyAgent = { + isHttps: boolean; + agent: http.Agent | https.Agent; +}; +export declare function createProxyAgent(requestUrl: string, proxySettings: ProxySettings, headers?: HttpHeadersLike): ProxyAgent; +export declare 
function isUrlHttps(url: string): boolean; +export declare function createTunnel(isRequestHttps: boolean, isProxyHttps: boolean, tunnelOptions: tunnel.HttpsOverHttpsOptions): http.Agent | https.Agent; +//# sourceMappingURL=proxyAgent.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map new file mode 100644 index 00000000..bef30cb2 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.d.ts","sourceRoot":"","sources":["../../../src/proxyAgent.ts"],"names":[],"mappings":";AAGA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGhD,oBAAY,UAAU,GAAG;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAA;CAAE,CAAC;AAC/E,wBAAgB,gBAAgB,CAC9B,UAAU,EAAE,MAAM,EAClB,aAAa,EAAE,aAAa,EAC5B,OAAO,CAAC,EAAE,eAAe,GACxB,UAAU,CA+BZ;AAED,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAG/C;AAED,wBAAgB,YAAY,CAC1B,cAAc,EAAE,OAAO,EACvB,YAAY,EAAE,OAAO,EACrB,aAAa,EAAE,MAAM,CAAC,qBAAqB,GAC1C,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAU1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts new file mode 100644 index 00000000..515f1f52 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts @@ -0,0 +1,26 @@ +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export declare enum QueryCollectionFormat { + /** + * CSV: Each pair of segments joined by a single comma. + */ + Csv = ",", + /** + * SSV: Each pair of segments joined by a single space character. + */ + Ssv = " ", + /** + * TSV: Each pair of segments joined by a single tab character. + */ + Tsv = "\t", + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + Pipes = "|", + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` + */ + Multi = "Multi" +} +//# sourceMappingURL=queryCollectionFormat.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map new file mode 100644 index 00000000..2ecbfd11 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.d.ts","sourceRoot":"","sources":["../../../src/queryCollectionFormat.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,qBAAqB;IAC/B;;OAEG;IACH,GAAG,MAAM;IACT;;OAEG;IACH,GAAG,MAAM;IACT;;OAEG;IACH,GAAG,OAAO;IACV;;OAEG;IACH,KAAK,MAAM;IACX;;OAEG;IACH,KAAK,UAAU;CAChB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/restError.d.ts b/node_modules/@azure/core-http/types/latest/src/restError.d.ts new file mode 100644 index 00000000..fe375538 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/restError.d.ts @@ -0,0 +1,37 @@ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +/** + * An error resulting from an HTTP request to a service endpoint. 
+ */ +export declare class RestError extends Error { + /** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ + static readonly REQUEST_SEND_ERROR: string; + /** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ + static readonly PARSE_ERROR: string; + /** + * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT). + */ + code?: string; + /** + * The HTTP status code of the response, if one was returned. + */ + statusCode?: number; + /** + * Outgoing request. + */ + request?: WebResourceLike; + /** + * Incoming response. + */ + response?: HttpOperationResponse; + /** + * Any additional details. In the case of deserialization errors, can be the processed response. + */ + details?: unknown; + constructor(message: string, code?: string, statusCode?: number, request?: WebResourceLike, response?: HttpOperationResponse); +} +//# sourceMappingURL=restError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/restError.d.ts.map b/node_modules/@azure/core-http/types/latest/src/restError.d.ts.map new file mode 100644 index 00000000..75a4cb73 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/restError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.d.ts","sourceRoot":"","sources":["../../../src/restError.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAEhE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAKhD;;GAEG;AACH,qBAAa,SAAU,SAAQ,KAAK;IAClC;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,kBAAkB,EAAE,MAAM,CAAwB;IAClE;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAiB;IAEpD;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,CAAC,EAAE,eAAe,CAAC;IAC1B;;OAEG;IACH,QAAQ,CAAC,EAAE,qBAAqB,CAAC;IACjC;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;gBAEhB,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB;CAkBnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serializer.d.ts b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts new file mode 100644 index 00000000..5915bf85 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts @@ -0,0 +1,356 @@ +import { SerializerOptions } from "./util/serializer.common"; +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +export declare class Serializer { + /** + * The provided model mapper. + */ + readonly modelMappers: { + [key: string]: any; + }; + /** + * Whether the contents are XML or not. + */ + readonly isXML?: boolean | undefined; + constructor( + /** + * The provided model mapper. + */ + modelMappers?: { + [key: string]: any; + }, + /** + * Whether the contents are XML or not. + */ + isXML?: boolean | undefined); + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. 
+ * @param objectName - Name of the object. Used in the error messages. + */ + validateConstraints(mapper: Mapper, value: unknown, objectName: string): void; + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper: Mapper, object: unknown, objectName?: string, options?: SerializerOptions): any; + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper: Mapper, responseBody: unknown, objectName: string, options?: SerializerOptions): any; +} +/** + * Description of various value constraints such as integer ranges and string regex. + */ +export interface MapperConstraints { + /** + * The value should be less than or equal to the `InclusiveMaximum` value. + */ + InclusiveMaximum?: number; + /** + * The value should be less than the `ExclusiveMaximum` value. + */ + ExclusiveMaximum?: number; + /** + * The value should be greater than or equal to the `InclusiveMinimum` value. + */ + InclusiveMinimum?: number; + /** + * The value should be greater than the `InclusiveMinimum` value. + */ + ExclusiveMinimum?: number; + /** + * The length should be smaller than the `MaxLength`. + */ + MaxLength?: number; + /** + * The length should be bigger than the `MinLength`. + */ + MinLength?: number; + /** + * The value must match the pattern. + */ + Pattern?: RegExp; + /** + * The value must contain fewer items than the MaxItems value. + */ + MaxItems?: number; + /** + * The value must contain more items than the `MinItems` value. + */ + MinItems?: number; + /** + * The value must contain only unique items. + */ + UniqueItems?: true; + /** + * The value should be exactly divisible by the `MultipleOf` value. + */ + MultipleOf?: number; +} +/** + * Type of the mapper. Includes known mappers. + */ +export declare type MapperType = SimpleMapperType | CompositeMapperType | SequenceMapperType | DictionaryMapperType | EnumMapperType; +/** + * The type of a simple mapper. + */ +export interface SimpleMapperType { + /** + * Name of the type of the property. + */ + name: "Base64Url" | "Boolean" | "ByteArray" | "Date" | "DateTime" | "DateTimeRfc1123" | "Object" | "Stream" | "String" | "TimeSpan" | "UnixTime" | "Uuid" | "Number" | "any"; +} +/** + * Helps build a mapper that describes how to map a set of properties of an object based on other mappers. + * + * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`. + */ +export interface CompositeMapperType { + /** + * Name of the composite mapper type. + */ + name: "Composite"; + /** + * Use `className` to reference another type definition. + */ + className?: string; + /** + * Use `modelProperties` when the reference to the other type has been resolved. 
+ */ + modelProperties?: { + [propertyName: string]: Mapper; + }; + /** + * Used when a model has `additionalProperties: true`. Allows the generic processing of unnamed model properties on the response object. + */ + additionalProperties?: Mapper; + /** + * The name of the top-most parent scheme, the one that has no parents. + */ + uberParent?: string; + /** + * A polymorphic discriminator. + */ + polymorphicDiscriminator?: PolymorphicDiscriminator; +} +/** + * Helps build a mapper that describes how to parse a sequence of mapped values. + */ +export interface SequenceMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Sequence"; + /** + * The mapper to use to map each one of the properties of the sequence. + */ + element: Mapper; +} +/** + * Helps build a mapper that describes how to parse a dictionary of mapped values. + */ +export interface DictionaryMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Dictionary"; + /** + * The mapper to use to map the value of each property in the dictionary. + */ + value: Mapper; +} +/** + * Helps build a mapper that describes how to parse an enum value. + */ +export interface EnumMapperType { + /** + * Name of the enum type mapper. + */ + name: "Enum"; + /** + * Values allowed by this mapper. + */ + allowedValues: any[]; +} +/** + * The base definition of a mapper. Can be used for XML and plain JavaScript objects. + */ +export interface BaseMapper { + /** + * Name for the xml element + */ + xmlName?: string; + /** + * Xml element namespace + */ + xmlNamespace?: string; + /** + * Xml element namespace prefix + */ + xmlNamespacePrefix?: string; + /** + * Determines if the current property should be serialized as an attribute of the parent xml element + */ + xmlIsAttribute?: boolean; + /** + * Name for the xml elements when serializing an array + */ + xmlElementName?: string; + /** + * Whether or not the current property should have a wrapping XML element + */ + xmlIsWrapped?: boolean; + /** + * Whether or not the current property is readonly + */ + readOnly?: boolean; + /** + * Whether or not the current property is a constant + */ + isConstant?: boolean; + /** + * Whether or not the current property is required + */ + required?: boolean; + /** + * Whether or not the current property allows mull as a value + */ + nullable?: boolean; + /** + * The name to use when serializing + */ + serializedName?: string; + /** + * Type of the mapper + */ + type: MapperType; + /** + * Default value when one is not explicitly provided + */ + defaultValue?: any; + /** + * Constraints to test the current value against + */ + constraints?: MapperConstraints; +} +/** + * Mappers are definitions of the data models used in the library. + * These data models are part of the Operation or Client definitions in the responses or parameters. + */ +export declare type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; +/** + * Used to disambiguate discriminated type unions. + * For example, if response can have many shapes but also includes a 'kind' field (or similar), + * that field can be used to determine how to deserialize the response to the correct type. + */ +export interface PolymorphicDiscriminator { + /** + * Name of the discriminant property in the original JSON payload, e.g. `@odata.kind`. + */ + serializedName: string; + /** + * Name to use on the resulting object instead of the original property name. + * Useful since the JSON property could be difficult to work with. 
+ * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`. + */ + clientName: string; + /** + * It may contain any other property. + */ + [key: string]: string; +} +/** + * A mapper composed of other mappers. + */ +export interface CompositeMapper extends BaseMapper { + /** + * The type descriptor of the `CompositeMapper`. + */ + type: CompositeMapperType; +} +/** + * A mapper describing arrays. + */ +export interface SequenceMapper extends BaseMapper { + /** + * The type descriptor of the `SequenceMapper`. + */ + type: SequenceMapperType; +} +/** + * A mapper describing plain JavaScript objects used as key/value pairs. + */ +export interface DictionaryMapper extends BaseMapper { + /** + * The type descriptor of the `DictionaryMapper`. + */ + type: DictionaryMapperType; + /** + * Optionally, a prefix to add to the header collection. + */ + headerCollectionPrefix?: string; +} +/** + * A mapper describing an enum value. + */ +export interface EnumMapper extends BaseMapper { + /** + * The type descriptor of the `EnumMapper`. + */ + type: EnumMapperType; +} +/** + * An interface representing an URL parameter value. + */ +export interface UrlParameterValue { + /** + * The URL value. + */ + value: string; + /** + * Whether to keep or skip URL encoding. + */ + skipUrlEncoding: boolean; +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +export declare function serializeObject(toSerialize: unknown): any; +/** + * String enum containing the string types of property mappers. + */ +export declare const MapperType: { + Date: "Date"; + Base64Url: "Base64Url"; + Boolean: "Boolean"; + ByteArray: "ByteArray"; + DateTime: "DateTime"; + DateTimeRfc1123: "DateTimeRfc1123"; + Object: "Object"; + Stream: "Stream"; + String: "String"; + TimeSpan: "TimeSpan"; + UnixTime: "UnixTime"; + Number: "Number"; + Composite: "Composite"; + Sequence: "Sequence"; + Dictionary: "Dictionary"; + Enum: "Enum"; +}; +//# sourceMappingURL=serializer.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map new file mode 100644 index 00000000..7076d7ff --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"serializer.d.ts","sourceRoot":"","sources":["../../../src/serializer.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,iBAAiB,EAA4B,MAAM,0BAA0B,CAAC;AAIvF;;;;GAIG;AACH,qBAAa,UAAU;IAEnB;;OAEG;aACa,YAAY,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE;IACpD;;OAEG;aACa,KAAK,CAAC;;IAPtB;;OAEG;IACa,YAAY,GAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAO;IACzD;;OAEG;IACa,KAAK,CAAC,qBAAS;IAGjC;;;;;OAKG;IACH,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,UAAU,EAAE,MAAM,GAAG,IAAI;IAmE7E;;;;;;;;OAQG;IACH,SAAS,CACP,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,OAAO,EACf,UAAU,CAAC,EAAE,MAAM,EACnB,OAAO,GAAE,iBAAsB,GAC9B,GAAG;IA6FN;;;;;;;;OAQG;IACH,WAAW,CACT,MAAM,EAAE,MAAM,EACd,YAAY,EAAE,OAAO,EACrB,UAAU,EAAE,MAAM,EAClB,OAAO,GAAE,iBAAsB,GAC9B,GAAG;CAmGP;AAkyBD;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,WAAW,CAAC,EAAE,IAAI,CAAC;IACnB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED;;GAEG;AACH,oBAAY,UAAU,GAClB,gBAAgB,GAChB,mBAAmB,GACnB,kBAAkB,GAClB,oBAAoB,GACpB,cAAc,CAAC;AAEnB;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;OAEG;IACH,IAAI,EACA,WAAW,GACX,SAAS,GACT,WAAW,GACX,MAAM,GACN,UAAU,GACV,iBAAiB,GACjB,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,UAAU,GACV,UAAU,GACV,MAAM,GACN,QAAQ,GACR,KAAK,CAAC;CACX;AAED;;;;GAIG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;IAElB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,eAAe,CAAC,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAErD;;OAEG;IACH,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAE9B;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,wBAAwB,CAAC,EAAE,wBAAwB,CAAC;CACrD;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;CACjB;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,IAAI,EAAE,YAAY,CAAC;IACnB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,aAAa,EAAE,GAAG,EAAE,CAAC;CACtB;AAED;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;IACjB;;OAEG;IACH,YAAY,CAAC,EAAE,GAAG,CAAC;IACnB;;OAEG;IACH,WAAW,CAAC,EAAE,iBAAiB,CAAC;CACjC;AAED;;;GAGG;AACH,oBAAY,MAAM,GAAG,UAAU,GAAG,eAAe,GAAG,cAAc,GAAG,gBAAgB,GAAG,UAAU,CAAC;AAEnG;;;;GAIG;AACH,MAAM,WAAW,wBAAwB;IACvC;;OAEG;IACH,cAAc,EAAE,MAAM,CAAC;IACvB;;;;OAIG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,WAAW,eAAgB,SAAQ,UAAU;IACjD;;OAEG;IACH,IAAI,EAAE,mBAAmB,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,cAAe,SAAQ,UAAU;IAChD;;OAEG;IACH,IAAI,EAAE,kBAAkB,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,gBAAiB,SAAQ,UAAU;IAClD;;OAEG;IACH,IAAI,EAAE,oBAAoB,CAAC;IAC3B;;OAEG;IACH,sBAAsB,CAAC,EAAE,MAAM,CAAC;CACjC;AAED;;GAEG;AACH,MAAM,WAAW,UAAW,SAAQ,UAAU;IAC5C;;OAEG;IACH,IAAI,EAAE,cAAc,CAAC;CACtB;AAED;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;
IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;CAC1B;AAED;;GAEG;AACH,wBAAgB,eAAe,CAAC,WAAW,EAAE,OAAO,GAAG,GAAG,CAsBzD;AAaD;;GAEG;AAEH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;CAiBrB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts new file mode 100644 index 00000000..7fb2de30 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts @@ -0,0 +1,168 @@ +import { Mapper, Serializer } from "./serializer"; +import { DeserializationContentTypes } from "./policies/deserializationPolicy"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { ParameterPath } from "./operationParameter"; +import { OperationSpec } from "./operationSpec"; +import { RequestPrepareOptions, WebResourceLike } from "./webResource"; +import { RequestPolicyFactory } from "./policies/requestPolicy"; +import { ServiceCallback } from "./util/utils"; +import { TokenCredential } from "@azure/core-auth"; +import { HttpClient } from "./httpClient"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { InternalPipelineOptions } from "./pipelineOptions"; +import { OperationArguments } from "./operationArguments"; +import { OperationResponse } from "./operationResponse"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +/** + * Options to configure a proxy for outgoing requests (Node.js only). + */ +export interface ProxySettings { + /** + * The proxy's host address. + */ + host: string; + /** + * The proxy host's port. + */ + port: number; + /** + * The user name to authenticate with the proxy, if required. + */ + username?: string; + /** + * The password to authenticate with the proxy, if required. + */ + password?: string; +} +/** + * An alias of {@link ProxySettings} for future use. + */ +export declare type ProxyOptions = ProxySettings; +/** + * Options to be provided while creating the client. + */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: RequestPolicyFactory[] | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. + */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. 
+ */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. + */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-useragent" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * If specified, will be used to build the BearerTokenAuthenticationPolicy. + */ + credentialScopes?: string | string[]; +} +/** + * ServiceClient sends service requests and receives responses. + */ +export declare class ServiceClient { + /** + * If specified, this is the base URI that requests will be made against for this ServiceClient. + * If it is not specified, then all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + protected requestContentType?: string; + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient; + private readonly _requestPolicyOptions; + private readonly _requestPolicyFactories; + private readonly _withCredentials; + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(credentials?: TokenCredential | ServiceClientCredentials, options?: ServiceClientOptions); + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. + */ + sendOperationRequest(operationArguments: OperationArguments, operationSpec: OperationSpec, callback?: ServiceCallback): Promise; +} +export declare function serializeRequestBody(serviceClient: ServiceClient, httpRequest: WebResourceLike, operationArguments: OperationArguments, operationSpec: OperationSpec): void; +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. 
+ */ +export declare function createPipelineFromOptions(pipelineOptions: InternalPipelineOptions, authPolicyFactory?: RequestPolicyFactory): ServiceClientOptions; +export declare type PropertyParent = { + [propertyName: string]: any; +}; +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export declare function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent; +export declare function getOperationArgumentValueFromParameterPath(serviceClient: ServiceClient, operationArguments: OperationArguments, parameterPath: ParameterPath, parameterMapper: Mapper, serializer: Serializer): any; +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. + */ +export declare function flattenResponse(_response: HttpOperationResponse, responseSpec: OperationResponse | undefined): RestResponse; +//# sourceMappingURL=serviceClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map new file mode 100644 index 00000000..43bc7ac5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.d.ts","sourceRoot":"","sources":["../../../src/serviceClient.ts"],"names":[],"mappings":"AAIA,OAAO,EAAqC,MAAM,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AACjG,OAAO,EAEL,2BAA2B,EAE5B,MAAM,kCAAkC,CAAC;AAI1C,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAE9E,OAAO,EAEL,aAAa,EAGd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,aAAa,EAAgC,MAAM,iBAAiB,CAAC;AAC9E,OAAO,EAEL,qBAAqB,EAErB,eAAe,EAEhB,MAAM,eAAe,CAAC;AACvB,OAAO,EAEL,oBAAoB,EAErB,MAAM,0BAA0B,CAAC;AAElC,OAAO,EAAE,eAAe,EAAU,MAAM,cAAc,CAAC;AACvD,OAAO,EAAE,eAAe,EAAqB,MAAM,kBAAkB,CAAC;AAMtE,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,uBAAuB,EAAE,MAAM,mBAAmB,CAAC;AAC5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAgBlF;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,oBAAY,YAAY,GAAG,aAAa,CAAC;AAEzC;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,sBAAsB,CAAC,EACnB,oBAAoB,EAAE,GACtB,CAAC,CAAC,6BAA6B,EAAE,oBAAoB,EAAE,KAAK,IAAI,GAAG,oBAAoB,EAAE,CAAC,CAAC;IAC/F;;OAEG;IACH,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB;;OAEG;IACH,kBAAkB,CAAC,EAAE,kBAAkB,CAAC;IACxC;;OAEG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB;;OAEG;IACH,0BAA0B,CAAC,EAAE,MAAM,CAAC;IACpC;;OAEG;IACH,6BAA6B,CAAC,EAAE,OAAO,CAAC;IACxC;;;OAGG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B;;;OAGG;IACH,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC;;OAEG;IACH,2BAA2B,CAAC,EAAE,2BAA2B,CAAC;IAC1D;;;;OAIG;IACH,mBAAmB,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,0BAA0B,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAChF;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,gBAAgB,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAC5D;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;CACtC;AAED;;GAEG;AACH,qBAAa,aAAa;IACxB;;;OAGG;IACH,SAAS,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE3B;;;OAGG;IACH,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAEtC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAA
C,QAAQ,CAAC,qBAAqB,CAAuB;IAE7D,OAAO,CAAC,QAAQ,CAAC,uBAAuB,CAAyB;IACjE,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAU;IAE3C;;;;OAIG;gBAED,WAAW,CAAC,EAAE,eAAe,GAAG,wBAAwB,EAExD,OAAO,CAAC,EAAE,oBAAoB;IA+EhC;;OAEG;IACH,WAAW,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IA8B7F;;;;;OAKG;IACG,oBAAoB,CACxB,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,QAAQ,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,GAC9B,OAAO,CAAC,YAAY,CAAC;CA0OzB;AAED,wBAAgB,oBAAoB,CAClC,aAAa,EAAE,aAAa,EAC5B,WAAW,EAAE,eAAe,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,GAC3B,IAAI,CAyGN;AAqFD;;;;;GAKG;AACH,wBAAgB,yBAAyB,CACvC,eAAe,EAAE,uBAAuB,EACxC,iBAAiB,CAAC,EAAE,oBAAoB,GACvC,oBAAoB,CAmFtB;AAED,oBAAY,cAAc,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,CAAC;AAE7D;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,cAAc,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,cAAc,CAYhG;AAiBD,wBAAgB,0CAA0C,CACxD,aAAa,EAAE,aAAa,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,eAAe,EAAE,MAAM,EACvB,UAAU,EAAE,UAAU,GACrB,GAAG,CAmEL;AA6BD;;;;;GAKG;AACH,wBAAgB,eAAe,CAC7B,SAAS,EAAE,qBAAqB,EAChC,YAAY,EAAE,iBAAiB,GAAG,SAAS,GAC1C,YAAY,CAyEd"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/url.d.ts b/node_modules/@azure/core-http/types/latest/src/url.d.ts new file mode 100644 index 00000000..9e3359ad --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/url.d.ts @@ -0,0 +1,160 @@ +/** + * A class that handles the query portion of a URLBuilder. + */ +export declare class URLQuery { + private readonly _rawQuery; + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any(): boolean; + /** + * Get the keys of the query string. + */ + keys(): string[]; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName: string, parameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName: string): string | string[] | undefined; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString(): string; + /** + * Parse a URLQuery from the provided text. + */ + static parse(text: string): URLQuery; +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export declare class URLBuilder { + private _scheme; + private _host; + private _port; + private _path; + private _query; + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme: string | undefined): void; + /** + * Get the scheme that has been set in this URL. + */ + getScheme(): string | undefined; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host: string | undefined): void; + /** + * Get the host that has been set in this URL. + */ + getHost(): string | undefined; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port: number | string | undefined): void; + /** + * Get the port that has been set in this URL. 
+ */ + getPort(): string | undefined; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path: string | undefined): void; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path: string | undefined): void; + /** + * Get the path that has been set in this URL. + */ + getPath(): string | undefined; + /** + * Set the query in this URL. + */ + setQuery(query: string | undefined): void; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName: string): string | string[] | undefined; + /** + * Get the query in this URL. + */ + getQuery(): string | undefined; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set; + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString(): string; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue: string, replaceValue: string): void; + /** + * Parses a given string URL into a new {@link URLBuilder}. + */ + static parse(text: string): URLBuilder; +} +declare type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; +declare type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; +export declare class URLToken { + readonly text: string; + readonly type: URLTokenType; + constructor(text: string, type: URLTokenType); + static scheme(text: string): URLToken; + static host(text: string): URLToken; + static port(text: string): URLToken; + static path(text: string): URLToken; + static query(text: string): URLToken; +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export declare function isAlphaNumericCharacter(character: string): boolean; +/** + * A class that tokenizes URL strings. + */ +export declare class URLTokenizer { + readonly _text: string; + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + constructor(_text: string, state?: URLTokenizerState); + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current(): URLToken | undefined; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. 
+ */ + next(): boolean; +} +export {}; +//# sourceMappingURL=url.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/url.d.ts.map b/node_modules/@azure/core-http/types/latest/src/url.d.ts.map new file mode 100644 index 00000000..4604c580 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/url.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"url.d.ts","sourceRoot":"","sources":["../../../src/url.ts"],"names":[],"mappings":"AAOA;;GAEG;AACH,qBAAa,QAAQ;IACnB,OAAO,CAAC,QAAQ,CAAC,SAAS,CAA2D;IAErF;;OAEG;IACI,GAAG,IAAI,OAAO;IAIrB;;OAEG;IACI,IAAI,IAAI,MAAM,EAAE;IAIvB;;;;OAIG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,EAAE,cAAc,EAAE,OAAO,GAAG,IAAI;IAgBhE;;;OAGG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIhE;;OAEG;IACI,QAAQ,IAAI,MAAM;IAoBzB;;OAEG;WACW,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CA0D5C;AAED;;GAEG;AACH,qBAAa,UAAU;IACrB,OAAO,CAAC,OAAO,CAAqB;IACpC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,MAAM,CAAuB;IAErC;;;OAGG;IACI,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQlD;;OAEG;IACI,SAAS,IAAI,MAAM,GAAG,SAAS;IAItC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQ9C;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,IAAI;IAQvD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAgB9C;;;OAGG;IACI,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAkBjD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;OAEG;IACI,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQhD;;;;OAIG;IACI,iBAAiB,CAAC,kBAAkB,EAAE,MAAM,EAAE,mBAAmB,EAAE,OAAO,GAAG,IAAI;IASxF;;;OAGG;IACI,sBAAsB,CAAC,kBAAkB,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIxF;;OAEG;IACI,QAAQ,IAAI,MAAM,GAAG,SAAS;IAIrC;;OAEG;IACH,OAAO,CAAC,GAAG;IAsCX;;;OAGG;IACI,QAAQ,IAAI,MAAM;IA6BzB;;;OAGG;IACI,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,IAAI;IAUlE;;OAEG;WACW,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,UAAU;CAK9C;AAED,aAAK,iBAAiB,GAAG,QAAQ,GAAG,gBAAgB,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,CAAC;AAEnG,aAAK,YAAY,GAAG,QAAQ,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;AAElE,qBAAa,QAAQ;aACgB,IAAI,EAAE,MAAM;aAAkB,IAAI,EAAE,YAAY;gBAAhD,IAAI,EAAE,MAAM,EAAkB,IAAI,EAAE,YAAY;WAErE,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI9B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CAG5C;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAOlE;AAED;;GAEG;AACH,qBAAa,YAAY;IAMJ,QAAQ,CAAC,KAAK,EAAE,MAAM;IALzC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,aAAa,EAAE,iBAAiB,CAAC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,QAAQ,GAAG,SAAS,CAAC;gBAER,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,iBAAiB;IAMpE;;;OAGG;IACI,OAAO,IAAI,QAAQ,GAAG,SAAS;IAItC;;OAEG;IACI,IAAI,IAAI,OAAO;CAmCvB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts new file mode 100644 index 00000000..e54d7597 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. 
+ * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map new file mode 100644 index 00000000..1d1a90ce --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.d.ts","sourceRoot":"","sources":["../../../../src/util/base64.browser.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAMzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAOtD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts new file mode 100644 index 00000000..63e96309 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map new file mode 100644 index 00000000..2b42c1c7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.d.ts","sourceRoot":"","sources":["../../../../src/util/base64.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAKzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAEtD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts new file mode 100644 index 00000000..b474b275 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts @@ -0,0 +1,72 @@ +/** + * A set of constants used internally when processing requests. + */ +export declare const Constants: { + /** + * The core-http version + */ + coreHttpVersion: string; + /** + * Specifies HTTP. + */ + HTTP: string; + /** + * Specifies HTTPS. + */ + HTTPS: string; + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: string; + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: string; + /** + * Specifies NO Proxy. + */ + NO_PROXY: string; + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: string; + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: string; + GET: string; + DELETE: string; + POST: string; + MERGE: string; + HEAD: string; + PATCH: string; + }; + StatusCodes: { + TooManyRequests: number; + ServiceUnavailable: number; + }; + }; + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. 
+ */ + AUTHORIZATION: string; + AUTHORIZATION_SCHEME: string; + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: string; + /** + * The UserAgent header. + */ + USER_AGENT: string; + }; +}; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map new file mode 100644 index 00000000..04b9b888 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../../src/util/constants.ts"],"names":[],"mappings":"AAEA;;GAEG;AACH,eAAO,MAAM,SAAS;IACpB;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;;QAID;;WAEG;;;;;;;;;;;;;;;IAiBL;;OAEG;;QAED;;WAEG;;;QAKH;;;;WAIG;;QAGH;;WAEG;;;CAGN,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts b/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts new file mode 100644 index 00000000..12c86ad7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts @@ -0,0 +1,15 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * @param abortSignal - The abortSignal associated with containing operation. + * @param abortErrorMsg - The abort error message associated with containing operation. 
+ * @returns - Resolved promise + */ +export declare function delay(delayInMs: number, value?: T, options?: { + abortSignal?: AbortSignalLike; + abortErrorMsg?: string; +}): Promise; +//# sourceMappingURL=delay.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts.map new file mode 100644 index 00000000..53e5b20e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.d.ts","sourceRoot":"","sources":["../../../../src/util/delay.ts"],"names":[],"mappings":"AAGA,OAAO,EAAc,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAKtE;;;;;;;;GAQG;AACH,wBAAgB,KAAK,CAAC,CAAC,EACrB,SAAS,EAAE,MAAM,EACjB,KAAK,CAAC,EAAE,CAAC,EACT,OAAO,CAAC,EAAE;IACR,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB,GACA,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC,CAsCnB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts new file mode 100644 index 00000000..04a0945f --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts @@ -0,0 +1,40 @@ +import { HttpOperationResponse } from "../coreHttp"; +export declare const DEFAULT_CLIENT_RETRY_COUNT = 3; +export declare const DEFAULT_CLIENT_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MAX_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MIN_RETRY_INTERVAL: number; +export declare function isNumber(n: unknown): n is number; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial chekck on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise. + */ +export declare function shouldRetry(retryLimit: number, predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean, retryData: RetryData, response?: HttpOperationResponse, error?: RetryError): boolean; +/** + * @internal + * Updates the retry data for the next attempt. + * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation"s error, if any. 
+ */ +export declare function updateRetryData(retryOptions: { + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; +}, retryData?: RetryData, err?: RetryError): RetryData; +//# sourceMappingURL=exponentialBackoffStrategy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map new file mode 100644 index 00000000..9a5f40b9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialBackoffStrategy.d.ts","sourceRoot":"","sources":["../../../../src/util/exponentialBackoffStrategy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,aAAa,CAAC;AAEpD,eAAO,MAAM,0BAA0B,IAAI,CAAC;AAE5C,eAAO,MAAM,6BAA6B,QAAY,CAAC;AACvD,eAAO,MAAM,iCAAiC,QAAY,CAAC;AAC3D,eAAO,MAAM,iCAAiC,QAAW,CAAC;AAE1D,wBAAgB,QAAQ,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,IAAI,MAAM,CAEhD;AACD,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,UAAU,CAAC;CACpB;AAED,MAAM,WAAW,UAAW,SAAQ,KAAK;IACvC,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED;;;;;;;;GAQG;AACH,wBAAgB,WAAW,CACzB,UAAU,EAAE,MAAM,EAClB,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,qBAAqB,EAAE,KAAK,CAAC,EAAE,UAAU,KAAK,OAAO,EAC5E,SAAS,EAAE,SAAS,EACpB,QAAQ,CAAC,EAAE,qBAAqB,EAChC,KAAK,CAAC,EAAE,UAAU,GACjB,OAAO,CAMT;AAED;;;;;;;GAOG;AACH,wBAAgB,eAAe,CAC7B,YAAY,EAAE;IAAE,aAAa,EAAE,MAAM,CAAC;IAAC,gBAAgB,EAAE,MAAM,CAAC;IAAC,gBAAgB,EAAE,MAAM,CAAA;CAAE,EAC3F,SAAS,GAAE,SAA+C,EAC1D,GAAG,CAAC,EAAE,UAAU,GACf,SAAS,CAyBX"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts new file mode 100644 index 00000000..e136c538 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts @@ -0,0 +1,2 @@ +export declare const custom: {}; +//# sourceMappingURL=inspect.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map new file mode 100644 index 00000000..d35d3353 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.browser.d.ts","sourceRoot":"","sources":["../../../../src/util/inspect.browser.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,MAAM,IAAK,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts new file mode 100644 index 00000000..cd664b8c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts @@ -0,0 +1,2 @@ +export declare const custom: symbol; +//# sourceMappingURL=inspect.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map new file mode 100644 index 00000000..37ed38ba --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.d.ts","sourceRoot":"","sources":["../../../../src/util/inspect.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,MAAM,QAAiB,CAAC"} \ No newline at end of file 
diff --git a/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts new file mode 100644 index 00000000..c00416aa --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts @@ -0,0 +1,25 @@ +export interface SanitizerOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; +} +export declare class Sanitizer { + allowedHeaderNames: Set; + allowedQueryParameters: Set; + constructor({ allowedHeaderNames, allowedQueryParameters }?: SanitizerOptions); + sanitize(obj: unknown): string; + private sanitizeHeaders; + private sanitizeQuery; + private sanitizeObject; + private sanitizeUrl; +} +//# sourceMappingURL=sanitizer.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map new file mode 100644 index 00000000..e5cf8f54 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"sanitizer.d.ts","sourceRoot":"","sources":["../../../../src/util/sanitizer.ts"],"names":[],"mappings":"AAMA,MAAM,WAAW,gBAAgB;IAC/B;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;IAE9B;;;OAGG;IACH,sBAAsB,CAAC,EAAE,MAAM,EAAE,CAAC;CACnC;AAkDD,qBAAa,SAAS;IACb,kBAAkB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IAChC,sBAAsB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;gBAE/B,EAAE,kBAAuB,EAAE,sBAA2B,EAAE,GAAE,gBAAqB;IAapF,QAAQ,CAAC,GAAG,EAAE,OAAO,GAAG,MAAM;IA2CrC,OAAO,CAAC,eAAe;IAIvB,OAAO,CAAC,aAAa;IAIrB,OAAO,CAAC,cAAc;IAsBtB,OAAO,CAAC,WAAW;CAsBpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts new file mode 100644 index 00000000..cd9881b5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts @@ -0,0 +1,26 @@ +/** + * Default key used to access the XML attributes. + */ +export declare const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +export declare const XML_CHARKEY = "_"; +/** + * Options to govern behavior of xml parser and builder. + */ +export interface SerializerOptions { + /** + * indicates the name of the root element in the resulting XML when building XML. + */ + rootName?: string; + /** + * indicates whether the root element is to be included or not in the output when parsing XML. + */ + includeRoot?: boolean; + /** + * key used to access the XML value content when parsing XML. 
+ */ + xmlCharKey?: string; +} +//# sourceMappingURL=serializer.common.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map new file mode 100644 index 00000000..c6abf329 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.common.d.ts","sourceRoot":"","sources":["../../../../src/util/serializer.common.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,WAAW,MAAM,CAAC;AAC/B;;GAEG;AACH,eAAO,MAAM,WAAW,MAAM,CAAC;AAE/B;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts new file mode 100644 index 00000000..5ebc3484 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts @@ -0,0 +1,5 @@ +/** + * Maximum number of retries for the throttling retry policy + */ +export declare const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +//# sourceMappingURL=throttlingRetryStrategy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map new file mode 100644 index 00000000..5403e4d7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryStrategy.d.ts","sourceRoot":"","sources":["../../../../src/util/throttlingRetryStrategy.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,8BAA8B,IAAI,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts b/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts new file mode 100644 index 00000000..e10f65bd --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts @@ -0,0 +1,7 @@ +/** + * Helper TypeGuard that checks if the value is not null or undefined. 
+ * @param thing - Anything + * @internal + */ +export declare function isDefined(thing: T | undefined | null): thing is T; +//# sourceMappingURL=typeguards.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts.map new file mode 100644 index 00000000..65796524 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"typeguards.d.ts","sourceRoot":"","sources":["../../../../src/util/typeguards.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAgB,SAAS,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,GAAG,SAAS,GAAG,IAAI,GAAG,KAAK,IAAI,CAAC,CAEpE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts new file mode 100644 index 00000000..3a804c58 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts @@ -0,0 +1,131 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export declare const isNode: boolean; +/** + * Checks if a parsed URL is HTTPS + * + * @param urlToCheck - The url to check + * @returns True if the URL is HTTPS; false otherwise. + */ +export declare function urlIsHTTPS(urlToCheck: { + protocol: string; +}): boolean; +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +export declare function encodeUri(uri: string): string; +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +export declare function stripResponse(response: HttpOperationResponse): any; +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +export declare function stripRequest(request: WebResourceLike): WebResourceLike; +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +export declare function isValidUuid(uuid: string): boolean; +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUuid(): string; +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +export declare function executePromisesSequentially(promiseFactories: Array, kickstart: unknown): Promise; +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. 
+ * @param err - The error occurred if any, while executing the request; otherwise null.
+ * @param result - The deserialized response body if an error did not occur.
+ * @param request - The raw/actual request sent to the server if an error did not occur.
+ * @param response - The raw/actual response from the server if an error did not occur.
+ */
+ (err: Error | RestError | null, result?: TResult, request?: WebResourceLike, response?: HttpOperationResponse): void;
+}
+/**
+ * Converts a Promise to a callback.
+ * @param promise - The Promise to be converted to a callback
+ * @returns A function that takes the callback `(cb: Function) => void`
+ * @deprecated generated code should instead depend on responseToBody
+ */
+export declare function promiseToCallback(promise: Promise<any>): (cb: Function) => void;
+/**
+ * Converts a Promise to a service callback.
+ * @param promise - The Promise of HttpOperationResponse to be converted to a service callback
+ * @returns A function that takes the service callback (cb: ServiceCallback<T>): void
+ */
+export declare function promiseToServiceCallback<T>(promise: Promise<HttpOperationResponse>): (cb: ServiceCallback<T>) => void;
+export declare function prepareXMLRootList(obj: unknown, elementName: string, xmlNamespaceKey?: string, xmlNamespace?: string): {
+ [s: string]: any;
+};
+/**
+ * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor
+ * @param targetCtor - The target object on which the properties need to be applied.
+ * @param sourceCtors - An array of source objects from which the properties need to be taken.
+ */
+export declare function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void;
+/**
+ * Indicates whether the given string is in ISO 8601 format.
+ * @param value - The value to be validated for ISO 8601 duration format.
+ * @returns `true` if valid, `false` otherwise.
+ */
+export declare function isDuration(value: string): boolean;
+/**
+ * Replace all of the instances of searchValue in value with the provided replaceValue.
+ * @param value - The value to search and replace in.
+ * @param searchValue - The value to search for in the value argument.
+ * @param replaceValue - The value to replace searchValue with in the value argument.
+ * @returns The value where each instance of searchValue was replaced with replacedValue.
+ */
+export declare function replaceAll(value: string | undefined, searchValue: string, replaceValue: string): string | undefined;
+/**
+ * Determines whether the given entity is a basic/primitive type
+ * (string, number, boolean, null, undefined).
+ * @param value - Any entity
+ * @returns true if it is primitive type, false otherwise.
+ */
+export declare function isPrimitiveType(value: unknown): boolean;
+export declare function getEnvironmentValue(name: string): string | undefined;
+/**
+ * @internal
+ */
+export declare type UnknownObject = {
+ [s: string]: unknown;
+};
+/**
+ * @internal
+ * @returns true when input is an object type that is not null, Array, RegExp, or Date.
+ */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map new file mode 100644 index 00000000..4c0165c1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../src/util/utils.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAOjD;;GAEG;AACH,eAAO,MAAM,MAAM,SAIM,CAAC;AAE1B;;;;;GAKG;AACH,wBAAgB,UAAU,CAAC,UAAU,EAAE;IAAE,QAAQ,EAAE,MAAM,CAAA;CAAE,GAAG,OAAO,CAEpE;AAED;;;;;GAKG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAO7C;AAED;;;;;;GAMG;AACH,wBAAgB,aAAa,CAAC,QAAQ,EAAE,qBAAqB,GAAG,GAAG,CAMlE;AAED;;;;;;GAMG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,eAAe,GAAG,eAAe,CAMtE;AAED;;;;;GAKG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAEjD;AAED;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAErC;AAED;;;;;;;;GAQG;AACH,wBAAgB,2BAA2B,CACzC,gBAAgB,EAAE,KAAK,CAAC,GAAG,CAAC,EAC5B,SAAS,EAAE,OAAO,GACjB,OAAO,CAAC,GAAG,CAAC,CAMd;AAED;;GAEG;AACH,MAAM,WAAW,eAAe,CAAC,OAAO;IACtC;;;;;;OAMG;IACH,CACE,GAAG,EAAE,KAAK,GAAG,SAAS,GAAG,IAAI,EAC7B,MAAM,CAAC,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB,GAC/B,IAAI,CAAC;CACT;AAED;;;;;GAKG;AAEH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,EAAE,QAAQ,KAAK,IAAI,CAgB/E;AAED;;;;GAIG;AACH,wBAAgB,wBAAwB,CAAC,CAAC,EACxC,OAAO,EAAE,OAAO,CAAC,qBAAqB,CAAC,GACtC,CAAC,EAAE,EAAE,eAAe,CAAC,CAAC,CAAC,KAAK,IAAI,CAalC;AAED,wBAAgB,kBAAkB,CAChC,GAAG,EAAE,OAAO,EACZ,WAAW,EAAE,MAAM,EACnB,eAAe,CAAC,EAAE,MAAM,EACxB,YAAY,CAAC,EAAE,MAAM,GACpB;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,CAYtB;AAED;;;;GAIG;AACH,wBAAgB,WAAW,CAAC,eAAe,EAAE,OAAO,EAAE,WAAW,EAAE,GAAG,EAAE,GAAG,IAAI,CAS9E;AAKD;;;;GAIG;AACH,wBAAgB,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAEjD;AAED;;;;;;GAMG;AACH,wBAAgB,UAAU,CACxB,KAAK,EAAE,MAAM,GAAG,SAAS,EACzB,WAAW,EAAE,MAAM,EACnB,YAAY,EAAE,MAAM,GACnB,MAAM,GAAG,SAAS,CAEpB;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAEvD;AAED,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAOpE;AAED;;GAEG;AACH,oBAAY,aAAa,GAAG;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,OAAO,CAAA;CAAE,CAAC;AAErD;;;GAGG;AACH,wBAAgB,QAAQ,CAAC,KAAK,EAAE,OAAO,GAAG,KAAK,IAAI,aAAa,CAQ/D"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts new file mode 100644 index 00000000..c92f70d2 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts @@ -0,0 +1,4 @@ +import { SerializerOptions } from "./serializer.common"; +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +export declare function stringifyXML(content: unknown, opts?: SerializerOptions): string; +//# sourceMappingURL=xml.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map new file mode 100644 index 00000000..269d7095 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"xml.browser.d.ts","sourceRoot":"","sources":["../../../../src/util/xml.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAA4B,MAAM,qBAAqB,CAAC;AAgClF,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,GAAE,iBAAsB,GAAG,OAAO,CAAC,GAAG,CAAC,CAqBhF;AA4FD,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,EAAE,IAAI,GAAE,iBAAsB,GAAG,MAAM,CAWnF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts new file mode 100644 index 00000000..088841b9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts @@ -0,0 +1,14 @@ +import { SerializerOptions } from "./serializer.common"; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +export declare function stringifyXML(obj: unknown, opts?: SerializerOptions): string; +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +//# sourceMappingURL=xml.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map new file mode 100644 index 00000000..e06390ce --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.d.ts","sourceRoot":"","sources":["../../../../src/util/xml.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,iBAAiB,EAA4B,MAAM,qBAAqB,CAAC;AA0DlF;;;;GAIG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,OAAO,EAAE,IAAI,GAAE,iBAAsB,GAAG,MAAM,CAK/E;AAED;;;;GAIG;AACH,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,GAAE,iBAAsB,GAAG,OAAO,CAAC,GAAG,CAAC,CAiBhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/webResource.d.ts b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts new file mode 100644 index 00000000..a036e534 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts @@ -0,0 +1,445 @@ +/// +import { Context, SpanOptions } from "@azure/core-tracing"; +import { HttpHeadersLike } from "./httpHeaders"; +import { Mapper } from "./serializer"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { OperationSpec } from "./operationSpec"; +import { ProxySettings } from "./serviceClient"; +import { SerializerOptions } from "./util/serializer.common"; +/** + * List of supported HTTP methods. + */ +export declare type HttpMethods = "GET" | "PUT" | "POST" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS" | "TRACE"; +/** + * Possible HTTP request body types + */ +export declare type HttpRequestBody = Blob | string | ArrayBuffer | ArrayBufferView | (() => NodeJS.ReadableStream); +/** + * Fired in response to upload or download progress. + */ +export declare type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; +/** + * A description of a HTTP request to be made to a remote server. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. 
+ */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. + */ + streamResponseBody?: boolean; + /** + * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream. + */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * A query string represented as an object. + */ + query?: { + [key: string]: any; + }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * A unique identifier for the request. Used for logging and tracing. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} +export declare function isWebResourceLike(object: unknown): object is WebResourceLike; +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +export declare class WebResource implements WebResourceLike { + /** + * URL of the outgoing request. + */ + url: string; + /** + * HTTP method to use. + */ + method: HttpMethods; + /** + * Request body. + */ + body?: any; + /** + * HTTP headers. 
+ */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. + */ + streamResponseBody?: boolean; + /** + * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream. + */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * Query added to the URL. + */ + query?: { + [key: string]: any; + }; + /** + * Specification of the HTTP request. + */ + operationSpec?: OperationSpec; + /** + * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination. + */ + withCredentials: boolean; + /** + * How long to wait in milliseconds before aborting the request. + */ + timeout: number; + /** + * What proxy to use, if necessary. + */ + proxySettings?: ProxySettings; + /** + * Whether to keep the HTTP connections alive throughout requests. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Unique identifier of the outgoing request. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating Spans. + */ + tracingContext?: Context; + constructor(url?: string, method?: HttpMethods, body?: unknown, query?: { + [key: string]: any; + }, headers?: { + [key: string]: any; + } | HttpHeadersLike, streamResponseBody?: boolean, withCredentials?: boolean, abortSignal?: AbortSignalLike, timeout?: number, onUploadProgress?: (progress: TransferProgressEvent) => void, onDownloadProgress?: (progress: TransferProgressEvent) => void, proxySettings?: ProxySettings, keepAlive?: boolean, decompressResponse?: boolean, streamResponseStatusCodes?: Set); + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. 
+ */ + prepare(options: RequestPrepareOptions): WebResource; + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. + */ + clone(): WebResource; +} +/** + * Options to prepare an outgoing HTTP request. + */ +export interface RequestPrepareOptions { + /** + * The HTTP request method. Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: `{ "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } }` + * - query-parameter-value in "string" format: `{ "query-parameter-name": "query-parameter-value"}`. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}` + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: `{ "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } }` + * - path-parameter-value in "string" format: `{ "path-parameter-name": "path-parameter-value" }`. + */ + pathParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * Form data, used to build the request body. + */ + formData?: { + [key: string]: any; + }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. 
+ * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { + [key: string]: any; + }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { + [x: string]: any; + }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: Record; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Allows keeping track of the progress of uploading the outgoing request. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Allows keeping track of the progress of downloading the incoming response. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; +} +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + /** + * Value of the parameter. + */ + value: any; + /** + * Disables URL encoding if set to true. + */ + skipUrlEncoding: boolean; + /** + * Parameter values may contain any other property. + */ + [key: string]: any; +} +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * May contain other properties. 
+ */ + [key: string]: any; + /** + * Options to override XML parsing/building behavior. + */ + serializerOptions?: SerializerOptions; +} +//# sourceMappingURL=webResource.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map new file mode 100644 index 00000000..369a7531 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.d.ts","sourceRoot":"","sources":["../../../src/webResource.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAC3D,OAAO,EAAe,eAAe,EAAqB,MAAM,eAAe,CAAC;AAChF,OAAO,EAAE,MAAM,EAAc,MAAM,cAAc,CAAC;AAClD,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAG7D;;GAEG;AACH,oBAAY,WAAW,GACnB,KAAK,GACL,KAAK,GACL,MAAM,GACN,QAAQ,GACR,OAAO,GACP,MAAM,GACN,SAAS,GACT,OAAO,CAAC;AAEZ;;GAEG;AACH,oBAAY,eAAe,GACvB,IAAI,GACJ,MAAM,GACN,WAAW,GACX,eAAe,GACf,CAAC,MAAM,MAAM,CAAC,cAAc,CAAC,CAAC;AAElC;;GAEG;AACH,oBAAY,qBAAqB,GAAG;IAClC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;CACrB,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IACzB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,yBAAyB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IACxC;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC;;OAEG;IACH,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf;;OAEG;IACH,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;;OAGG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;;;OAIG;IACH,yBAAyB,IAAI,IAAI,CAAC;IAElC;;OAEG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,CAAC;IACzD;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;CAC1B;AAED,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,OAAO,GAAG,MAAM,IAAI,eAAe,CAuB5E;AAED;;;;;GAKG;AACH,qBAAa,WAAY,YAAW,eAAe;IACjD;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IACzB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,yBAAyB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IACxC;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC;;OAEG;IACH,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf;;OAEG;IACH,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,SAAS,EAAE,MAAM,
CAAC;IAElB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;OAEG;IACH,WAAW,CAAC,EAAE,WAAW,CAAC;IAE1B;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;gBAGvB,GAAG,CAAC,EAAE,MAAM,EACZ,MAAM,CAAC,EAAE,WAAW,EACpB,IAAI,CAAC,EAAE,OAAO,EACd,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,EAC9B,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,GAAG,eAAe,EAClD,kBAAkB,CAAC,EAAE,OAAO,EAC5B,eAAe,CAAC,EAAE,OAAO,EACzB,WAAW,CAAC,EAAE,eAAe,EAC7B,OAAO,CAAC,EAAE,MAAM,EAChB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC5D,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC9D,aAAa,CAAC,EAAE,aAAa,EAC7B,SAAS,CAAC,EAAE,OAAO,EACnB,kBAAkB,CAAC,EAAE,OAAO,EAC5B,yBAAyB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC;IAqBzC;;;;OAIG;IACH,yBAAyB,IAAI,IAAI;IASjC;;;;OAIG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,WAAW;IAwNpD;;;OAGG;IACH,KAAK,IAAI,WAAW;CAqCrB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC;;;OAGG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;IACb;;;;;;;;;;OAUG;IACH,eAAe,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IAC1D;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;;OAIG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;;;;;;;;OASG;IACH,cAAc,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IACzD;;OAEG;IACH,QAAQ,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAClC;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACjC;;OAEG;IACH,sBAAsB,CAAC,EAAE,OAAO,CAAC;IACjC;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,qBAAqB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAChD;;OAEG;IACH,0BAA0B,CAAC,EAAE,OAAO,CAAC;IACrC;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC/D;;OAEG;IACH,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,KAAK,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAE1C;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;;OAGG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAE7E;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;IAEnB;;OAEG;IACH,iBAAiB,CAAC,EAAE,iBAAiB,CAAC;CACvC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts new file mode 100644 index 00000000..eea3572d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts @@ -0,0 +1,12 @@ +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ +export declare class XhrHttpClient implements HttpClient { + sendRequest(request: WebResourceLike): Promise; +} +export declare function parseHeaders(xhr: XMLHttpRequest): HttpHeadersLike; +//# sourceMappingURL=xhrHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map new file mode 100644 index 00000000..3c18856f --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xhrHttpClient.d.ts","sourceRoot":"","sources":["../../../src/xhrHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAyB,eAAe,EAAE,MAAM,eAAe,CAAC;AAEvE,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAGhE;;GAEG;AACH,qBAAa,aAAc,YAAW,UAAU;IACvC,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CA6F7E;AAwED,wBAAgB,YAAY,CAAC,GAAG,EAAE,cAAc,GAAG,eAAe,CAajE"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/CHANGELOG.md b/node_modules/@azure/core-lro/CHANGELOG.md new file mode 100644 index 00000000..7b16b27f --- /dev/null +++ b/node_modules/@azure/core-lro/CHANGELOG.md @@ -0,0 +1,82 @@ +# Release History + +## 2.2.4 (2022-03-07) + +### Bugs Fixed + +- Fix polling so that resources created in a different URL will be retrieved once polling is done. [PR #20656](https://github.com/Azure/azure-sdk-for-js/pull/20656) + +## 2.2.3 (2022-01-06) + +### Bugs Fixed + +- Fix an issue where we treat Retry-After value as milliseconds. It is actually in seconds. [PR #19479](https://github.com/Azure/azure-sdk-for-js/pull/19479) + +## 2.2.2 (2021-12-02) + +### Bugs Fixed + +- Fix LRO PATCH operations when their results are located in a different URL. [PR #18820](https://github.com/Azure/azure-sdk-for-js/pull/18820) + +## 2.2.1 (2021-09-30) + +### Bugs Fixed + +- Check for string type before calling toLowerCase(). [PR #17573](https://github.com/Azure/azure-sdk-for-js/pull/17573) + +### Other Changes + +- Updates package to work with the react native bundler. [PR #17783](https://github.com/Azure/azure-sdk-for-js/pull/17783) + +## 2.2.0 (2021-08-05) + +### Features Added + +- `LroEngine` supports a new `isDone()` function in its options bag which can be used to provide a custom logic for determining when an LRO finished processing. + +## 2.1.0 (2021-07-19) + +### Features Added + +- Provides a long-running operation engine. + +## 2.0.0 (2021-06-30) + +### New Features + +- Changed TS compilation target to ES2017 in order to produce smaller bundles and use more native platform features + +## 1.0.5 (2021-04-12) + +- No functionality changes from 1.0.4. This release is to correct an issue where 1.0.4 shipped with modules in the wrong format (cjs instead of es6.) + +## 1.0.4 (2021-03-30) + +- Bug fix: Fix an issue where we might return stale state if the `update` implementation reassigns `operation.state`. + +### Breaking Changes + +- Updated @azure/core-tracing to version `1.0.0-preview.11`. See [@azure/core-tracing CHANGELOG](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/CHANGELOG.md) for details about breaking changes with tracing. + +## 1.0.3 (2021-01-07) + +- Updates the `tslib` dependency to version 2.x. + +## 1.0.2 (2020-04-28) + +- Moved `@opentelemetry/types` to the `devDependencies`. + +## 1.0.1 (2020-02-28) + +- `getOperationState()` now returns `TState`. 
+- `TState` of `PollerLike` can be a subset of `TState` of `Poller`, + +## 1.0.0 (2019-10-29) + +This release marks the general availability of the `@azure/core-lro` package. + +- Updated PollOperation to be more strictly typed. + +## 1.0.0-preview.1 (2019-10-22) + +- Initial implementation of an abstraction for Long Running Operations (LROs). diff --git a/node_modules/@azure/core-lro/LICENSE b/node_modules/@azure/core-lro/LICENSE new file mode 100644 index 00000000..ea8fb151 --- /dev/null +++ b/node_modules/@azure/core-lro/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-lro/README.md b/node_modules/@azure/core-lro/README.md new file mode 100644 index 00000000..e00b7409 --- /dev/null +++ b/node_modules/@azure/core-lro/README.md @@ -0,0 +1,92 @@ +# Azure Core LRO client library for JavaScript + +`@azure/core-lro` is a JavaScript library that manages long running operations (LROs) against Azure services. Until completion, such operations require consecutive calls to Azure services to update a local representation of the remote operation status. + +**Please note:** This library is included with other Azure SDK libraries that need it. It should not be used as a direct dependency in your projects. + +`@azure/core-lro` is made following our [Long Running Operations guidelines](https://azure.github.io/azure-sdk/typescript_design.html#ts-lro) + +Key links: +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-lro) +- [Package (npm)](https://www.npmjs.com/package/@azure/core-lro) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/@azure/core-lro) +- [Samples](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-lro/samples) + +## Getting started + +### Install the package + +To install this library for a project under the `azure-sdk-for-js`, make sure you are at the root of that project, then use [Rush](https://rushjs.io/) as follows: + +``` +rush add -p @azure/core-lro +``` + +To install this package outside of the `azure-sdk-for-js`, use `npm install --save @azure/core-lro`. + +### Configure TypeScript + +TypeScript users need to have Node type definitions installed: + +```bash +npm install @types/node +``` + +They will also need to enable `compilerOptions.allowSyntheticDefaultImports` in their +`tsconfig.json`. Note that if you have enabled `compilerOptions.esModuleInterop`, +`allowSyntheticDefaultImports` is enabled by default. 
+See [TypeScript's compiler options handbook](https://www.typescriptlang.org/docs/handbook/compiler-options.html)
+for more information.
+
+## Key concepts
+
+@azure/core-lro makes a distinction between the Long Running Operation and its Poller.
+
+- Whenever we talk about an **operation**, we mean the static representation of a Long Running Operation.
+ Any operation will have a definition of a **state**, which has an opinionated default set of properties.
+ The definition of the operation will also have functions that will define how to request new information
+ about the pending operation, how to request its cancellation, and how to serialize it.
+- A **Poller** is an object whose main function is to interact with an operation until the poller is manually stopped,
+ the operation finishes (either by succeeding or failing) or if a manual request to cancel the operation has succeeded.
+ Some characteristics of the pollers are:
+ - Pollers show the status of the polling behavior.
+ - Pollers support manual as well as automatic polling.
+ - Pollers are serializable and can resume from a serialized operation.
+ - Pollers also specify how much of the operation's state is going to be available to the public.
+- A **PollerLike** is the public interface of a Poller, which allows for different implementations to be used.
+
+## Examples
+
+You will be able to find some working examples of an implementation of an operation and a poller in:
+
+- [The `@azure/core-lro` samples](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-lro/samples).
+- [The `@azure/core-lro` tests](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-lro/test).
+
+## Troubleshooting
+
+### Logging
+
+Logs can be added at the discretion of the library implementing the Long Running Operation poller.
+Packages inside of [azure-sdk-for-js](https://github.com/Azure/azure-sdk-for-js) use
+[@azure/logger](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger).
+
+## Next steps
+
+Please take a look at the [samples](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-lro/samples) directory for detailed examples on how to use this library.
+
+## Contributing
+
+If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code.
+
+### Testing
+
+To run our tests, first install the dependencies (with `npm install` or `rush install`),
+then run the unit tests with: `npm run unit-test`.
+
+### Code of Conduct
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+
+![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-lro%2FREADME.png)
diff --git a/node_modules/@azure/core-lro/dist-esm/src/index.js b/node_modules/@azure/core-lro/dist-esm/src/index.js
new file mode 100644
index 00000000..b3cb4874
--- /dev/null
+++ b/node_modules/@azure/core-lro/dist-esm/src/index.js
@@ -0,0 +1,6 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+export * from "./pollOperation"; +export * from "./poller"; +export * from "./lroEngine"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/index.js.map b/node_modules/@azure/core-lro/dist-esm/src/index.js.map new file mode 100644 index 00000000..45a9ab1b --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,iBAAiB,CAAC;AAChC,cAAc,UAAU,CAAC;AACzB,cAAc,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./pollOperation\";\nexport * from \"./poller\";\nexport * from \"./lroEngine\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js new file mode 100644 index 00000000..301e7ec7 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { failureStates, successStates, } from "./models"; +import { isUnexpectedPollingResponse } from "./requestUtils"; +function getProvisioningState(rawResponse) { + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return typeof state === "string" ? state.toLowerCase() : "succeeded"; +} +export function isBodyPollingDone(rawResponse) { + const state = getProvisioningState(rawResponse); + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. 
The provisioning state: ${state}.`); + } + return successStates.includes(state); +} +/** + * Creates a polling strategy based on BodyPolling which uses the provisioning state + * from the result to determine the current operation state + */ +export function processBodyPollingOperationResult(response) { + return Object.assign(Object.assign({}, response), { done: isBodyPollingDone(response.rawResponse) }); +} +//# sourceMappingURL=bodyPolling.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js.map new file mode 100644 index 00000000..8a06d124 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bodyPolling.js","sourceRoot":"","sources":["../../../src/lroEngine/bodyPolling.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAKL,aAAa,EACb,aAAa,GACd,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,2BAA2B,EAAE,MAAM,gBAAgB,CAAC;AAE7D,SAAS,oBAAoB,CAAC,WAAwB;;IACpD,MAAM,EAAE,UAAU,EAAE,iBAAiB,EAAE,GAAG,MAAC,WAAW,CAAC,IAAgB,mCAAI,EAAE,CAAC;IAC9E,MAAM,KAAK,GAAuB,MAAA,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,iBAAiB,mCAAI,iBAAiB,CAAC;IACrF,OAAO,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC;AACvE,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,WAAwB;IACxD,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;IAChD,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;QAC7E,MAAM,IAAI,KAAK,CAAC,kEAAkE,KAAK,GAAG,CAAC,CAAC;KAC7F;IACD,OAAO,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AACvC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,iCAAiC,CAC/C,QAA8B;IAE9B,uCACK,QAAQ,KACX,IAAI,EAAE,iBAAiB,CAAC,QAAQ,CAAC,WAAW,CAAC,IAC7C;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LroBody,\n LroResponse,\n LroStatus,\n RawResponse,\n failureStates,\n successStates,\n} from \"./models\";\nimport { isUnexpectedPollingResponse } from \"./requestUtils\";\n\nfunction getProvisioningState(rawResponse: RawResponse): string {\n const { properties, provisioningState } = (rawResponse.body as LroBody) ?? {};\n const state: string | undefined = properties?.provisioningState ?? provisioningState;\n return typeof state === \"string\" ? state.toLowerCase() : \"succeeded\";\n}\n\nexport function isBodyPollingDone(rawResponse: RawResponse): boolean {\n const state = getProvisioningState(rawResponse);\n if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) {\n throw new Error(`The long running operation has failed. The provisioning state: ${state}.`);\n }\n return successStates.includes(state);\n}\n\n/**\n * Creates a polling strategy based on BodyPolling which uses the provisioning state\n * from the result to determine the current operation state\n */\nexport function processBodyPollingOperationResult(\n response: LroResponse\n): LroStatus {\n return {\n ...response,\n done: isBodyPollingDone(response.rawResponse),\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js new file mode 100644 index 00000000..9a5cd7d1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { LroEngine } from "./lroEngine"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js.map new file mode 100644 index 00000000..1d9a2e5b --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/lroEngine/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { LroEngine } from \"./lroEngine\";\nexport {\n LroResourceLocationConfig,\n LongRunningOperation,\n LroResponse,\n LroEngineOptions,\n RawResponse,\n} from \"./models\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js new file mode 100644 index 00000000..aac45a24 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { failureStates, successStates, } from "./models"; +import { isUnexpectedPollingResponse } from "./requestUtils"; +function isPollingDone(rawResponse) { + var _a; + if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) { + return false; + } + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = typeof status === "string" ? status.toLowerCase() : "succeeded"; + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); +} +/** + * Sends a request to the URI of the provisioned resource if needed. + */ +async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { + switch (lroResourceLocationConfig) { + case "original-uri": + return lro.sendPollRequest(lro.requestPath); + case "azure-async-operation": + return undefined; + case "location": + default: + return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); + } +} +export function processLocationPollingOperationResult(lro, resourceLocation, lroResourceLocationConfig) { + return (response) => { + if (isPollingDone(response.rawResponse)) { + if (resourceLocation === undefined) { + return Object.assign(Object.assign({}, response), { done: true }); + } + else { + return Object.assign(Object.assign({}, response), { done: false, next: async () => { + const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); + return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? 
finalResponse : response)), { done: true }); + } }); + } + } + return Object.assign(Object.assign({}, response), { done: false }); + }; +} +//# sourceMappingURL=locationPolling.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js.map new file mode 100644 index 00000000..8537ba52 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js.map @@ -0,0 +1 @@ +{"version":3,"file":"locationPolling.js","sourceRoot":"","sources":["../../../src/lroEngine/locationPolling.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAOL,aAAa,EACb,aAAa,GACd,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,2BAA2B,EAAE,MAAM,gBAAgB,CAAC;AAE7D,SAAS,aAAa,CAAC,WAAwB;;IAC7C,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,WAAW,CAAC,UAAU,KAAK,GAAG,EAAE;QAC9E,OAAO,KAAK,CAAC;KACd;IACD,MAAM,EAAE,MAAM,EAAE,GAAG,MAAC,WAAW,CAAC,IAAgB,mCAAI,EAAE,CAAC;IACvD,MAAM,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC;IAC9E,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;QAC7E,MAAM,IAAI,KAAK,CAAC,kEAAkE,KAAK,GAAG,CAAC,CAAC;KAC7F;IACD,OAAO,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AACvC,CAAC;AAED;;GAEG;AACH,KAAK,UAAU,gBAAgB,CAC7B,GAAkC,EAClC,gBAAwB,EACxB,yBAAqD;IAErD,QAAQ,yBAAyB,EAAE;QACjC,KAAK,cAAc;YACjB,OAAO,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;QAC9C,KAAK,uBAAuB;YAC1B,OAAO,SAAS,CAAC;QACnB,KAAK,UAAU,CAAC;QAChB;YACE,OAAO,GAAG,CAAC,eAAe,CAAC,gBAAgB,aAAhB,gBAAgB,cAAhB,gBAAgB,GAAI,GAAG,CAAC,WAAW,CAAC,CAAC;KACnE;AACH,CAAC;AAED,MAAM,UAAU,qCAAqC,CACnD,GAAkC,EAClC,gBAAyB,EACzB,yBAAqD;IAErD,OAAO,CAAC,QAA8B,EAAsB,EAAE;QAC5D,IAAI,aAAa,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE;YACvC,IAAI,gBAAgB,KAAK,SAAS,EAAE;gBAClC,uCAAY,QAAQ,KAAE,IAAI,EAAE,IAAI,IAAG;aACpC;iBAAM;gBACL,uCACK,QAAQ,KACX,IAAI,EAAE,KAAK,EACX,IAAI,EAAE,KAAK,IAAI,EAAE;wBACf,MAAM,aAAa,GAAG,MAAM,gBAAgB,CAC1C,GAAG,EACH,gBAAgB,EAChB,yBAAyB,CAC1B,CAAC;wBACF,uCACK,CAAC,aAAa,aAAb,aAAa,cAAb,aAAa,GAAI,QAAQ,CAAC,KAC9B,IAAI,EAAE,IAAI,IACV;oBACJ,CAAC,IACD;aACH;SACF;QACD,uCACK,QAAQ,KACX,IAAI,EAAE,KAAK,IACX;IACJ,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LongRunningOperation,\n LroBody,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n RawResponse,\n failureStates,\n successStates,\n} from \"./models\";\nimport { isUnexpectedPollingResponse } from \"./requestUtils\";\n\nfunction isPollingDone(rawResponse: RawResponse): boolean {\n if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) {\n return false;\n }\n const { status } = (rawResponse.body as LroBody) ?? {};\n const state = typeof status === \"string\" ? status.toLowerCase() : \"succeeded\";\n if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) {\n throw new Error(`The long running operation has failed. The provisioning state: ${state}.`);\n }\n return successStates.includes(state);\n}\n\n/**\n * Sends a request to the URI of the provisioned resource if needed.\n */\nasync function sendFinalRequest(\n lro: LongRunningOperation,\n resourceLocation: string,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): Promise | undefined> {\n switch (lroResourceLocationConfig) {\n case \"original-uri\":\n return lro.sendPollRequest(lro.requestPath);\n case \"azure-async-operation\":\n return undefined;\n case \"location\":\n default:\n return lro.sendPollRequest(resourceLocation ?? 
lro.requestPath);\n }\n}\n\nexport function processLocationPollingOperationResult(\n lro: LongRunningOperation,\n resourceLocation?: string,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): (response: LroResponse) => LroStatus {\n return (response: LroResponse): LroStatus => {\n if (isPollingDone(response.rawResponse)) {\n if (resourceLocation === undefined) {\n return { ...response, done: true };\n } else {\n return {\n ...response,\n done: false,\n next: async () => {\n const finalResponse = await sendFinalRequest(\n lro,\n resourceLocation,\n lroResourceLocationConfig\n );\n return {\n ...(finalResponse ?? response),\n done: true,\n };\n },\n };\n }\n }\n return {\n ...response,\n done: false,\n };\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js new file mode 100644 index 00000000..094bfe42 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +export const logger = createClientLogger("core-lro"); +//# sourceMappingURL=logger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js.map new file mode 100644 index 00000000..161d8f71 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.js","sourceRoot":"","sources":["../../../src/lroEngine/logger.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;;GAGG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n * @internal\n */\nexport const logger = createClientLogger(\"core-lro\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js new file mode 100644 index 00000000..a25411fd --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { GenericPollOperation } from "./operation"; +import { Poller } from "../poller"; +function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`); + } +} +/** + * The LRO Engine, a class that performs polling. + */ +export class LroEngine extends Poller { + constructor(lro, options) { + const { intervalInMs = 2000, resumeFrom } = options || {}; + const state = resumeFrom + ? deserializeState(resumeFrom) + : {}; + const operation = new GenericPollOperation(state, lro, options === null || options === void 0 ? void 0 : options.lroResourceLocationConfig, options === null || options === void 0 ? void 0 : options.processResult, options === null || options === void 0 ? void 0 : options.updateState, options === null || options === void 0 ? 
void 0 : options.isDone); + super(operation); + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + } +} +//# sourceMappingURL=lroEngine.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js.map new file mode 100644 index 00000000..3218c695 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.js","sourceRoot":"","sources":["../../../src/lroEngine/lroEngine.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAQlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,aAAa,CAAC;AAEnD,OAAO,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAEnC,SAAS,gBAAgB,CACvB,eAAuB;IAEvB,IAAI;QACF,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,KAAK,CAAC;KAC1C;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,2CAA2C,eAAe,EAAE,CAAC,CAAC;KAC/E;AACH,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,SAA+D,SAAQ,MAGnF;IAGC,YAAY,GAAkC,EAAE,OAA2C;QACzF,MAAM,EAAE,YAAY,GAAG,IAAI,EAAE,UAAU,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;QAC1D,MAAM,KAAK,GAAkD,UAAU;YACrE,CAAC,CAAC,gBAAgB,CAAC,UAAU,CAAC;YAC9B,CAAC,CAAE,EAAoD,CAAC;QAE1D,MAAM,SAAS,GAAG,IAAI,oBAAoB,CACxC,KAAK,EACL,GAAG,EACH,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,yBAAyB,EAClC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,aAAa,EACtB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,CAChB,CAAC;QACF,KAAK,CAAC,SAAS,CAAC,CAAC;QAEjB,IAAI,CAAC,MAAM,GAAG,EAAE,YAAY,EAAE,YAAY,EAAE,CAAC;QAC7C,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACzC,CAAC;IAED;;OAEG;IACH,KAAK;QACH,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;IACzF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LongRunningOperation,\n LroEngineOptions,\n PollerConfig,\n ResumablePollOperationState,\n} from \"./models\";\nimport { GenericPollOperation } from \"./operation\";\nimport { PollOperationState } from \"../pollOperation\";\nimport { Poller } from \"../poller\";\n\nfunction deserializeState(\n serializedState: string\n): TState & ResumablePollOperationState {\n try {\n return JSON.parse(serializedState).state;\n } catch (e) {\n throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`);\n }\n}\n\n/**\n * The LRO Engine, a class that performs polling.\n */\nexport class LroEngine> extends Poller<\n TState,\n TResult\n> {\n private config: PollerConfig;\n\n constructor(lro: LongRunningOperation, options?: LroEngineOptions) {\n const { intervalInMs = 2000, resumeFrom } = options || {};\n const state: TState & ResumablePollOperationState = resumeFrom\n ? 
deserializeState(resumeFrom)\n : ({} as TState & ResumablePollOperationState);\n\n const operation = new GenericPollOperation(\n state,\n lro,\n options?.lroResourceLocationConfig,\n options?.processResult,\n options?.updateState,\n options?.isDone\n );\n super(operation);\n\n this.config = { intervalInMs: intervalInMs };\n operation.setPollerConfig(this.config);\n }\n\n /**\n * The method used by the poller to wait before attempting to update its operation.\n */\n delay(): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs));\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js new file mode 100644 index 00000000..fb9667db --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const successStates = ["succeeded"]; +export const failureStates = ["failed", "canceled", "cancelled"]; +/** + * The LRO states that signal that the LRO has completed. + */ +export const terminalStates = successStates.concat(failureStates); +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js.map new file mode 100644 index 00000000..b40cc57c --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/lroEngine/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkClC,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,WAAW,CAAC,CAAC;AAC3C,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,QAAQ,EAAE,UAAU,EAAE,WAAW,CAAC,CAAC;AACjE;;GAEG;AACH,MAAM,CAAC,MAAM,cAAc,GAAG,aAAa,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperationState } from \"../pollOperation\";\n\n/**\n * Options for the LRO poller.\n */\nexport interface LroEngineOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n resumeFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n lroResourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: RawResponse) => void;\n /**\n * A predicate to determine whether the LRO finished processing.\n */\n isDone?: (lastResponse: unknown, state: TState) => boolean;\n}\n\nexport const successStates = [\"succeeded\"];\nexport const failureStates = [\"failed\", \"canceled\", \"cancelled\"];\n/**\n * The LRO states that signal that the LRO has completed.\n */\nexport const terminalStates = successStates.concat(failureStates);\n\n/**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\nexport type LroResourceLocationConfig = \"azure-async-operation\" | \"location\" | \"original-uri\";\n\n/**\n * The type of a LRO response body. 
This is just a convenience type for checking the status of the operation.\n */\n\nexport interface LroBody extends Record {\n /** The status of the operation. */\n status?: string;\n /** The state of the provisioning process */\n provisioningState?: string;\n /** The properties of the provisioning process */\n properties?: { provisioningState?: string } & Record;\n}\n\n/**\n * Simple type of the raw response.\n */\nexport interface RawResponse {\n /** The HTTP status code */\n statusCode: number;\n /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */\n headers: {\n [headerName: string]: string;\n };\n /** The parsed response body */\n body?: unknown;\n}\n\n/**\n * The type of the response of a LRO.\n */\nexport interface LroResponse {\n /** The flattened response */\n flatResponse: T;\n /** The raw response */\n rawResponse: RawResponse;\n}\n\n/** The type of which LRO implementation being followed by a specific API. */\nexport type LroMode = \"Location\" | \"Body\";\n\n/**\n * The configuration of a LRO to determine how to perform polling and checking whether the operation has completed.\n */\nexport interface LroConfig {\n /** The LRO mode */\n mode?: LroMode;\n /** The path of a provisioned resource */\n resourceLocation?: string;\n}\n\n/**\n * Type of a polling operation state that can actually be resumed.\n */\nexport type ResumablePollOperationState = PollOperationState & {\n initialRawResponse?: RawResponse;\n config?: LroConfig;\n pollingURL?: string;\n};\n\nexport interface PollerConfig {\n intervalInMs: number;\n}\n\n/**\n * The type of a terminal state of an LRO.\n */\nexport interface LroTerminalState extends LroResponse {\n /**\n * Whether the operation has finished.\n */\n done: true;\n}\n\n/**\n * The type of an in-progress state of an LRO.\n */\nexport interface LroInProgressState extends LroResponse {\n /**\n * Whether the operation has finished.\n */\n done: false;\n /**\n * The request to be sent next if it is different from the standard polling one.\n * Notice that it will disregard any polling URLs provided to it.\n */\n next?: () => Promise>;\n}\n\n/**\n * The type of an LRO state which is a tagged union of terminal and in-progress states.\n */\nexport type LroStatus = LroTerminalState | LroInProgressState;\n\n/**\n * The type of the getLROStatusFromResponse method. It takes the response as input and returns along the response whether the operation has finished.\n */\nexport type GetLroStatusFromResponse = (response: LroResponse) => LroStatus;\n\n/**\n * Description of a long running operation.\n */\nexport interface LongRunningOperation {\n /**\n * The request path.\n */\n requestPath: string;\n /**\n * The HTTP request method.\n */\n requestMethod: string;\n /**\n * A function that can be used to send initial request to the service.\n */\n sendInitialRequest: () => Promise>;\n /**\n * A function that can be used to poll for the current status of a long running operation.\n */\n sendPollRequest: (path: string) => Promise>;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js new file mode 100644 index 00000000..c4d29f9a --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createGetLroStatusFromResponse, createInitializeState, createPoll } from "./stateMachine"; +import { getPollingUrl } from "./requestUtils"; +import { logger } from "./logger"; +export class GenericPollOperation { + constructor(state, lro, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + /** + * General update function for LROPoller, the general process is as follows + * 1. Check initial operation result to determine the strategy to use + * - Strategies: Location, Azure-AsyncOperation, Original Uri + * 2. Check if the operation result has a terminal state + * - Terminal state will be determined by each strategy + * 2.1 If it is terminal state Check if a final GET request is required, if so + * send final GET request and return result from operation. If no final GET + * is required, just return the result from operation. + * - Determining what to call for final request is responsibility of each strategy + * 2.2 If it is not terminal state, call the polling operation and go to step 1 + * - Determining what to call for polling is responsibility of each strategy + * - Strategies will always use the latest URI for polling if provided otherwise + * the last known one + */ + async update(options) { + var _a, _b, _c; + const state = this.state; + let lastResponse = undefined; + if (!state.isStarted) { + const initializeState = createInitializeState(state, this.lro.requestPath, this.lro.requestMethod); + lastResponse = await this.lro.sendInitialRequest(); + initializeState(lastResponse); + } + if (!state.isCompleted) { + if (!this.poll || !this.getLroStatusFromResponse) { + if (!state.config) { + throw new Error("Bad state: LRO mode is undefined. Please check if the serialized state is well-formed."); + } + const isDone = this.isDone; + this.getLroStatusFromResponse = isDone + ? (response) => (Object.assign(Object.assign({}, response), { done: isDone(response.flatResponse, this.state) })) + : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig); + this.poll = createPoll(this.lro); + } + if (!state.pollingURL) { + throw new Error("Bad state: polling URL is undefined. Please check if the serialized state is well-formed."); + } + const currentState = await this.poll(state.pollingURL, this.pollerConfig, this.getLroStatusFromResponse); + logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`); + if (currentState.done) { + state.result = this.processResult + ? this.processResult(currentState.flatResponse, state) + : currentState.flatResponse; + state.isCompleted = true; + } + else { + this.poll = (_a = currentState.next) !== null && _a !== void 0 ? _a : this.poll; + state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL); + } + lastResponse = currentState; + } + logger.verbose(`LRO: current state: ${JSON.stringify(state)}`); + if (lastResponse) { + (_b = this.updateState) === null || _b === void 0 ? void 0 : _b.call(this, state, lastResponse === null || lastResponse === void 0 ? void 0 : lastResponse.rawResponse); + } + else { + logger.error(`LRO: no response was received`); + } + (_c = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _c === void 0 ? 
void 0 : _c.call(options, state); + return this; + } + async cancel() { + this.state.isCancelled = true; + return this; + } + /** + * Serializes the Poller operation. + */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js.map new file mode 100644 index 00000000..4843c6c6 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/lroEngine/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAalC,OAAO,EAAE,8BAA8B,EAAE,qBAAqB,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAEnG,OAAO,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAC/C,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC,MAAM,OAAO,oBAAoB;IAW/B,YACS,KAAoD,EACnD,GAAkC,EAClC,yBAAqD,EACrD,aAA2D,EAC3D,WAAgE,EAChE,MAA0D;QAL3D,UAAK,GAAL,KAAK,CAA+C;QACnD,QAAG,GAAH,GAAG,CAA+B;QAClC,8BAAyB,GAAzB,yBAAyB,CAA4B;QACrD,kBAAa,GAAb,aAAa,CAA8C;QAC3D,gBAAW,GAAX,WAAW,CAAqD;QAChE,WAAM,GAAN,MAAM,CAAoD;IACjE,CAAC;IAEG,eAAe,CAAC,YAA0B;QAC/C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,KAAK,CAAC,MAAM,CAAC,OAGZ;;QACC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;QACzB,IAAI,YAAY,GAAqC,SAAS,CAAC;QAC/D,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;YACpB,MAAM,eAAe,GAAG,qBAAqB,CAC3C,KAAK,EACL,IAAI,CAAC,GAAG,CAAC,WAAW,EACpB,IAAI,CAAC,GAAG,CAAC,aAAa,CACvB,CAAC;YACF,YAAY,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,kBAAkB,EAAE,CAAC;YACnD,eAAe,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,wBAAwB,EAAE;gBAChD,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE;oBACjB,MAAM,IAAI,KAAK,CACb,wFAAwF,CACzF,CAAC;iBACH;gBACD,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;gBAC3B,IAAI,CAAC,wBAAwB,GAAG,MAAM;oBACpC,CAAC,CAAC,CAAC,QAA8B,EAAE,EAAE,CAAC,iCAC/B,QAAQ,KACX,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,YAAY,EAAE,IAAI,CAAC,KAAK,CAAC,IAC/C;oBACJ,CAAC,CAAC,8BAA8B,CAAC,IAAI,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,yBAAyB,CAAC,CAAC;gBAC3F,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aAClC;YACD,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;gBACrB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;aACH;YACD,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,IAAI,CAClC,KAAK,CAAC,UAAU,EAChB,IAAI,CAAC,YAAa,EAClB,IAAI,CAAC,wBAAwB,CAC9B,CAAC;YACF,MAAM,CAAC,OAAO,CAAC,0BAA0B,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC;YACrF,IAAI,YAAY,CAAC,IAAI,EAAE;gBACrB,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,aAAa;oBAC/B,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,YAAY,EAAE,KAAK,CAAC;oBACtD,CAAC,CAAC,YAAY,CAAC,YAAY,CAAC;gBAC9B,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;aAC1B;iBAAM;gBACL,IAAI,CAAC,IAAI,GAAG,MAAA,YAAY,CAAC,IAAI,mCAAI,IAAI,CAAC,IAAI,CAAC;gBAC3C,KAAK,CAAC,UAAU,GAAG,aAAa,CAAC,YAAY,CAAC,WAAW,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;aAC9E;YACD,YAAY,GAAG,YAAY,CAAC;SAC7B;QACD,MAAM,CAAC,OAAO,CAAC,uBAAuB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QAC/D,IAAI,YAAY,EAAE;YAChB,MAAA,IAAI,CAAC,WAAW,+CAAhB,IAAI,EAAe,KAAK,EAAE,YAAY,aAAZ,YAAY,uBAAZ,YAAY,CAAE,WAAW,CAAC,CAAC;SACtD;aAAM;YACL,MAAM,CAAC,KAAK,CAAC,+BAA+B,CAAC,CAAC;SAC/C;QACD,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,YAAY,+CAArB,OAAO,EAAiB,KAAK,CAAC,CAAC;QAC/B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC,MAAM;QACV,IAAI,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;QAC9B,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC;YACpB,KAAK,EAAE,IAAI,CAAC,KAAK;SAClB,CAAC,CAAC;IACL,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT 
license.\n\nimport {\n GetLroStatusFromResponse,\n LongRunningOperation,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n PollerConfig,\n RawResponse,\n ResumablePollOperationState,\n} from \"./models\";\nimport { PollOperation, PollOperationState } from \"../pollOperation\";\nimport { createGetLroStatusFromResponse, createInitializeState, createPoll } from \"./stateMachine\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { getPollingUrl } from \"./requestUtils\";\nimport { logger } from \"./logger\";\n\nexport class GenericPollOperation>\n implements PollOperation\n{\n private poll?: (\n pollingURL: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n ) => Promise>;\n private pollerConfig?: PollerConfig;\n private getLroStatusFromResponse?: GetLroStatusFromResponse;\n\n constructor(\n public state: TState & ResumablePollOperationState,\n private lro: LongRunningOperation,\n private lroResourceLocationConfig?: LroResourceLocationConfig,\n private processResult?: (result: unknown, state: TState) => TResult,\n private updateState?: (state: TState, lastResponse: RawResponse) => void,\n private isDone?: (lastResponse: TResult, state: TState) => boolean\n ) {}\n\n public setPollerConfig(pollerConfig: PollerConfig): void {\n this.pollerConfig = pollerConfig;\n }\n\n /**\n * General update function for LROPoller, the general process is as follows\n * 1. Check initial operation result to determine the strategy to use\n * - Strategies: Location, Azure-AsyncOperation, Original Uri\n * 2. Check if the operation result has a terminal state\n * - Terminal state will be determined by each strategy\n * 2.1 If it is terminal state Check if a final GET request is required, if so\n * send final GET request and return result from operation. If no final GET\n * is required, just return the result from operation.\n * - Determining what to call for final request is responsibility of each strategy\n * 2.2 If it is not terminal state, call the polling operation and go to step 1\n * - Determining what to call for polling is responsibility of each strategy\n * - Strategies will always use the latest URI for polling if provided otherwise\n * the last known one\n */\n async update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise> {\n const state = this.state;\n let lastResponse: LroResponse | undefined = undefined;\n if (!state.isStarted) {\n const initializeState = createInitializeState(\n state,\n this.lro.requestPath,\n this.lro.requestMethod\n );\n lastResponse = await this.lro.sendInitialRequest();\n initializeState(lastResponse);\n }\n\n if (!state.isCompleted) {\n if (!this.poll || !this.getLroStatusFromResponse) {\n if (!state.config) {\n throw new Error(\n \"Bad state: LRO mode is undefined. Please check if the serialized state is well-formed.\"\n );\n }\n const isDone = this.isDone;\n this.getLroStatusFromResponse = isDone\n ? (response: LroResponse) => ({\n ...response,\n done: isDone(response.flatResponse, this.state),\n })\n : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig);\n this.poll = createPoll(this.lro);\n }\n if (!state.pollingURL) {\n throw new Error(\n \"Bad state: polling URL is undefined. 
Please check if the serialized state is well-formed.\"\n );\n }\n const currentState = await this.poll(\n state.pollingURL,\n this.pollerConfig!,\n this.getLroStatusFromResponse\n );\n logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`);\n if (currentState.done) {\n state.result = this.processResult\n ? this.processResult(currentState.flatResponse, state)\n : currentState.flatResponse;\n state.isCompleted = true;\n } else {\n this.poll = currentState.next ?? this.poll;\n state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL);\n }\n lastResponse = currentState;\n }\n logger.verbose(`LRO: current state: ${JSON.stringify(state)}`);\n if (lastResponse) {\n this.updateState?.(state, lastResponse?.rawResponse);\n } else {\n logger.error(`LRO: no response was received`);\n }\n options?.fireProgress?.(state);\n return this;\n }\n\n async cancel(): Promise> {\n this.state.isCancelled = true;\n return this;\n }\n\n /**\n * Serializes the Poller operation.\n */\n public toString(): string {\n return JSON.stringify({\n state: this.state,\n });\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js new file mode 100644 index 00000000..a5f21ae1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export function processPassthroughOperationResult(response) { + return Object.assign(Object.assign({}, response), { done: true }); +} +//# sourceMappingURL=passthrough.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js.map new file mode 100644 index 00000000..eb906859 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js.map @@ -0,0 +1 @@ +{"version":3,"file":"passthrough.js","sourceRoot":"","sources":["../../../src/lroEngine/passthrough.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,MAAM,UAAU,iCAAiC,CAC/C,QAA8B;IAE9B,uCACK,QAAQ,KACX,IAAI,EAAE,IAAI,IACV;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroResponse, LroStatus } from \"./models\";\n\nexport function processPassthroughOperationResult(\n response: LroResponse\n): LroStatus {\n return {\n ...response,\n done: true,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js new file mode 100644 index 00000000..4b5c0726 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Detects where the continuation token is and returns it. Notice that azure-asyncoperation + * must be checked first before the other location headers because there are scenarios + * where both azure-asyncoperation and location could be present in the same response but + * azure-asyncoperation should be the one to use for polling. + */ +export function getPollingUrl(rawResponse, defaultPath) { + var _a, _b, _c; + return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? 
_a : getOperationLocation(rawResponse)) !== null && _b !== void 0 ? _b : getLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); +} +function getLocation(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocation(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperation(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(requestMethod, rawResponse, requestPath) { + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "POST": + case "PATCH": { + return getLocation(rawResponse); + } + default: { + return undefined; + } + } +} +export function inferLroMode(requestPath, requestMethod, rawResponse) { + if (getAzureAsyncOperation(rawResponse) !== undefined || + getOperationLocation(rawResponse) !== undefined) { + return { + mode: "Location", + resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath), + }; + } + else if (getLocation(rawResponse) !== undefined) { + return { + mode: "Location", + }; + } + else if (["PUT", "PATCH"].includes(requestMethod)) { + return { + mode: "Body", + }; + } + return {}; +} +class SimpleRestError extends Error { + constructor(message, statusCode) { + super(message); + this.name = "RestError"; + this.statusCode = statusCode; + Object.setPrototypeOf(this, SimpleRestError.prototype); + } +} +export function isUnexpectedInitialResponse(rawResponse) { + const code = rawResponse.statusCode; + if (![203, 204, 202, 201, 200, 500].includes(code)) { + throw new SimpleRestError(`Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`, code); + } + return false; +} +export function isUnexpectedPollingResponse(rawResponse) { + const code = rawResponse.statusCode; + if (![202, 201, 200, 500].includes(code)) { + throw new SimpleRestError(`Received unexpected HTTP status code ${code} while polling. 
This may indicate a server issue.`, code); + } + return false; +} +//# sourceMappingURL=requestUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js.map new file mode 100644 index 00000000..80a36ccb --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../../src/lroEngine/requestUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;;;GAKG;AACH,MAAM,UAAU,aAAa,CAAC,WAAwB,EAAE,WAAmB;;IACzE,OAAO,CACL,MAAA,MAAA,MAAA,sBAAsB,CAAC,WAAW,CAAC,mCACnC,oBAAoB,CAAC,WAAW,CAAC,mCACjC,WAAW,CAAC,WAAW,CAAC,mCACxB,WAAW,CACZ,CAAC;AACJ,CAAC;AAED,SAAS,WAAW,CAAC,WAAwB;IAC3C,OAAO,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;AACzC,CAAC;AAED,SAAS,oBAAoB,CAAC,WAAwB;IACpD,OAAO,WAAW,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,sBAAsB,CAAC,WAAwB;IACtD,OAAO,WAAW,CAAC,OAAO,CAAC,sBAAsB,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,oBAAoB,CAC3B,aAAqB,EACrB,WAAwB,EACxB,WAAmB;IAEnB,QAAQ,aAAa,EAAE;QACrB,KAAK,KAAK,CAAC,CAAC;YACV,OAAO,WAAW,CAAC;SACpB;QACD,KAAK,MAAM,CAAC;QACZ,KAAK,OAAO,CAAC,CAAC;YACZ,OAAO,WAAW,CAAC,WAAW,CAAC,CAAC;SACjC;QACD,OAAO,CAAC,CAAC;YACP,OAAO,SAAS,CAAC;SAClB;KACF;AACH,CAAC;AAED,MAAM,UAAU,YAAY,CAC1B,WAAmB,EACnB,aAAqB,EACrB,WAAwB;IAExB,IACE,sBAAsB,CAAC,WAAW,CAAC,KAAK,SAAS;QACjD,oBAAoB,CAAC,WAAW,CAAC,KAAK,SAAS,EAC/C;QACA,OAAO;YACL,IAAI,EAAE,UAAU;YAChB,gBAAgB,EAAE,oBAAoB,CAAC,aAAa,EAAE,WAAW,EAAE,WAAW,CAAC;SAChF,CAAC;KACH;SAAM,IAAI,WAAW,CAAC,WAAW,CAAC,KAAK,SAAS,EAAE;QACjD,OAAO;YACL,IAAI,EAAE,UAAU;SACjB,CAAC;KACH;SAAM,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,EAAE;QACnD,OAAO;YACL,IAAI,EAAE,MAAM;SACb,CAAC;KACH;IACD,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,MAAM,eAAgB,SAAQ,KAAK;IAEjC,YAAY,OAAe,EAAE,UAAkB;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;QACxB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAE7B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAC;IACzD,CAAC;CACF;AAED,MAAM,UAAU,2BAA2B,CAAC,WAAwB;IAClE,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC;IACpC,IAAI,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;QAClD,MAAM,IAAI,eAAe,CACvB,wCAAwC,IAAI,6DAA6D,EACzG,IAAI,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,2BAA2B,CAAC,WAAwB;IAClE,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC;IACpC,IAAI,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;QACxC,MAAM,IAAI,eAAe,CACvB,wCAAwC,IAAI,mDAAmD,EAC/F,IAAI,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroConfig, RawResponse } from \"./models\";\n\n/**\n * Detects where the continuation token is and returns it. 
Notice that azure-asyncoperation\n * must be checked first before the other location headers because there are scenarios\n * where both azure-asyncoperation and location could be present in the same response but\n * azure-asyncoperation should be the one to use for polling.\n */\nexport function getPollingUrl(rawResponse: RawResponse, defaultPath: string): string {\n return (\n getAzureAsyncOperation(rawResponse) ??\n getOperationLocation(rawResponse) ??\n getLocation(rawResponse) ??\n defaultPath\n );\n}\n\nfunction getLocation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"location\"];\n}\n\nfunction getOperationLocation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"operation-location\"];\n}\n\nfunction getAzureAsyncOperation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"azure-asyncoperation\"];\n}\n\nfunction findResourceLocation(\n requestMethod: string,\n rawResponse: RawResponse,\n requestPath: string\n): string | undefined {\n switch (requestMethod) {\n case \"PUT\": {\n return requestPath;\n }\n case \"POST\":\n case \"PATCH\": {\n return getLocation(rawResponse);\n }\n default: {\n return undefined;\n }\n }\n}\n\nexport function inferLroMode(\n requestPath: string,\n requestMethod: string,\n rawResponse: RawResponse\n): LroConfig {\n if (\n getAzureAsyncOperation(rawResponse) !== undefined ||\n getOperationLocation(rawResponse) !== undefined\n ) {\n return {\n mode: \"Location\",\n resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath),\n };\n } else if (getLocation(rawResponse) !== undefined) {\n return {\n mode: \"Location\",\n };\n } else if ([\"PUT\", \"PATCH\"].includes(requestMethod)) {\n return {\n mode: \"Body\",\n };\n }\n return {};\n}\n\nclass SimpleRestError extends Error {\n public statusCode?: number;\n constructor(message: string, statusCode: number) {\n super(message);\n this.name = \"RestError\";\n this.statusCode = statusCode;\n\n Object.setPrototypeOf(this, SimpleRestError.prototype);\n }\n}\n\nexport function isUnexpectedInitialResponse(rawResponse: RawResponse): boolean {\n const code = rawResponse.statusCode;\n if (![203, 204, 202, 201, 200, 500].includes(code)) {\n throw new SimpleRestError(\n `Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`,\n code\n );\n }\n return false;\n}\n\nexport function isUnexpectedPollingResponse(rawResponse: RawResponse): boolean {\n const code = rawResponse.statusCode;\n if (![202, 201, 200, 500].includes(code)) {\n throw new SimpleRestError(\n `Received unexpected HTTP status code ${code} while polling. This may indicate a server issue.`,\n code\n );\n }\n return false;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js new file mode 100644 index 00000000..90541c69 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { getPollingUrl, inferLroMode, isUnexpectedInitialResponse } from "./requestUtils"; +import { isBodyPollingDone, processBodyPollingOperationResult } from "./bodyPolling"; +import { logger } from "./logger"; +import { processLocationPollingOperationResult } from "./locationPolling"; +import { processPassthroughOperationResult } from "./passthrough"; +/** + * creates a stepping function that maps an LRO state to another. + */ +export function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { + switch (config.mode) { + case "Location": { + return processLocationPollingOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); + } + case "Body": { + return processBodyPollingOperationResult; + } + default: { + return processPassthroughOperationResult; + } + } +} +/** + * Creates a polling operation. + */ +export function createPoll(lroPrimitives) { + return async (path, pollerConfig, getLroStatusFromResponse) => { + const response = await lroPrimitives.sendPollRequest(path); + const retryAfter = response.rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + pollerConfig.intervalInMs = isNaN(retryAfterInSeconds) + ? calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) + : retryAfterInSeconds * 1000; + } + return getLroStatusFromResponse(response); + }; +} +function calculatePollingIntervalFromDate(retryAfterDate, defaultIntervalInMs) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return defaultIntervalInMs; +} +/** + * Creates a callback to be used to initialize the polling operation state. 
+ * @param state - of the polling operation + * @param operationSpec - of the LRO + * @param callback - callback to be called when the operation is done + * @returns callback that initializes the state of the polling operation + */ +export function createInitializeState(state, requestPath, requestMethod) { + return (response) => { + if (isUnexpectedInitialResponse(response.rawResponse)) + return true; + state.initialRawResponse = response.rawResponse; + state.isStarted = true; + state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath); + state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse); + /** short circuit polling if body polling is done in the initial request */ + if (state.config.mode === undefined || + (state.config.mode === "Body" && isBodyPollingDone(state.initialRawResponse))) { + state.result = response.flatResponse; + state.isCompleted = true; + } + logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`); + return Boolean(state.isCompleted); + }; +} +//# sourceMappingURL=stateMachine.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js.map new file mode 100644 index 00000000..9ee42afa --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js.map @@ -0,0 +1 @@ +{"version":3,"file":"stateMachine.js","sourceRoot":"","sources":["../../../src/lroEngine/stateMachine.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAYlC,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,2BAA2B,EAAE,MAAM,gBAAgB,CAAC;AAC1F,OAAO,EAAE,iBAAiB,EAAE,iCAAiC,EAAE,MAAM,eAAe,CAAC;AACrF,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAClC,OAAO,EAAE,qCAAqC,EAAE,MAAM,mBAAmB,CAAC;AAC1E,OAAO,EAAE,iCAAiC,EAAE,MAAM,eAAe,CAAC;AAElE;;GAEG;AACH,MAAM,UAAU,8BAA8B,CAC5C,aAA4C,EAC5C,MAAiB,EACjB,yBAAqD;IAErD,QAAQ,MAAM,CAAC,IAAI,EAAE;QACnB,KAAK,UAAU,CAAC,CAAC;YACf,OAAO,qCAAqC,CAC1C,aAAa,EACb,MAAM,CAAC,gBAAgB,EACvB,yBAAyB,CAC1B,CAAC;SACH;QACD,KAAK,MAAM,CAAC,CAAC;YACX,OAAO,iCAAiC,CAAC;SAC1C;QACD,OAAO,CAAC,CAAC;YACP,OAAO,iCAAiC,CAAC;SAC1C;KACF;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,UAAU,CACxB,aAA4C;IAM5C,OAAO,KAAK,EACV,IAAY,EACZ,YAA0B,EAC1B,wBAA2D,EAC9B,EAAE;QAC/B,MAAM,QAAQ,GAAG,MAAM,aAAa,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,UAAU,GAAuB,QAAQ,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;QACnF,IAAI,UAAU,KAAK,SAAS,EAAE;YAC5B,wEAAwE;YACxE,MAAM,mBAAmB,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;YACjD,YAAY,CAAC,YAAY,GAAG,KAAK,CAAC,mBAAmB,CAAC;gBACpD,CAAC,CAAC,gCAAgC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,EAAE,YAAY,CAAC,YAAY,CAAC;gBACnF,CAAC,CAAC,mBAAmB,GAAG,IAAI,CAAC;SAChC;QACD,OAAO,wBAAwB,CAAC,QAAQ,CAAC,CAAC;IAC5C,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,gCAAgC,CACvC,cAAoB,EACpB,mBAA2B;IAE3B,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,MAAM,cAAc,GAAG,cAAc,CAAC,OAAO,EAAE,CAAC;IAChD,IAAI,OAAO,GAAG,cAAc,EAAE;QAC5B,OAAO,cAAc,GAAG,OAAO,CAAC;KACjC;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,qBAAqB,CACnC,KAA2C,EAC3C,WAAmB,EACnB,aAAqB;IAErB,OAAO,CAAC,QAA8B,EAAW,EAAE;QACjD,IAAI,2BAA2B,CAAC,QAAQ,CAAC,WAAW,CAAC;YAAE,OAAO,IAAI,CAAC;QACnE,KAAK,CAAC,kBAAkB,GAAG,QAAQ,CAAC,WAAW,CAAC;QAChD,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,KAAK,CAAC,UAAU,GAAG,aAAa,CAAC,KAAK,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;QACxE,KAAK,CAAC,MAAM,GAAG,YAAY,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,CAAC,kBAAkB,CAAC,CAAC;QAClF,2EAA2E;QAC3E,IACE,KAAK,CAAC,MAAM,CAAC,IAAI,KAAK,SAAS;YAC/B,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,KAAK,MAAM,IAAI,iBAAiB,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC,EAC7E;YACA,KAAK,CAAC,MAAM,GAAG,QA
AQ,CAAC,YAAuB,CAAC;YAChD,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;SAC1B;QACD,MAAM,CAAC,OAAO,CAAC,uBAAuB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QAC/D,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;IACpC,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n GetLroStatusFromResponse,\n LongRunningOperation,\n LroConfig,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n PollerConfig,\n ResumablePollOperationState,\n} from \"./models\";\nimport { getPollingUrl, inferLroMode, isUnexpectedInitialResponse } from \"./requestUtils\";\nimport { isBodyPollingDone, processBodyPollingOperationResult } from \"./bodyPolling\";\nimport { logger } from \"./logger\";\nimport { processLocationPollingOperationResult } from \"./locationPolling\";\nimport { processPassthroughOperationResult } from \"./passthrough\";\n\n/**\n * creates a stepping function that maps an LRO state to another.\n */\nexport function createGetLroStatusFromResponse(\n lroPrimitives: LongRunningOperation,\n config: LroConfig,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): GetLroStatusFromResponse {\n switch (config.mode) {\n case \"Location\": {\n return processLocationPollingOperationResult(\n lroPrimitives,\n config.resourceLocation,\n lroResourceLocationConfig\n );\n }\n case \"Body\": {\n return processBodyPollingOperationResult;\n }\n default: {\n return processPassthroughOperationResult;\n }\n }\n}\n\n/**\n * Creates a polling operation.\n */\nexport function createPoll(\n lroPrimitives: LongRunningOperation\n): (\n pollingURL: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n) => Promise> {\n return async (\n path: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n ): Promise> => {\n const response = await lroPrimitives.sendPollRequest(path);\n const retryAfter: string | undefined = response.rawResponse.headers[\"retry-after\"];\n if (retryAfter !== undefined) {\n // Retry-After header value is either in HTTP date format, or in seconds\n const retryAfterInSeconds = parseInt(retryAfter);\n pollerConfig.intervalInMs = isNaN(retryAfterInSeconds)\n ? 
calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs)\n : retryAfterInSeconds * 1000;\n }\n return getLroStatusFromResponse(response);\n };\n}\n\nfunction calculatePollingIntervalFromDate(\n retryAfterDate: Date,\n defaultIntervalInMs: number\n): number {\n const timeNow = Math.floor(new Date().getTime());\n const retryAfterTime = retryAfterDate.getTime();\n if (timeNow < retryAfterTime) {\n return retryAfterTime - timeNow;\n }\n return defaultIntervalInMs;\n}\n\n/**\n * Creates a callback to be used to initialize the polling operation state.\n * @param state - of the polling operation\n * @param operationSpec - of the LRO\n * @param callback - callback to be called when the operation is done\n * @returns callback that initializes the state of the polling operation\n */\nexport function createInitializeState(\n state: ResumablePollOperationState,\n requestPath: string,\n requestMethod: string\n): (response: LroResponse) => boolean {\n return (response: LroResponse): boolean => {\n if (isUnexpectedInitialResponse(response.rawResponse)) return true;\n state.initialRawResponse = response.rawResponse;\n state.isStarted = true;\n state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath);\n state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse);\n /** short circuit polling if body polling is done in the initial request */\n if (\n state.config.mode === undefined ||\n (state.config.mode === \"Body\" && isBodyPollingDone(state.initialRawResponse))\n ) {\n state.result = response.flatResponse as TResult;\n state.isCompleted = true;\n }\n logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`);\n return Boolean(state.isCompleted);\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js b/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js new file mode 100644 index 00000000..f8c1767d --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=pollOperation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js.map b/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js.map new file mode 100644 index 00000000..96f166b8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.js","sourceRoot":"","sources":["../../src/pollOperation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * PollOperationState contains an opinionated list of the smallest set of properties needed\n * to define any long running operation poller.\n *\n * While the Poller class works as the local control mechanism to start triggering, wait for,\n * and potentially cancel a long running operation, the PollOperationState documents the status\n * of the remote long running operation.\n *\n * It should be updated at least when the operation starts, when it's finished, and when it's cancelled.\n * Though, implementations can have any other number of properties that can be updated by other reasons.\n */\nexport interface PollOperationState {\n /**\n * True if the operation has started.\n */\n isStarted?: boolean;\n /**\n * True if the operation has been completed.\n */\n isCompleted?: boolean;\n /**\n * True if the operation has been cancelled.\n */\n isCancelled?: boolean;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation concluded in a result of an expected type.\n */\n result?: TResult;\n}\n\n/**\n * PollOperation is an interface that defines how to update the local reference of the state of the remote\n * long running operation, just as well as how to request the cancellation of the same operation.\n *\n * It also has a method to serialize the operation so that it can be stored and resumed at any time.\n */\nexport interface PollOperation {\n /**\n * The state of the operation.\n * It will be used to store the basic properties of PollOperationState,\n * plus any custom property that the implementation may require.\n */\n state: TState;\n\n /**\n * Defines how to request the remote service for updates on the status of the long running operation.\n *\n * It optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n * Also optionally receives a \"fireProgress\" function, which, if called, is responsible for triggering the\n * poller's onProgress callbacks.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise>;\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * It returns a promise that should be resolved with an updated version of the poller's operation.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n cancel(options?: { abortSignal?: AbortSignalLike }): Promise>;\n\n /**\n * Serializes the operation.\n * Useful when wanting to create a poller that monitors an existing operation.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@azure/core-lro/dist-esm/src/poller.js b/node_modules/@azure/core-lro/dist-esm/src/poller.js new file mode 100644 index 00000000..bdb550a4 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/poller.js @@ -0,0 +1,390 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +/** + * When a poller is cancelled through the `cancelOperation` method, + * the poller will be rejected with an instance of the PollerCancelledError. + */ +export class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. 
+ * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +export class Poller { + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation) { + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pullUntilDone did not have a .catch or await try/catch on it's return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling() { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(); + await this.delay(); + } + } + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. 
+ */ + async pollOnce(options = {}) { + try { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.operation.state.result); + } + } + } + catch (e) { + this.operation.state.error = e; + if (this.reject) { + this.reject(e); + } + throw e; + } + } + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method, and rejects the + * pollUntilDone promise. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + if (this.reject) { + this.reject(new PollerCancelledError("Poller cancelled")); + } + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone() { + if (this.stopped) { + this.startPolling().catch(this.reject); + } + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. 
+ * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.stopped) { + this.stopped = true; + } + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString() { + return this.operation.toString(); + } +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/poller.js.map b/node_modules/@azure/core-lro/dist-esm/src/poller.js.map new file mode 100644 index 00000000..ea5abb95 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../src/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAoBlC;;;GAGG;AACH,MAAM,OAAO,kBAAmB,SAAQ,KAAK;IAC3C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAC;QACjC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC;IAC5D,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,OAAO,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAC;QACnC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,SAAS,CAAC,CAAC;IAC9D,CAAC;CACF;AA2DD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AACH,gDAAgD;AAChD,MAAM,OAAgB,MAAM;IAiB1B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;IACH,YAAY,SAAyC;QA/E7C,YAAO,GAAY,IAAI,CAAC;QAMxB,0BAAqB,GAAmC,EAAE,CAAC;QA0EjE,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CACxB,CACE,OAAkC,EAClC,MAA0E,EAC1E,EAAE;YACF,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;YACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACvB,CAAC,CACF,CAAC;QACF,mFAAmF;QACnF,sFAAsF;QACtF,mFAAmF;QACnF,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,EAAE;YACtB,yBAAyB;QAC3B,CAAC,CAAC,CAAC;IACL,CAAC;IAyBD;;;OAGG;IACK,KAAK,CAAC,YAAY;QACxB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;SACtB;QACD,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;YAC1C,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;YAClB,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;SACpB;IACH,CAAC;IAED;;;;;;;OAOG;IACK,KAAK,CAAC,QAAQ,CAAC,UAA6C,EAAE;QACpE,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;oBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW;oBAChC,YAAY,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;iBAC3C,CAAC,CAAC;gBACH,IAAI,IAAI,CAAC,MAAM,EAAE,IAAI,IAAI,CAAC,OAAO,EAAE;oBACjC,uEAAuE;oBACvE,uEAAuE;oBACvE,oEAAoE;oBACpE,uEAAuE;oBACvE,cAAc;oBACd,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,MAAiB,CAAC,CAAC;iBACtD;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC;YAC/B,IAAI,IAAI,CAAC,MAAM,EAAE;gBACf,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;aAChB;YACD,MAAM,CAAC,CAAC;SACT;IACH,CAAC;IAED;;;;;;;OAOG;IACK,YAAY,CAAC,KAAa;QAChC,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,qBAAqB,EAAE;YACjD,QAAQ,CAAC,KAAK,CAAC,CAAC;SACjB;IACH,CAAC;IAED;;;OAGG;IACK,KAAK,CAAC,UAAU,CAAC,UAA6C,EAAE;QACtE,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QACtD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,IAAI,oBAAoB,CAAC,kBAAkB,CAAC,CAAC,CAAC;SAC3D;IACH,CAAC;IAED;;;;;;;OAOG;IACI,IAAI,CAAC,UAA6C,EAAE;QACzD,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE;YACzB,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,oBAAoB,GAAG,GAAS,EAAE;gBACtC,IAAI,CAAC,eAAe,GAAG,SAAS,CAAC;YACnC,CAAC,CAAC;YACF,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,oBAAoB,EAAE,oBAAoB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC1F;QACD,OAAO,IAAI,CAAC,eAAe,CAAC;IAC9B,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,aAAa;QACxB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACxC;QACD,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;OAKG;IACI,UAAU,CAAC,QAAiC;QACjD,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC1C,OAAO,GAAS,EAAE;YAChB,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,
EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC;QACxF,CAAC,CAAC;IACJ,CAAC;IAED;;OAEG;IACI,MAAM;QACX,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;IACxE,CAAC;IAED;;OAEG;IACI,WAAW;QAChB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,CAAC,MAAM,EAAE;gBACf,IAAI,CAAC,MAAM,CAAC,IAAI,kBAAkB,CAAC,gCAAgC,CAAC,CAAC,CAAC;aACvE;SACF;IACH,CAAC;IAED;;OAEG;IACI,SAAS;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;SACrB;QACD,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YACvB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;SAC/C;aAAM,IAAI,OAAO,CAAC,WAAW,EAAE;YAC9B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;SAC1D;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB;QACtB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;IAC9B,CAAC;IAED;;;;;OAKG;IACI,SAAS;QACd,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,KAAK,CAAC,MAAM,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperation, PollOperationState } from \"./pollOperation\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * CancelOnProgress is used as the return value of a Poller's onProgress method.\n * When a user invokes onProgress, they're required to pass in a function that will be\n * called as a callback with the new data received each time the poll operation is updated.\n * onProgress returns a function that will prevent any further update to reach the original callback.\n */\nexport type CancelOnProgress = () => void;\n\n/**\n * PollProgressCallback is the type of the callback functions sent to onProgress.\n * These functions will receive a TState that is defined by your implementation of\n * the Poller class.\n */\nexport type PollProgressCallback = (state: TState) => void;\n\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nexport class PollerStoppedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(this, PollerStoppedError.prototype);\n }\n}\n\n/**\n * When a poller is cancelled through the `cancelOperation` method,\n * the poller will be rejected with an instance of the PollerCancelledError.\n */\nexport class PollerCancelledError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(this, PollerCancelledError.prototype);\n }\n}\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\n// eslint-disable-next-line no-use-before-define\nexport interface PollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can 
be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.\n * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * // 
The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nexport abstract class Poller, TResult>\n implements PollerLike\n{\n private stopped: boolean = true;\n private resolve?: (value: TResult) => void;\n private reject?: (error: PollerStoppedError | PollerCancelledError | Error) => void;\n private pollOncePromise?: Promise;\n private cancelPromise?: Promise;\n private promise: Promise;\n private pollProgressCallbacks: PollProgressCallback[] = [];\n\n /**\n * The poller's operation is available in full to any of the methods of the Poller class\n * and any class extending the Poller class.\n */\n protected operation: PollOperation;\n\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. 
For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n constructor(operation: PollOperation) {\n this.operation = operation;\n this.promise = new Promise(\n (\n resolve: (result: TResult) => void,\n reject: (error: PollerStoppedError | PollerCancelledError | Error) => void\n ) => {\n this.resolve = resolve;\n this.reject = reject;\n }\n );\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(() => {\n /* intentionally blank */\n });\n }\n\n /**\n * Defines how much to wait between each poll request.\n * This has to be implemented by your custom poller.\n *\n * \\@azure/core-http has a simple implementation of a delay function that waits as many milliseconds as specified.\n * This can be used as follows:\n *\n * ```ts\n * import { delay } from \"@azure/core-http\";\n *\n * export class MyPoller extends Poller {\n * // The other necessary definitions.\n *\n * async delay(): Promise {\n * const milliseconds = 1000;\n * return delay(milliseconds);\n * }\n * }\n * ```\n *\n */\n protected abstract delay(): Promise;\n\n /**\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n private async startPolling(): Promise {\n if (this.stopped) {\n this.stopped = false;\n }\n while (!this.isStopped() && !this.isDone()) {\n await this.poll();\n await this.delay();\n }\n }\n\n /**\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n private async pollOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n try {\n if (!this.isDone()) {\n this.operation = await this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this),\n });\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. 
To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(this.operation.state.result as TResult);\n }\n }\n } catch (e) {\n this.operation.state.error = e;\n if (this.reject) {\n this.reject(e);\n }\n throw e;\n }\n }\n\n /**\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n private fireProgress(state: TState): void {\n for (const callback of this.pollProgressCallbacks) {\n callback(state);\n }\n }\n\n /**\n * Invokes the underlying operation's cancel method, and rejects the\n * pollUntilDone promise.\n */\n private async cancelOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n this.operation = await this.operation.cancel(options);\n if (this.reject) {\n this.reject(new PollerCancelledError(\"Poller cancelled\"));\n }\n }\n\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public poll(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n const clearPollOncePromise = (): void => {\n this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n }\n\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n public async pollUntilDone(): Promise {\n if (this.stopped) {\n this.startPolling().catch(this.reject);\n }\n return this.promise;\n }\n\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n public onProgress(callback: (state: TState) => void): CancelOnProgress {\n this.pollProgressCallbacks.push(callback);\n return (): void => {\n this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback);\n };\n }\n\n /**\n * Returns true if the poller has finished polling.\n */\n public isDone(): boolean {\n const state: PollOperationState = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n }\n\n /**\n * Stops the poller from continuing to poll.\n */\n public stopPolling(): void {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n this.reject(new PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n }\n\n /**\n * Returns true if the poller is stopped.\n */\n public isStopped(): boolean {\n return this.stopped;\n }\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public cancelOperation(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if 
(!this.stopped) {\n this.stopped = true;\n }\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n } else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n }\n\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n public getOperationState(): TState {\n return this.operation.state;\n }\n\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n public getResult(): TResult | undefined {\n const state: PollOperationState = this.operation.state;\n return state.result;\n }\n\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n public toString(): string {\n return this.operation.toString();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/index.js b/node_modules/@azure/core-lro/dist/index.js new file mode 100644 index 00000000..3f30fc9c --- /dev/null +++ b/node_modules/@azure/core-lro/dist/index.js @@ -0,0 +1,751 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var logger$1 = require('@azure/logger'); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. 
+ */ +class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +/** + * When a poller is cancelled through the `cancelOperation` method, + * the poller will be rejected with an instance of the PollerCancelledError. + */ +class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +class Poller { + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. 
The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation) { + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pullUntilDone did not have a .catch or await try/catch on it's return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling() { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(); + await this.delay(); + } + } + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + async pollOnce(options = {}) { + try { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. 
+ this.resolve(this.operation.state.result); + } + } + } + catch (e) { + this.operation.state.error = e; + if (this.reject) { + this.reject(e); + } + throw e; + } + } + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method, and rejects the + * pollUntilDone promise. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + if (this.reject) { + this.reject(new PollerCancelledError("Poller cancelled")); + } + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone() { + if (this.stopped) { + this.startPolling().catch(this.reject); + } + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.stopped) { + this.stopped = true; + } + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. 
+ * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString() { + return this.operation.toString(); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Detects where the continuation token is and returns it. Notice that azure-asyncoperation + * must be checked first before the other location headers because there are scenarios + * where both azure-asyncoperation and location could be present in the same response but + * azure-asyncoperation should be the one to use for polling. + */ +function getPollingUrl(rawResponse, defaultPath) { + var _a, _b, _c; + return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getOperationLocation(rawResponse)) !== null && _b !== void 0 ? _b : getLocation(rawResponse)) !== null && _c !== void 0 ? 
_c : defaultPath); +} +function getLocation(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocation(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperation(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(requestMethod, rawResponse, requestPath) { + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "POST": + case "PATCH": { + return getLocation(rawResponse); + } + default: { + return undefined; + } + } +} +function inferLroMode(requestPath, requestMethod, rawResponse) { + if (getAzureAsyncOperation(rawResponse) !== undefined || + getOperationLocation(rawResponse) !== undefined) { + return { + mode: "Location", + resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath), + }; + } + else if (getLocation(rawResponse) !== undefined) { + return { + mode: "Location", + }; + } + else if (["PUT", "PATCH"].includes(requestMethod)) { + return { + mode: "Body", + }; + } + return {}; +} +class SimpleRestError extends Error { + constructor(message, statusCode) { + super(message); + this.name = "RestError"; + this.statusCode = statusCode; + Object.setPrototypeOf(this, SimpleRestError.prototype); + } +} +function isUnexpectedInitialResponse(rawResponse) { + const code = rawResponse.statusCode; + if (![203, 204, 202, 201, 200, 500].includes(code)) { + throw new SimpleRestError(`Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`, code); + } + return false; +} +function isUnexpectedPollingResponse(rawResponse) { + const code = rawResponse.statusCode; + if (![202, 201, 200, 500].includes(code)) { + throw new SimpleRestError(`Received unexpected HTTP status code ${code} while polling. This may indicate a server issue.`, code); + } + return false; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const successStates = ["succeeded"]; +const failureStates = ["failed", "canceled", "cancelled"]; + +// Copyright (c) Microsoft Corporation. +function getProvisioningState(rawResponse) { + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return typeof state === "string" ? state.toLowerCase() : "succeeded"; +} +function isBodyPollingDone(rawResponse) { + const state = getProvisioningState(rawResponse); + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); +} +/** + * Creates a polling strategy based on BodyPolling which uses the provisioning state + * from the result to determine the current operation state + */ +function processBodyPollingOperationResult(response) { + return Object.assign(Object.assign({}, response), { done: isBodyPollingDone(response.rawResponse) }); +} + +// Copyright (c) Microsoft Corporation. +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +const logger = logger$1.createClientLogger("core-lro"); + +// Copyright (c) Microsoft Corporation. 
+function isPollingDone(rawResponse) { + var _a; + if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) { + return false; + } + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = typeof status === "string" ? status.toLowerCase() : "succeeded"; + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); +} +/** + * Sends a request to the URI of the provisioned resource if needed. + */ +async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { + switch (lroResourceLocationConfig) { + case "original-uri": + return lro.sendPollRequest(lro.requestPath); + case "azure-async-operation": + return undefined; + case "location": + default: + return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); + } +} +function processLocationPollingOperationResult(lro, resourceLocation, lroResourceLocationConfig) { + return (response) => { + if (isPollingDone(response.rawResponse)) { + if (resourceLocation === undefined) { + return Object.assign(Object.assign({}, response), { done: true }); + } + else { + return Object.assign(Object.assign({}, response), { done: false, next: async () => { + const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); + return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); + } }); + } + } + return Object.assign(Object.assign({}, response), { done: false }); + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function processPassthroughOperationResult(response) { + return Object.assign(Object.assign({}, response), { done: true }); +} + +// Copyright (c) Microsoft Corporation. +/** + * creates a stepping function that maps an LRO state to another. + */ +function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { + switch (config.mode) { + case "Location": { + return processLocationPollingOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); + } + case "Body": { + return processBodyPollingOperationResult; + } + default: { + return processPassthroughOperationResult; + } + } +} +/** + * Creates a polling operation. + */ +function createPoll(lroPrimitives) { + return async (path, pollerConfig, getLroStatusFromResponse) => { + const response = await lroPrimitives.sendPollRequest(path); + const retryAfter = response.rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + pollerConfig.intervalInMs = isNaN(retryAfterInSeconds) + ? calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) + : retryAfterInSeconds * 1000; + } + return getLroStatusFromResponse(response); + }; +} +function calculatePollingIntervalFromDate(retryAfterDate, defaultIntervalInMs) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return defaultIntervalInMs; +} +/** + * Creates a callback to be used to initialize the polling operation state. 
+ * @param state - of the polling operation + * @param operationSpec - of the LRO + * @param callback - callback to be called when the operation is done + * @returns callback that initializes the state of the polling operation + */ +function createInitializeState(state, requestPath, requestMethod) { + return (response) => { + if (isUnexpectedInitialResponse(response.rawResponse)) + ; + state.initialRawResponse = response.rawResponse; + state.isStarted = true; + state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath); + state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse); + /** short circuit polling if body polling is done in the initial request */ + if (state.config.mode === undefined || + (state.config.mode === "Body" && isBodyPollingDone(state.initialRawResponse))) { + state.result = response.flatResponse; + state.isCompleted = true; + } + logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`); + return Boolean(state.isCompleted); + }; +} + +// Copyright (c) Microsoft Corporation. +class GenericPollOperation { + constructor(state, lro, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + /** + * General update function for LROPoller, the general process is as follows + * 1. Check initial operation result to determine the strategy to use + * - Strategies: Location, Azure-AsyncOperation, Original Uri + * 2. Check if the operation result has a terminal state + * - Terminal state will be determined by each strategy + * 2.1 If it is terminal state Check if a final GET request is required, if so + * send final GET request and return result from operation. If no final GET + * is required, just return the result from operation. + * - Determining what to call for final request is responsibility of each strategy + * 2.2 If it is not terminal state, call the polling operation and go to step 1 + * - Determining what to call for polling is responsibility of each strategy + * - Strategies will always use the latest URI for polling if provided otherwise + * the last known one + */ + async update(options) { + var _a, _b, _c; + const state = this.state; + let lastResponse = undefined; + if (!state.isStarted) { + const initializeState = createInitializeState(state, this.lro.requestPath, this.lro.requestMethod); + lastResponse = await this.lro.sendInitialRequest(); + initializeState(lastResponse); + } + if (!state.isCompleted) { + if (!this.poll || !this.getLroStatusFromResponse) { + if (!state.config) { + throw new Error("Bad state: LRO mode is undefined. Please check if the serialized state is well-formed."); + } + const isDone = this.isDone; + this.getLroStatusFromResponse = isDone + ? (response) => (Object.assign(Object.assign({}, response), { done: isDone(response.flatResponse, this.state) })) + : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig); + this.poll = createPoll(this.lro); + } + if (!state.pollingURL) { + throw new Error("Bad state: polling URL is undefined. 
Please check if the serialized state is well-formed."); + } + const currentState = await this.poll(state.pollingURL, this.pollerConfig, this.getLroStatusFromResponse); + logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`); + if (currentState.done) { + state.result = this.processResult + ? this.processResult(currentState.flatResponse, state) + : currentState.flatResponse; + state.isCompleted = true; + } + else { + this.poll = (_a = currentState.next) !== null && _a !== void 0 ? _a : this.poll; + state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL); + } + lastResponse = currentState; + } + logger.verbose(`LRO: current state: ${JSON.stringify(state)}`); + if (lastResponse) { + (_b = this.updateState) === null || _b === void 0 ? void 0 : _b.call(this, state, lastResponse === null || lastResponse === void 0 ? void 0 : lastResponse.rawResponse); + } + else { + logger.error(`LRO: no response was received`); + } + (_c = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _c === void 0 ? void 0 : _c.call(options, state); + return this; + } + async cancel() { + this.state.isCancelled = true; + return this; + } + /** + * Serializes the Poller operation. + */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } +} + +// Copyright (c) Microsoft Corporation. +function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`); + } +} +/** + * The LRO Engine, a class that performs polling. + */ +class LroEngine extends Poller { + constructor(lro, options) { + const { intervalInMs = 2000, resumeFrom } = options || {}; + const state = resumeFrom + ? deserializeState(resumeFrom) + : {}; + const operation = new GenericPollOperation(state, lro, options === null || options === void 0 ? void 0 : options.lroResourceLocationConfig, options === null || options === void 0 ? void 0 : options.processResult, options === null || options === void 0 ? void 0 : options.updateState, options === null || options === void 0 ? void 0 : options.isDone); + super(operation); + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. 
+ */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + } +} + +exports.LroEngine = LroEngine; +exports.Poller = Poller; +exports.PollerCancelledError = PollerCancelledError; +exports.PollerStoppedError = PollerStoppedError; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-lro/dist/index.js.map b/node_modules/@azure/core-lro/dist/index.js.map new file mode 100644 index 00000000..9aa28f79 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/poller.ts","../src/lroEngine/requestUtils.ts","../src/lroEngine/models.ts","../src/lroEngine/bodyPolling.ts","../src/lroEngine/logger.ts","../src/lroEngine/locationPolling.ts","../src/lroEngine/passthrough.ts","../src/lroEngine/stateMachine.ts","../src/lroEngine/operation.ts","../src/lroEngine/lroEngine.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperation, PollOperationState } from \"./pollOperation\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * CancelOnProgress is used as the return value of a Poller's onProgress method.\n * When a user invokes onProgress, they're required to pass in a function that will be\n * called as a callback with the new data received each time the poll operation is updated.\n * onProgress returns a function that will prevent any further update to reach the original callback.\n */\nexport type CancelOnProgress = () => void;\n\n/**\n * PollProgressCallback is the type of the callback functions sent to onProgress.\n * These functions will receive a TState that is defined by your implementation of\n * the Poller class.\n */\nexport type PollProgressCallback = (state: TState) => void;\n\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nexport class PollerStoppedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(this, PollerStoppedError.prototype);\n }\n}\n\n/**\n * When a poller is cancelled through the `cancelOperation` method,\n * the poller will be rejected with an instance of the PollerCancelledError.\n */\nexport class PollerCancelledError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(this, PollerCancelledError.prototype);\n }\n}\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\n// eslint-disable-next-line no-use-before-define\nexport interface PollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. 
After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.\n * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * // The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nexport abstract class Poller, TResult>\n implements 
PollerLike\n{\n private stopped: boolean = true;\n private resolve?: (value: TResult) => void;\n private reject?: (error: PollerStoppedError | PollerCancelledError | Error) => void;\n private pollOncePromise?: Promise;\n private cancelPromise?: Promise;\n private promise: Promise;\n private pollProgressCallbacks: PollProgressCallback[] = [];\n\n /**\n * The poller's operation is available in full to any of the methods of the Poller class\n * and any class extending the Poller class.\n */\n protected operation: PollOperation;\n\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. 
For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n constructor(operation: PollOperation) {\n this.operation = operation;\n this.promise = new Promise(\n (\n resolve: (result: TResult) => void,\n reject: (error: PollerStoppedError | PollerCancelledError | Error) => void\n ) => {\n this.resolve = resolve;\n this.reject = reject;\n }\n );\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(() => {\n /* intentionally blank */\n });\n }\n\n /**\n * Defines how much to wait between each poll request.\n * This has to be implemented by your custom poller.\n *\n * \\@azure/core-http has a simple implementation of a delay function that waits as many milliseconds as specified.\n * This can be used as follows:\n *\n * ```ts\n * import { delay } from \"@azure/core-http\";\n *\n * export class MyPoller extends Poller {\n * // The other necessary definitions.\n *\n * async delay(): Promise {\n * const milliseconds = 1000;\n * return delay(milliseconds);\n * }\n * }\n * ```\n *\n */\n protected abstract delay(): Promise;\n\n /**\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n private async startPolling(): Promise {\n if (this.stopped) {\n this.stopped = false;\n }\n while (!this.isStopped() && !this.isDone()) {\n await this.poll();\n await this.delay();\n }\n }\n\n /**\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n private async pollOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n try {\n if (!this.isDone()) {\n this.operation = await this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this),\n });\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. 
To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(this.operation.state.result as TResult);\n }\n }\n } catch (e) {\n this.operation.state.error = e;\n if (this.reject) {\n this.reject(e);\n }\n throw e;\n }\n }\n\n /**\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n private fireProgress(state: TState): void {\n for (const callback of this.pollProgressCallbacks) {\n callback(state);\n }\n }\n\n /**\n * Invokes the underlying operation's cancel method, and rejects the\n * pollUntilDone promise.\n */\n private async cancelOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n this.operation = await this.operation.cancel(options);\n if (this.reject) {\n this.reject(new PollerCancelledError(\"Poller cancelled\"));\n }\n }\n\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public poll(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n const clearPollOncePromise = (): void => {\n this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n }\n\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n public async pollUntilDone(): Promise {\n if (this.stopped) {\n this.startPolling().catch(this.reject);\n }\n return this.promise;\n }\n\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n public onProgress(callback: (state: TState) => void): CancelOnProgress {\n this.pollProgressCallbacks.push(callback);\n return (): void => {\n this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback);\n };\n }\n\n /**\n * Returns true if the poller has finished polling.\n */\n public isDone(): boolean {\n const state: PollOperationState = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n }\n\n /**\n * Stops the poller from continuing to poll.\n */\n public stopPolling(): void {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n this.reject(new PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n }\n\n /**\n * Returns true if the poller is stopped.\n */\n public isStopped(): boolean {\n return this.stopped;\n }\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public cancelOperation(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if 
(!this.stopped) {\n this.stopped = true;\n }\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n } else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n }\n\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n public getOperationState(): TState {\n return this.operation.state;\n }\n\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n public getResult(): TResult | undefined {\n const state: PollOperationState = this.operation.state;\n return state.result;\n }\n\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n public toString(): string {\n return this.operation.toString();\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroConfig, RawResponse } from \"./models\";\n\n/**\n * Detects where the continuation token is and returns it. 
Notice that azure-asyncoperation\n * must be checked first before the other location headers because there are scenarios\n * where both azure-asyncoperation and location could be present in the same response but\n * azure-asyncoperation should be the one to use for polling.\n */\nexport function getPollingUrl(rawResponse: RawResponse, defaultPath: string): string {\n return (\n getAzureAsyncOperation(rawResponse) ??\n getOperationLocation(rawResponse) ??\n getLocation(rawResponse) ??\n defaultPath\n );\n}\n\nfunction getLocation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"location\"];\n}\n\nfunction getOperationLocation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"operation-location\"];\n}\n\nfunction getAzureAsyncOperation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"azure-asyncoperation\"];\n}\n\nfunction findResourceLocation(\n requestMethod: string,\n rawResponse: RawResponse,\n requestPath: string\n): string | undefined {\n switch (requestMethod) {\n case \"PUT\": {\n return requestPath;\n }\n case \"POST\":\n case \"PATCH\": {\n return getLocation(rawResponse);\n }\n default: {\n return undefined;\n }\n }\n}\n\nexport function inferLroMode(\n requestPath: string,\n requestMethod: string,\n rawResponse: RawResponse\n): LroConfig {\n if (\n getAzureAsyncOperation(rawResponse) !== undefined ||\n getOperationLocation(rawResponse) !== undefined\n ) {\n return {\n mode: \"Location\",\n resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath),\n };\n } else if (getLocation(rawResponse) !== undefined) {\n return {\n mode: \"Location\",\n };\n } else if ([\"PUT\", \"PATCH\"].includes(requestMethod)) {\n return {\n mode: \"Body\",\n };\n }\n return {};\n}\n\nclass SimpleRestError extends Error {\n public statusCode?: number;\n constructor(message: string, statusCode: number) {\n super(message);\n this.name = \"RestError\";\n this.statusCode = statusCode;\n\n Object.setPrototypeOf(this, SimpleRestError.prototype);\n }\n}\n\nexport function isUnexpectedInitialResponse(rawResponse: RawResponse): boolean {\n const code = rawResponse.statusCode;\n if (![203, 204, 202, 201, 200, 500].includes(code)) {\n throw new SimpleRestError(\n `Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`,\n code\n );\n }\n return false;\n}\n\nexport function isUnexpectedPollingResponse(rawResponse: RawResponse): boolean {\n const code = rawResponse.statusCode;\n if (![202, 201, 200, 500].includes(code)) {\n throw new SimpleRestError(\n `Received unexpected HTTP status code ${code} while polling. 
This may indicate a server issue.`,\n code\n );\n }\n return false;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperationState } from \"../pollOperation\";\n\n/**\n * Options for the LRO poller.\n */\nexport interface LroEngineOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n resumeFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n lroResourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: RawResponse) => void;\n /**\n * A predicate to determine whether the LRO finished processing.\n */\n isDone?: (lastResponse: unknown, state: TState) => boolean;\n}\n\nexport const successStates = [\"succeeded\"];\nexport const failureStates = [\"failed\", \"canceled\", \"cancelled\"];\n/**\n * The LRO states that signal that the LRO has completed.\n */\nexport const terminalStates = successStates.concat(failureStates);\n\n/**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\nexport type LroResourceLocationConfig = \"azure-async-operation\" | \"location\" | \"original-uri\";\n\n/**\n * The type of a LRO response body. This is just a convenience type for checking the status of the operation.\n */\n\nexport interface LroBody extends Record {\n /** The status of the operation. */\n status?: string;\n /** The state of the provisioning process */\n provisioningState?: string;\n /** The properties of the provisioning process */\n properties?: { provisioningState?: string } & Record;\n}\n\n/**\n * Simple type of the raw response.\n */\nexport interface RawResponse {\n /** The HTTP status code */\n statusCode: number;\n /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */\n headers: {\n [headerName: string]: string;\n };\n /** The parsed response body */\n body?: unknown;\n}\n\n/**\n * The type of the response of a LRO.\n */\nexport interface LroResponse {\n /** The flattened response */\n flatResponse: T;\n /** The raw response */\n rawResponse: RawResponse;\n}\n\n/** The type of which LRO implementation being followed by a specific API. 
*/\nexport type LroMode = \"Location\" | \"Body\";\n\n/**\n * The configuration of a LRO to determine how to perform polling and checking whether the operation has completed.\n */\nexport interface LroConfig {\n /** The LRO mode */\n mode?: LroMode;\n /** The path of a provisioned resource */\n resourceLocation?: string;\n}\n\n/**\n * Type of a polling operation state that can actually be resumed.\n */\nexport type ResumablePollOperationState = PollOperationState & {\n initialRawResponse?: RawResponse;\n config?: LroConfig;\n pollingURL?: string;\n};\n\nexport interface PollerConfig {\n intervalInMs: number;\n}\n\n/**\n * The type of a terminal state of an LRO.\n */\nexport interface LroTerminalState extends LroResponse {\n /**\n * Whether the operation has finished.\n */\n done: true;\n}\n\n/**\n * The type of an in-progress state of an LRO.\n */\nexport interface LroInProgressState extends LroResponse {\n /**\n * Whether the operation has finished.\n */\n done: false;\n /**\n * The request to be sent next if it is different from the standard polling one.\n * Notice that it will disregard any polling URLs provided to it.\n */\n next?: () => Promise>;\n}\n\n/**\n * The type of an LRO state which is a tagged union of terminal and in-progress states.\n */\nexport type LroStatus = LroTerminalState | LroInProgressState;\n\n/**\n * The type of the getLROStatusFromResponse method. It takes the response as input and returns along the response whether the operation has finished.\n */\nexport type GetLroStatusFromResponse = (response: LroResponse) => LroStatus;\n\n/**\n * Description of a long running operation.\n */\nexport interface LongRunningOperation {\n /**\n * The request path.\n */\n requestPath: string;\n /**\n * The HTTP request method.\n */\n requestMethod: string;\n /**\n * A function that can be used to send initial request to the service.\n */\n sendInitialRequest: () => Promise>;\n /**\n * A function that can be used to poll for the current status of a long running operation.\n */\n sendPollRequest: (path: string) => Promise>;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LroBody,\n LroResponse,\n LroStatus,\n RawResponse,\n failureStates,\n successStates,\n} from \"./models\";\nimport { isUnexpectedPollingResponse } from \"./requestUtils\";\n\nfunction getProvisioningState(rawResponse: RawResponse): string {\n const { properties, provisioningState } = (rawResponse.body as LroBody) ?? {};\n const state: string | undefined = properties?.provisioningState ?? provisioningState;\n return typeof state === \"string\" ? state.toLowerCase() : \"succeeded\";\n}\n\nexport function isBodyPollingDone(rawResponse: RawResponse): boolean {\n const state = getProvisioningState(rawResponse);\n if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) {\n throw new Error(`The long running operation has failed. 
The provisioning state: ${state}.`);\n }\n return successStates.includes(state);\n}\n\n/**\n * Creates a polling strategy based on BodyPolling which uses the provisioning state\n * from the result to determine the current operation state\n */\nexport function processBodyPollingOperationResult(\n response: LroResponse\n): LroStatus {\n return {\n ...response,\n done: isBodyPollingDone(response.rawResponse),\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n * @internal\n */\nexport const logger = createClientLogger(\"core-lro\");\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LongRunningOperation,\n LroBody,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n RawResponse,\n failureStates,\n successStates,\n} from \"./models\";\nimport { isUnexpectedPollingResponse } from \"./requestUtils\";\n\nfunction isPollingDone(rawResponse: RawResponse): boolean {\n if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) {\n return false;\n }\n const { status } = (rawResponse.body as LroBody) ?? {};\n const state = typeof status === \"string\" ? status.toLowerCase() : \"succeeded\";\n if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) {\n throw new Error(`The long running operation has failed. The provisioning state: ${state}.`);\n }\n return successStates.includes(state);\n}\n\n/**\n * Sends a request to the URI of the provisioned resource if needed.\n */\nasync function sendFinalRequest(\n lro: LongRunningOperation,\n resourceLocation: string,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): Promise | undefined> {\n switch (lroResourceLocationConfig) {\n case \"original-uri\":\n return lro.sendPollRequest(lro.requestPath);\n case \"azure-async-operation\":\n return undefined;\n case \"location\":\n default:\n return lro.sendPollRequest(resourceLocation ?? lro.requestPath);\n }\n}\n\nexport function processLocationPollingOperationResult(\n lro: LongRunningOperation,\n resourceLocation?: string,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): (response: LroResponse) => LroStatus {\n return (response: LroResponse): LroStatus => {\n if (isPollingDone(response.rawResponse)) {\n if (resourceLocation === undefined) {\n return { ...response, done: true };\n } else {\n return {\n ...response,\n done: false,\n next: async () => {\n const finalResponse = await sendFinalRequest(\n lro,\n resourceLocation,\n lroResourceLocationConfig\n );\n return {\n ...(finalResponse ?? 
response),\n done: true,\n };\n },\n };\n }\n }\n return {\n ...response,\n done: false,\n };\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroResponse, LroStatus } from \"./models\";\n\nexport function processPassthroughOperationResult(\n response: LroResponse\n): LroStatus {\n return {\n ...response,\n done: true,\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n GetLroStatusFromResponse,\n LongRunningOperation,\n LroConfig,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n PollerConfig,\n ResumablePollOperationState,\n} from \"./models\";\nimport { getPollingUrl, inferLroMode, isUnexpectedInitialResponse } from \"./requestUtils\";\nimport { isBodyPollingDone, processBodyPollingOperationResult } from \"./bodyPolling\";\nimport { logger } from \"./logger\";\nimport { processLocationPollingOperationResult } from \"./locationPolling\";\nimport { processPassthroughOperationResult } from \"./passthrough\";\n\n/**\n * creates a stepping function that maps an LRO state to another.\n */\nexport function createGetLroStatusFromResponse(\n lroPrimitives: LongRunningOperation,\n config: LroConfig,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): GetLroStatusFromResponse {\n switch (config.mode) {\n case \"Location\": {\n return processLocationPollingOperationResult(\n lroPrimitives,\n config.resourceLocation,\n lroResourceLocationConfig\n );\n }\n case \"Body\": {\n return processBodyPollingOperationResult;\n }\n default: {\n return processPassthroughOperationResult;\n }\n }\n}\n\n/**\n * Creates a polling operation.\n */\nexport function createPoll(\n lroPrimitives: LongRunningOperation\n): (\n pollingURL: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n) => Promise> {\n return async (\n path: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n ): Promise> => {\n const response = await lroPrimitives.sendPollRequest(path);\n const retryAfter: string | undefined = response.rawResponse.headers[\"retry-after\"];\n if (retryAfter !== undefined) {\n // Retry-After header value is either in HTTP date format, or in seconds\n const retryAfterInSeconds = parseInt(retryAfter);\n pollerConfig.intervalInMs = isNaN(retryAfterInSeconds)\n ? 
calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs)\n : retryAfterInSeconds * 1000;\n }\n return getLroStatusFromResponse(response);\n };\n}\n\nfunction calculatePollingIntervalFromDate(\n retryAfterDate: Date,\n defaultIntervalInMs: number\n): number {\n const timeNow = Math.floor(new Date().getTime());\n const retryAfterTime = retryAfterDate.getTime();\n if (timeNow < retryAfterTime) {\n return retryAfterTime - timeNow;\n }\n return defaultIntervalInMs;\n}\n\n/**\n * Creates a callback to be used to initialize the polling operation state.\n * @param state - of the polling operation\n * @param operationSpec - of the LRO\n * @param callback - callback to be called when the operation is done\n * @returns callback that initializes the state of the polling operation\n */\nexport function createInitializeState(\n state: ResumablePollOperationState,\n requestPath: string,\n requestMethod: string\n): (response: LroResponse) => boolean {\n return (response: LroResponse): boolean => {\n if (isUnexpectedInitialResponse(response.rawResponse)) return true;\n state.initialRawResponse = response.rawResponse;\n state.isStarted = true;\n state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath);\n state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse);\n /** short circuit polling if body polling is done in the initial request */\n if (\n state.config.mode === undefined ||\n (state.config.mode === \"Body\" && isBodyPollingDone(state.initialRawResponse))\n ) {\n state.result = response.flatResponse as TResult;\n state.isCompleted = true;\n }\n logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`);\n return Boolean(state.isCompleted);\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n GetLroStatusFromResponse,\n LongRunningOperation,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n PollerConfig,\n RawResponse,\n ResumablePollOperationState,\n} from \"./models\";\nimport { PollOperation, PollOperationState } from \"../pollOperation\";\nimport { createGetLroStatusFromResponse, createInitializeState, createPoll } from \"./stateMachine\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { getPollingUrl } from \"./requestUtils\";\nimport { logger } from \"./logger\";\n\nexport class GenericPollOperation>\n implements PollOperation\n{\n private poll?: (\n pollingURL: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n ) => Promise>;\n private pollerConfig?: PollerConfig;\n private getLroStatusFromResponse?: GetLroStatusFromResponse;\n\n constructor(\n public state: TState & ResumablePollOperationState,\n private lro: LongRunningOperation,\n private lroResourceLocationConfig?: LroResourceLocationConfig,\n private processResult?: (result: unknown, state: TState) => TResult,\n private updateState?: (state: TState, lastResponse: RawResponse) => void,\n private isDone?: (lastResponse: TResult, state: TState) => boolean\n ) {}\n\n public setPollerConfig(pollerConfig: PollerConfig): void {\n this.pollerConfig = pollerConfig;\n }\n\n /**\n * General update function for LROPoller, the general process is as follows\n * 1. Check initial operation result to determine the strategy to use\n * - Strategies: Location, Azure-AsyncOperation, Original Uri\n * 2. 
Check if the operation result has a terminal state\n * - Terminal state will be determined by each strategy\n * 2.1 If it is terminal state Check if a final GET request is required, if so\n * send final GET request and return result from operation. If no final GET\n * is required, just return the result from operation.\n * - Determining what to call for final request is responsibility of each strategy\n * 2.2 If it is not terminal state, call the polling operation and go to step 1\n * - Determining what to call for polling is responsibility of each strategy\n * - Strategies will always use the latest URI for polling if provided otherwise\n * the last known one\n */\n async update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise> {\n const state = this.state;\n let lastResponse: LroResponse | undefined = undefined;\n if (!state.isStarted) {\n const initializeState = createInitializeState(\n state,\n this.lro.requestPath,\n this.lro.requestMethod\n );\n lastResponse = await this.lro.sendInitialRequest();\n initializeState(lastResponse);\n }\n\n if (!state.isCompleted) {\n if (!this.poll || !this.getLroStatusFromResponse) {\n if (!state.config) {\n throw new Error(\n \"Bad state: LRO mode is undefined. Please check if the serialized state is well-formed.\"\n );\n }\n const isDone = this.isDone;\n this.getLroStatusFromResponse = isDone\n ? (response: LroResponse) => ({\n ...response,\n done: isDone(response.flatResponse, this.state),\n })\n : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig);\n this.poll = createPoll(this.lro);\n }\n if (!state.pollingURL) {\n throw new Error(\n \"Bad state: polling URL is undefined. Please check if the serialized state is well-formed.\"\n );\n }\n const currentState = await this.poll(\n state.pollingURL,\n this.pollerConfig!,\n this.getLroStatusFromResponse\n );\n logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`);\n if (currentState.done) {\n state.result = this.processResult\n ? this.processResult(currentState.flatResponse, state)\n : currentState.flatResponse;\n state.isCompleted = true;\n } else {\n this.poll = currentState.next ?? 
this.poll;\n state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL);\n }\n lastResponse = currentState;\n }\n logger.verbose(`LRO: current state: ${JSON.stringify(state)}`);\n if (lastResponse) {\n this.updateState?.(state, lastResponse?.rawResponse);\n } else {\n logger.error(`LRO: no response was received`);\n }\n options?.fireProgress?.(state);\n return this;\n }\n\n async cancel(): Promise> {\n this.state.isCancelled = true;\n return this;\n }\n\n /**\n * Serializes the Poller operation.\n */\n public toString(): string {\n return JSON.stringify({\n state: this.state,\n });\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LongRunningOperation,\n LroEngineOptions,\n PollerConfig,\n ResumablePollOperationState,\n} from \"./models\";\nimport { GenericPollOperation } from \"./operation\";\nimport { PollOperationState } from \"../pollOperation\";\nimport { Poller } from \"../poller\";\n\nfunction deserializeState(\n serializedState: string\n): TState & ResumablePollOperationState {\n try {\n return JSON.parse(serializedState).state;\n } catch (e) {\n throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`);\n }\n}\n\n/**\n * The LRO Engine, a class that performs polling.\n */\nexport class LroEngine> extends Poller<\n TState,\n TResult\n> {\n private config: PollerConfig;\n\n constructor(lro: LongRunningOperation, options?: LroEngineOptions) {\n const { intervalInMs = 2000, resumeFrom } = options || {};\n const state: TState & ResumablePollOperationState = resumeFrom\n ? deserializeState(resumeFrom)\n : ({} as TState & ResumablePollOperationState);\n\n const operation = new GenericPollOperation(\n state,\n lro,\n options?.lroResourceLocationConfig,\n options?.processResult,\n options?.updateState,\n options?.isDone\n );\n super(operation);\n\n this.config = { intervalInMs: intervalInMs };\n operation.setPollerConfig(this.config);\n }\n\n /**\n * The method used by the poller to wait before attempting to update its operation.\n */\n delay(): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs));\n 
}\n}\n"],"names":["createClientLogger"],"mappings":";;;;;;AAAA;AACA;AAoBA;;;AAGG;AACG,MAAO,kBAAmB,SAAQ,KAAK,CAAA;AAC3C,IAAA,WAAA,CAAY,OAAe,EAAA;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAC;QACjC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC;KAC3D;AACF,CAAA;AAED;;;AAGG;AACG,MAAO,oBAAqB,SAAQ,KAAK,CAAA;AAC7C,IAAA,WAAA,CAAY,OAAe,EAAA;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAC;QACnC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,SAAS,CAAC,CAAC;KAC7D;AACF,CAAA;AA2DD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA4DG;AACH;MACsB,MAAM,CAAA;AAiB1B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAgEG;AACH,IAAA,WAAA,CAAY,SAAyC,EAAA;QA/E7C,IAAO,CAAA,OAAA,GAAY,IAAI,CAAC;QAMxB,IAAqB,CAAA,qBAAA,GAAmC,EAAE,CAAC;AA0EjE,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CACxB,CACE,OAAkC,EAClC,MAA0E,KACxE;AACF,YAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACvB,SAAC,CACF,CAAC;;;;AAIF,QAAA,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAK;;AAExB,SAAC,CAAC,CAAC;KACJ;AAyBD;;;AAGG;AACK,IAAA,MAAM,YAAY,GAAA;QACxB,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;AACtB,SAAA;QACD,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;AAC1C,YAAA,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;AAClB,YAAA,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;AACpB,SAAA;KACF;AAED;;;;;;;AAOG;AACK,IAAA,MAAM,QAAQ,CAAC,OAAA,GAA6C,EAAE,EAAA;QACpE,IAAI;AACF,YAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;oBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW;oBAChC,YAAY,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;AAC3C,iBAAA,CAAC,CAAC;gBACH,IAAI,IAAI,CAAC,MAAM,EAAE,IAAI,IAAI,CAAC,OAAO,EAAE;;;;;;oBAMjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,MAAiB,CAAC,CAAC;AACtD,iBAAA;AACF,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC;YAC/B,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,gBAAA,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAChB,aAAA;AACD,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;KACF;AAED;;;;;;;AAOG;AACK,IAAA,YAAY,CAAC,KAAa,EAAA;AAChC,QAAA,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,qBAAqB,EAAE;YACjD,QAAQ,CAAC,KAAK,CAAC,CAAC;AACjB,SAAA;KACF;AAED;;;AAGG;AACK,IAAA,MAAM,UAAU,CAAC,OAAA,GAA6C,EAAE,EAAA;AACtE,QAAA,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QACtD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,IAAI,oBAAoB,CAAC,kBAAkB,CAAC,CAAC,CAAC;AAC3D,SAAA;KACF;AAED;;;;;;;AAOG;IACI,IAAI,CAAC,UAA6C,EAAE,EAAA;AACzD,QAAA,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE;YACzB,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,oBAAoB,GAAG,MAAW;AACtC,gBAAA,IAAI,CAAC,eAAe,GAAG,SAAS,CAAC;AACnC,aAAC,CAAC;AACF,YAAA,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,oBAAoB,EAAE,oBAAoB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAC1F,SAAA;QACD,OAAO,IAAI,CAAC,eAAe,CAAC;KAC7B;AAED;;AAEG;AACI,IAAA,MAAM,aAAa,GAAA;QACxB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACxC,SAAA;QACD,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AAED;;;;;AAKG;AACI,IAAA,UAAU,CAAC,QAAiC,EAAA;AACjD,QAAA,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AAC1C,QAAA,OAAO,MAAW;AAChB,YAAA,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,QAAQ,CAAC,CAAC;AACxF,SAAC,CAAC;KACH;AAED;;AAEG;IACI,MAAM,GAAA;AACX,QAAA,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;AAChE,QAAA,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;KACvE;AAED;;AAEG;IACI,WAAW,GAAA;AAChB,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;AACjB,YAAA,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAA
I,CAAC,MAAM,EAAE;gBACf,IAAI,CAAC,MAAM,CAAC,IAAI,kBAAkB,CAAC,gCAAgC,CAAC,CAAC,CAAC;AACvE,aAAA;AACF,SAAA;KACF;AAED;;AAEG;IACI,SAAS,GAAA;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AAED;;;;;;;;AAQG;IACI,eAAe,CAAC,UAA6C,EAAE,EAAA;AACpE,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;AACjB,YAAA,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;AACrB,SAAA;AACD,QAAA,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YACvB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;AAC/C,SAAA;aAAM,IAAI,OAAO,CAAC,WAAW,EAAE;AAC9B,YAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;AAC1D,SAAA;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CG;IACI,iBAAiB,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;KAC7B;AAED;;;;;AAKG;IACI,SAAS,GAAA;AACd,QAAA,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,KAAK,CAAC,MAAM,CAAC;KACrB;AAED;;;AAGG;IACI,QAAQ,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;KAClC;AACF;;ACxgBD;AACA;AAIA;;;;;AAKG;AACa,SAAA,aAAa,CAAC,WAAwB,EAAE,WAAmB,EAAA;;IACzE,QACE,MAAA,CAAA,EAAA,GAAA,CAAA,EAAA,GAAA,sBAAsB,CAAC,WAAW,CAAC,MACnC,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,oBAAoB,CAAC,WAAW,CAAC,mCACjC,WAAW,CAAC,WAAW,CAAC,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GACxB,WAAW,EACX;AACJ,CAAC;AAED,SAAS,WAAW,CAAC,WAAwB,EAAA;AAC3C,IAAA,OAAO,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;AACzC,CAAC;AAED,SAAS,oBAAoB,CAAC,WAAwB,EAAA;AACpD,IAAA,OAAO,WAAW,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,sBAAsB,CAAC,WAAwB,EAAA;AACtD,IAAA,OAAO,WAAW,CAAC,OAAO,CAAC,sBAAsB,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,oBAAoB,CAC3B,aAAqB,EACrB,WAAwB,EACxB,WAAmB,EAAA;AAEnB,IAAA,QAAQ,aAAa;QACnB,KAAK,KAAK,EAAE;AACV,YAAA,OAAO,WAAW,CAAC;AACpB,SAAA;AACD,QAAA,KAAK,MAAM,CAAC;QACZ,KAAK,OAAO,EAAE;AACZ,YAAA,OAAO,WAAW,CAAC,WAAW,CAAC,CAAC;AACjC,SAAA;AACD,QAAA,SAAS;AACP,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;AACF,KAAA;AACH,CAAC;SAEe,YAAY,CAC1B,WAAmB,EACnB,aAAqB,EACrB,WAAwB,EAAA;AAExB,IAAA,IACE,sBAAsB,CAAC,WAAW,CAAC,KAAK,SAAS;AACjD,QAAA,oBAAoB,CAAC,WAAW,CAAC,KAAK,SAAS,EAC/C;QACA,OAAO;AACL,YAAA,IAAI,EAAE,UAAU;YAChB,gBAAgB,EAAE,oBAAoB,CAAC,aAAa,EAAE,WAAW,EAAE,WAAW,CAAC;SAChF,CAAC;AACH,KAAA;AAAM,SAAA,IAAI,WAAW,CAAC,WAAW,CAAC,KAAK,SAAS,EAAE;QACjD,OAAO;AACL,YAAA,IAAI,EAAE,UAAU;SACjB,CAAC;AACH,KAAA;SAAM,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,EAAE;QACnD,OAAO;AACL,YAAA,IAAI,EAAE,MAAM;SACb,CAAC;AACH,KAAA;AACD,IAAA,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,MAAM,eAAgB,SAAQ,KAAK,CAAA;IAEjC,WAAY,CAAA,OAAe,EAAE,UAAkB,EAAA;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AACxB,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAE7B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAC;KACxD;AACF,CAAA;AAEK,SAAU,2BAA2B,CAAC,WAAwB,EAAA;AAClE,IAAA,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC;AACpC,IAAA,IAAI,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;QAClD,MAAM,IAAI,eAAe,CACvB,CAAA,qCAAA,EAAwC,IAAI,CAA6D,2DAAA,CAAA,EACzG,IAAI,CACL,CAAC;AACH,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAEK,SAAU,2BAA2B,CAAC,WAAwB,EAAA;AAClE,IAAA,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC;AACpC,IAAA,IAAI,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;QACxC,MAAM,IAAI,eAAe,CACvB,CAAA,qCAAA,EAAwC,IAAI,CAAmD,iDAAA,CAAA,EAC/F,IAAI,CACL,CAAC;AACH,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf;;AC3GA;AACA;AAkCO,MAAM,aAAa,GAAG,CAAC,WAAW,CAAC,CAAC;AACpC,MAAM,aAAa,GAAG,CAAC,QAAQ,EAAE,UAAU,EAAE,WAAW,CAAC;;ACpChE;AAaA,SAAS,oBAAoB,CAAC,WAAwB,EAAA;;AACpD,IAAA,MAAM,EAAE,UAAU,EAAE,iBAAiB,EAAE,GAAG,CAAC,EAAA,GAAA,WAAW,CAAC,IAAgB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,EAAE,CAAC;AAC9E,IAAA,MAAM,KAAK,GAAuB,CAAA,EAAA,GAAA,UAAU,KAAV,IAAA,IAAA,UAAU,KA
AV,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,UAAU,CAAE,iBAAiB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,iBAAiB,CAAC;AACrF,IAAA,OAAO,OAAO,KAAK,KAAK,QAAQ,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,WAAW,CAAC;AACvE,CAAC;AAEK,SAAU,iBAAiB,CAAC,WAAwB,EAAA;AACxD,IAAA,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;IAChD,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;AAC7E,QAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,KAAK,CAAA,CAAA,CAAG,CAAC,CAAC;AAC7F,KAAA;AACD,IAAA,OAAO,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AACvC,CAAC;AAED;;;AAGG;AACG,SAAU,iCAAiC,CAC/C,QAA8B,EAAA;IAE9B,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CACX,EAAA,EAAA,IAAI,EAAE,iBAAiB,CAAC,QAAQ,CAAC,WAAW,CAAC,EAC7C,CAAA,CAAA;AACJ;;ACtCA;AAKA;;;AAGG;AACI,MAAM,MAAM,GAAGA,2BAAkB,CAAC,UAAU,CAAC;;ACTpD;AAeA,SAAS,aAAa,CAAC,WAAwB,EAAA;;IAC7C,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,WAAW,CAAC,UAAU,KAAK,GAAG,EAAE;AAC9E,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;IACD,MAAM,EAAE,MAAM,EAAE,GAAG,CAAA,EAAA,GAAC,WAAW,CAAC,IAAgB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,EAAE,CAAC;AACvD,IAAA,MAAM,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,CAAC,WAAW,EAAE,GAAG,WAAW,CAAC;IAC9E,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;AAC7E,QAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,KAAK,CAAA,CAAA,CAAG,CAAC,CAAC;AAC7F,KAAA;AACD,IAAA,OAAO,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AACvC,CAAC;AAED;;AAEG;AACH,eAAe,gBAAgB,CAC7B,GAAkC,EAClC,gBAAwB,EACxB,yBAAqD,EAAA;AAErD,IAAA,QAAQ,yBAAyB;AAC/B,QAAA,KAAK,cAAc;YACjB,OAAO,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AAC9C,QAAA,KAAK,uBAAuB;AAC1B,YAAA,OAAO,SAAS,CAAC;AACnB,QAAA,KAAK,UAAU,CAAC;AAChB,QAAA;AACE,YAAA,OAAO,GAAG,CAAC,eAAe,CAAC,gBAAgB,KAAhB,IAAA,IAAA,gBAAgB,KAAhB,KAAA,CAAA,GAAA,gBAAgB,GAAI,GAAG,CAAC,WAAW,CAAC,CAAC;AACnE,KAAA;AACH,CAAC;SAEe,qCAAqC,CACnD,GAAkC,EAClC,gBAAyB,EACzB,yBAAqD,EAAA;IAErD,OAAO,CAAC,QAA8B,KAAwB;AAC5D,QAAA,IAAI,aAAa,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE;YACvC,IAAI,gBAAgB,KAAK,SAAS,EAAE;AAClC,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAY,QAAQ,CAAA,EAAA,EAAE,IAAI,EAAE,IAAI,EAAG,CAAA,CAAA;AACpC,aAAA;AAAM,iBAAA;gBACL,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CACX,EAAA,EAAA,IAAI,EAAE,KAAK,EACX,IAAI,EAAE,YAAW;wBACf,MAAM,aAAa,GAAG,MAAM,gBAAgB,CAC1C,GAAG,EACH,gBAAgB,EAChB,yBAAyB,CAC1B,CAAC;AACF,wBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,GACM,aAAa,KAAb,IAAA,IAAA,aAAa,KAAb,KAAA,CAAA,GAAA,aAAa,GAAI,QAAQ,EAAC,EAAA,EAC9B,IAAI,EAAE,IAAI,EACV,CAAA,CAAA;AACJ,qBAAC,EACD,CAAA,CAAA;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACK,QAAQ,CAAA,EAAA,EACX,IAAI,EAAE,KAAK,EACX,CAAA,CAAA;AACJ,KAAC,CAAC;AACJ;;AC9EA;AACA;AAIM,SAAU,iCAAiC,CAC/C,QAA8B,EAAA;AAE9B,IAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACK,QAAQ,CAAA,EAAA,EACX,IAAI,EAAE,IAAI,EACV,CAAA,CAAA;AACJ;;ACZA;AAmBA;;AAEG;SACa,8BAA8B,CAC5C,aAA4C,EAC5C,MAAiB,EACjB,yBAAqD,EAAA;IAErD,QAAQ,MAAM,CAAC,IAAI;QACjB,KAAK,UAAU,EAAE;YACf,OAAO,qCAAqC,CAC1C,aAAa,EACb,MAAM,CAAC,gBAAgB,EACvB,yBAAyB,CAC1B,CAAC;AACH,SAAA;QACD,KAAK,MAAM,EAAE;AACX,YAAA,OAAO,iCAAiC,CAAC;AAC1C,SAAA;AACD,QAAA,SAAS;AACP,YAAA,OAAO,iCAAiC,CAAC;AAC1C,SAAA;AACF,KAAA;AACH,CAAC;AAED;;AAEG;AACG,SAAU,UAAU,CACxB,aAA4C,EAAA;IAM5C,OAAO,OACL,IAAY,EACZ,YAA0B,EAC1B,wBAA2D,KAC5B;QAC/B,MAAM,QAAQ,GAAG,MAAM,aAAa,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,UAAU,GAAuB,QAAQ,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;QACnF,IAAI,UAAU,KAAK,SAAS,EAAE;;AAE5B,YAAA,MAAM,mBAAmB,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;AACjD,YAAA,YAAY,CAAC,YAAY,GAAG,KAAK,CAAC,mBAAmB,CAAC;AACpD,kBAAE,gCAAgC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,EAAE,YAAY,CAAC,YAAY,CAAC;AACnF,kBAAE,mBAAmB,GAAG,IAAI,CAAC;AAChC,SAAA;AACD,Q
AAA,OAAO,wBAAwB,CAAC,QAAQ,CAAC,CAAC;AAC5C,KAAC,CAAC;AACJ,CAAC;AAED,SAAS,gCAAgC,CACvC,cAAoB,EACpB,mBAA2B,EAAA;AAE3B,IAAA,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;AACjD,IAAA,MAAM,cAAc,GAAG,cAAc,CAAC,OAAO,EAAE,CAAC;IAChD,IAAI,OAAO,GAAG,cAAc,EAAE;QAC5B,OAAO,cAAc,GAAG,OAAO,CAAC;AACjC,KAAA;AACD,IAAA,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AAED;;;;;;AAMG;SACa,qBAAqB,CACnC,KAA2C,EAC3C,WAAmB,EACnB,aAAqB,EAAA;IAErB,OAAO,CAAC,QAA8B,KAAa;AACjD,QAAA,IAAI,2BAA2B,CAAC,QAAQ,CAAC,WAAW,CAAC;AAAE,YAAA,CAAY;AACnE,QAAA,KAAK,CAAC,kBAAkB,GAAG,QAAQ,CAAC,WAAW,CAAC;AAChD,QAAA,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,KAAK,CAAC,UAAU,GAAG,aAAa,CAAC,KAAK,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACxE,QAAA,KAAK,CAAC,MAAM,GAAG,YAAY,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,CAAC,kBAAkB,CAAC,CAAC;;AAElF,QAAA,IACE,KAAK,CAAC,MAAM,CAAC,IAAI,KAAK,SAAS;AAC/B,aAAC,KAAK,CAAC,MAAM,CAAC,IAAI,KAAK,MAAM,IAAI,iBAAiB,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC,EAC7E;AACA,YAAA,KAAK,CAAC,MAAM,GAAG,QAAQ,CAAC,YAAuB,CAAC;AAChD,YAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,SAAA;AACD,QAAA,MAAM,CAAC,OAAO,CAAC,CAAA,oBAAA,EAAuB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAE,CAAA,CAAC,CAAC;AAC/D,QAAA,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AACpC,KAAC,CAAC;AACJ;;ACjHA;MAmBa,oBAAoB,CAAA;IAW/B,WACS,CAAA,KAAoD,EACnD,GAAkC,EAClC,yBAAqD,EACrD,aAA2D,EAC3D,WAAgE,EAChE,MAA0D,EAAA;QAL3D,IAAK,CAAA,KAAA,GAAL,KAAK,CAA+C;QACnD,IAAG,CAAA,GAAA,GAAH,GAAG,CAA+B;QAClC,IAAyB,CAAA,yBAAA,GAAzB,yBAAyB,CAA4B;QACrD,IAAa,CAAA,aAAA,GAAb,aAAa,CAA8C;QAC3D,IAAW,CAAA,WAAA,GAAX,WAAW,CAAqD;QAChE,IAAM,CAAA,MAAA,GAAN,MAAM,CAAoD;KAChE;AAEG,IAAA,eAAe,CAAC,YAA0B,EAAA;AAC/C,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;AAED;;;;;;;;;;;;;;AAcG;IACH,MAAM,MAAM,CAAC,OAGZ,EAAA;;AACC,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;QACzB,IAAI,YAAY,GAAqC,SAAS,CAAC;AAC/D,QAAA,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;AACpB,YAAA,MAAM,eAAe,GAAG,qBAAqB,CAC3C,KAAK,EACL,IAAI,CAAC,GAAG,CAAC,WAAW,EACpB,IAAI,CAAC,GAAG,CAAC,aAAa,CACvB,CAAC;YACF,YAAY,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,kBAAkB,EAAE,CAAC;YACnD,eAAe,CAAC,YAAY,CAAC,CAAC;AAC/B,SAAA;AAED,QAAA,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,wBAAwB,EAAE;AAChD,gBAAA,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE;AACjB,oBAAA,MAAM,IAAI,KAAK,CACb,wFAAwF,CACzF,CAAC;AACH,iBAAA;AACD,gBAAA,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;gBAC3B,IAAI,CAAC,wBAAwB,GAAG,MAAM;sBAClC,CAAC,QAA8B,MAAK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAC/B,QAAQ,CACX,EAAA,EAAA,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,YAAY,EAAE,IAAI,CAAC,KAAK,CAAC,EAC/C,CAAA,CAAA;AACJ,sBAAE,8BAA8B,CAAC,IAAI,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,yBAAyB,CAAC,CAAC;gBAC3F,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClC,aAAA;AACD,YAAA,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;AACrB,gBAAA,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;AACH,aAAA;AACD,YAAA,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,IAAI,CAClC,KAAK,CAAC,UAAU,EAChB,IAAI,CAAC,YAAa,EAClB,IAAI,CAAC,wBAAwB,CAC9B,CAAC;AACF,YAAA,MAAM,CAAC,OAAO,CAAC,CAAA,uBAAA,EAA0B,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,WAAW,CAAC,CAAA,CAAE,CAAC,CAAC;YACrF,IAAI,YAAY,CAAC,IAAI,EAAE;AACrB,gBAAA,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,aAAa;sBAC7B,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,YAAY,EAAE,KAAK,CAAC;AACtD,sBAAE,YAAY,CAAC,YAAY,CAAC;AAC9B,gBAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,aAAA;AAAM,iBAAA;gBACL,IAAI,CAAC,IAAI,GAAG,CAAA,EAAA,GAAA,YAAY,CAAC,IAAI,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,IAAI,CAAC,IAAI,CAAC;AAC3C,gBAAA,KAAK,CAAC,UAAU,GAAG,aAAa,CAAC,YAAY,CAAC,WAAW,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;AAC9E,aAAA;YACD,YAAY,GAAG,YAAY,CAAC;AAC7B,SAAA;AACD,QAAA,MAAM,CAAC,OAAO,CAAC,CAAA,oBAAA,EAAuB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAE,CAAA,CAAC,CAAC;AAC/D,QAAA,IAAI,YAA
Y,EAAE;AAChB,YAAA,CAAA,EAAA,GAAA,IAAI,CAAC,WAAW,MAAhB,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,IAAA,CAAA,IAAI,EAAe,KAAK,EAAE,YAAY,KAAA,IAAA,IAAZ,YAAY,KAAZ,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,YAAY,CAAE,WAAW,CAAC,CAAC;AACtD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,CAAC,KAAK,CAAC,CAAA,6BAAA,CAA+B,CAAC,CAAC;AAC/C,SAAA;AACD,QAAA,CAAA,EAAA,GAAA,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,YAAY,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,IAAA,CAArB,OAAO,EAAiB,KAAK,CAAC,CAAC;AAC/B,QAAA,OAAO,IAAI,CAAC;KACb;AAED,IAAA,MAAM,MAAM,GAAA;AACV,QAAA,IAAI,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC9B,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;AAEG;IACI,QAAQ,GAAA;QACb,OAAO,IAAI,CAAC,SAAS,CAAC;YACpB,KAAK,EAAE,IAAI,CAAC,KAAK;AAClB,SAAA,CAAC,CAAC;KACJ;AACF;;ACvID;AAaA,SAAS,gBAAgB,CACvB,eAAuB,EAAA;IAEvB,IAAI;QACF,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,KAAK,CAAC;AAC1C,KAAA;AAAC,IAAA,OAAO,CAAC,EAAE;AACV,QAAA,MAAM,IAAI,KAAK,CAAC,2CAA2C,eAAe,CAAA,CAAE,CAAC,CAAC;AAC/E,KAAA;AACH,CAAC;AAED;;AAEG;AACG,MAAO,SAA+D,SAAQ,MAGnF,CAAA;IAGC,WAAY,CAAA,GAAkC,EAAE,OAA2C,EAAA;QACzF,MAAM,EAAE,YAAY,GAAG,IAAI,EAAE,UAAU,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;QAC1D,MAAM,KAAK,GAAkD,UAAU;AACrE,cAAE,gBAAgB,CAAC,UAAU,CAAC;cAC3B,EAAoD,CAAC;AAE1D,QAAA,MAAM,SAAS,GAAG,IAAI,oBAAoB,CACxC,KAAK,EACL,GAAG,EACH,OAAO,KAAA,IAAA,IAAP,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,yBAAyB,EAClC,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,aAAa,EACtB,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,WAAW,EACpB,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,MAAM,CAChB,CAAC;QACF,KAAK,CAAC,SAAS,CAAC,CAAC;QAEjB,IAAI,CAAC,MAAM,GAAG,EAAE,YAAY,EAAE,YAAY,EAAE,CAAC;AAC7C,QAAA,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;KACxC;AAED;;AAEG;IACH,KAAK,GAAA;QACH,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAK,UAAU,CAAC,MAAM,OAAO,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;KACxF;AACF;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-lro/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 00000000..0e425423 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/core-lro/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-lro/node_modules/tslib/LICENSE.txt new file mode 100644 index 00000000..bfe6430c --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/README.md b/node_modules/@azure/core-lro/node_modules/tslib/README.md new file mode 100644 index 00000000..72ff8e79 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. 
For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-lro/node_modules/tslib/modules/index.js b/node_modules/@azure/core-lro/node_modules/tslib/modules/index.js new file mode 100644 index 00000000..aaac8bf9 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-lro/node_modules/tslib/modules/package.json b/node_modules/@azure/core-lro/node_modules/tslib/modules/package.json new file mode 100644 index 00000000..aafa0e4b --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/package.json b/node_modules/@azure/core-lro/node_modules/tslib/package.json new file mode 100644 index 00000000..0ec2c634 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": 
"https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-lro/node_modules/tslib/tslib.d.ts new file mode 100644 index 00000000..b8e49f08 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. 
+ * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. 
+ * @returns A synchronous iterator yielding `__await` instances on every odd invocation
+ * and returning the awaited `IteratorResult` passed to `next` every even invocation.
+ */
+export declare function __asyncDelegator(o: any): any;
+
+/**
+ * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object.
+ *
+ * @param o The object.
+ * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`.
+ */
+export declare function __asyncValues(o: any): any;
+
+/**
+ * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays.
+ *
+ * @param cooked The cooked possibly-sparse array.
+ * @param raw The raw string content.
+ */
+export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray;
+
+/**
+ * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS.
+ *
+ * ```js
+ * import Default, { Named, Other } from "mod";
+ * // or
+ * import { default as Default, Named, Other } from "mod";
+ * ```
+ *
+ * @param mod The CommonJS module exports object.
+ */
+export declare function __importStar<T>(mod: T): T;
+
+/**
+ * Used to shim default imports in ECMAScript Modules transpiled to CommonJS.
+ *
+ * ```js
+ * import Default from "mod";
+ * ```
+ *
+ * @param mod The CommonJS module exports object.
+ */
+export declare function __importDefault<T>(mod: T): T | { default: T };
+
+/**
+ * Emulates reading a private instance field.
+ *
+ * @param receiver The instance from which to read the private field.
+ * @param state A WeakMap containing the private field value for an instance.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, get(o: T): V | undefined },
+    kind?: "f"
+): V;
+
+/**
+ * Emulates reading a private static field.
+ *
+ * @param receiver The object from which to read the private static field.
+ * @param state The class constructor containing the definition of the static field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates evaluating a private instance "get" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private "get" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates evaluating a private static "get" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "get" accessor.
+ * @param state The class constructor containing the definition of the static "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates reading a private instance method.
+ *
+ * @param receiver The instance from which to read a private method.
+ * @param state A WeakSet used to verify an instance supports the private method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private instance method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates reading a private static method.
+ *
+ * @param receiver The object from which to read the private static method.
+ * @param state The class constructor containing the definition of the static method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private static method.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: T,
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates writing to a private instance field.
+ *
+ * @param receiver The instance on which to set a private field value.
+ * @param state A WeakMap used to store the private field value for an instance.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, set(o: T, value: V): unknown },
+    value: V,
+    kind?: "f"
+): V;
+
+/**
+ * Emulates writing to a private static field.
+ *
+ * @param receiver The object on which to set the private static field.
+ * @param state The class constructor containing the definition of the private static field.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    value: V,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates writing to a private instance "set" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private instance "set" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "set" accessor.
+ * @param value The value to store in the private accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "set" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    value: V,
+    kind: "a",
+    f: (v: V) => void
+): V;
+
+/**
+ * Emulates writing to a private static "set" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "set" accessor.
+ * @param state The class constructor containing the definition of the static "set" accessor.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. + */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.html new file mode 100644 index 00000000..b122e41b --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.js new file mode 100644 index 00000000..e6d77771 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.html b/node_modules/@azure/core-lro/node_modules/tslib/tslib.html new file mode 100644 index 00000000..44c9ba51 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.js b/node_modules/@azure/core-lro/node_modules/tslib/tslib.js new file mode 100644 index 00000000..2b7885c1 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-lro/package.json b/node_modules/@azure/core-lro/package.json new file mode 100644 index 00000000..7d7057f5 --- /dev/null +++ b/node_modules/@azure/core-lro/package.json @@ -0,0 +1,133 @@ +{ + "name": "@azure/core-lro", + "author": "Microsoft Corporation", + "sdk-type": "client", + "version": "2.2.4", + "description": "Isomorphic client library for supporting long-running operations in node.js and browser.", + "tags": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "lro", + "polling" + ], + "keywords": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "lro", + "polling", + "azure", + "cloud" + ], + "main": "./dist/index.js", + "module": "dist-esm/src/index.js", + "types": "./types/core-lro.d.ts", + "files": [ + "types/core-lro.d.ts", + "dist-esm/src", + "dist/", + "README.md", + "LICENSE" + ], + "engines": { + "node": ">=12.0.0" + }, + "browser": { + "os": false, + "process": false + }, + "react-native": { + "./dist/index.js": "./dist-esm/src/index.js" + }, + "license": "MIT", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-lro/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "nyc": { + "extension": [ + ".ts" + ], + "exclude": [ + "coverage/**/*", + "**/*.d.ts", + "**/*.js" + ], + "reporter": [ + "text", + "html", + "cobertura" + ], + "all": true + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p . && dev-tool run bundle", + "build": "npm run clean && tsc -p . 
&& dev-tool run bundle && api-extractor run --local", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* types *.log browser statistics.html coverage src/**/*.js test/**/*.js", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "test:browser": "npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run build:test && npm run unit-test:node && npm run integration-test:node", + "test": "npm run build:test && npm run unit-test", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "cross-env TS_NODE_FILES=true TS_NODE_COMPILER_OPTIONS=\"{\\\"module\\\":\\\"commonjs\\\"}\" nyc mocha -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"" + }, + "sideEffects": false, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/core-http": "^2.0.0", + "@azure/core-rest-pipeline": "^1.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/dev-tool": "^1.0.0", + "@azure/test-utils": "^1.0.0", + "@microsoft/api-extractor": "^7.18.11", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "eslint": "^7.15.0", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "npm-run-all": "^4.1.5", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "rimraf": "^3.0.0", + "ts-node": "^10.0.0", + "typescript": "~4.2.0", + "uglify-js": "^3.4.9" + } +} diff --git a/node_modules/@azure/core-lro/types/core-lro.d.ts b/node_modules/@azure/core-lro/types/core-lro.d.ts new file mode 100644 index 00000000..be4d99f0 --- /dev/null +++ b/node_modules/@azure/core-lro/types/core-lro.d.ts @@ -0,0 +1,565 @@ +import { AbortSignalLike } from '@azure/abort-controller'; + +/** + * CancelOnProgress is used as the return value of a Poller's onProgress method. + * When a user invokes onProgress, they're required to pass in a function that will be + * called as a callback with the new data received each time the poll operation is updated. + * onProgress returns a function that will prevent any further update to reach the original callback. 
+ */
+export declare type CancelOnProgress = () => void;
+
+/**
+ * Description of a long running operation.
+ */
+export declare interface LongRunningOperation<T = unknown> {
+    /**
+     * The request path.
+     */
+    requestPath: string;
+    /**
+     * The HTTP request method.
+     */
+    requestMethod: string;
+    /**
+     * A function that can be used to send initial request to the service.
+     */
+    sendInitialRequest: () => Promise<LroResponse<unknown>>;
+    /**
+     * A function that can be used to poll for the current status of a long running operation.
+     */
+    sendPollRequest: (path: string) => Promise<LroResponse<T>>;
+}
+
+/**
+ * The LRO Engine, a class that performs polling.
+ */
+export declare class LroEngine<TResult, TState extends PollOperationState<TResult> = PollOperationState<TResult>> extends Poller<TState, TResult> {
+    private config;
+    constructor(lro: LongRunningOperation<TResult>, options?: LroEngineOptions<TResult, TState>);
+    /**
+     * The method used by the poller to wait before attempting to update its operation.
+     */
+    delay(): Promise<void>;
+}
+
+/**
+ * Options for the LRO poller.
+ */
+export declare interface LroEngineOptions<TResult, TState> {
+    /**
+     * Defines how much time the poller is going to wait before making a new request to the service.
+     */
+    intervalInMs?: number;
+    /**
+     * A serialized poller which can be used to resume an existing paused Long-Running-Operation.
+     */
+    resumeFrom?: string;
+    /**
+     * The potential location of the result of the LRO if specified by the LRO extension in the swagger.
+     */
+    lroResourceLocationConfig?: LroResourceLocationConfig;
+    /**
+     * A function to process the result of the LRO.
+     */
+    processResult?: (result: unknown, state: TState) => TResult;
+    /**
+     * A function to process the state of the LRO.
+     */
+    updateState?: (state: TState, lastResponse: RawResponse) => void;
+    /**
+     * A predicate to determine whether the LRO finished processing.
+     */
+    isDone?: (lastResponse: unknown, state: TState) => boolean;
+}
+
+/**
+ * The potential location of the result of the LRO if specified by the LRO extension in the swagger.
+ */
+export declare type LroResourceLocationConfig = "azure-async-operation" | "location" | "original-uri";
+
+/**
+ * The type of the response of a LRO.
+ */
+export declare interface LroResponse<T = unknown> {
+    /** The flattened response */
+    flatResponse: T;
+    /** The raw response */
+    rawResponse: RawResponse;
+}
+
+/**
+ * A class that represents the definition of a program that polls through consecutive requests
+ * until it reaches a state of completion.
+ *
+ * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.
+ * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.
+ * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation.
+ *
+ * ```ts
+ * const poller = new MyPoller();
+ *
+ * // Polling just once:
+ * await poller.poll();
+ *
+ * // We can try to cancel the request here, by calling:
+ * //
+ * // await poller.cancelOperation();
+ * //
+ *
+ * // Getting the final result:
+ * const result = await poller.pollUntilDone();
+ * ```
+ *
+ * The Poller is defined by two types, a type representing the state of the poller, which
+ * must include a basic set of properties from `PollOperationState`,
+ * and a return type defined by `TResult`, which can be anything.
+ *
+ * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having
+ * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.
+ *
+ * ```ts
+ * class Client {
+ *   public async makePoller: PollerLike<MyOperationState, MyResult> {
+ *     const poller = new MyPoller({});
+ *     // It might be preferred to return the poller after the first request is made,
+ *     // so that some information can be obtained right away.
+ *     await poller.poll();
+ *     return poller;
+ *   }
+ * }
+ *
+ * const poller: PollerLike<MyOperationState, MyResult> = myClient.makePoller();
+ * ```
+ *
+ * A poller can be created through its constructor, then it can be polled until it's completed.
+ * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.
+ * At any point in time, the intermediate forms of the result type can be requested without delay.
+ * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.
+ *
+ * ```ts
+ * const poller = myClient.makePoller();
+ * const state: MyOperationState = poller.getOperationState();
+ *
+ * // The intermediate result can be obtained at any time.
+ * const result: MyResult | undefined = poller.getResult();
+ *
+ * // The final result can only be obtained after the poller finishes.
+ * const result: MyResult = await poller.pollUntilDone();
+ * ```
+ *
+ */
+export declare abstract class Poller<TState extends PollOperationState<TResult>, TResult> implements PollerLike<TState, TResult> {
+    private stopped;
+    private resolve?;
+    private reject?;
+    private pollOncePromise?;
+    private cancelPromise?;
+    private promise;
+    private pollProgressCallbacks;
+    /**
+     * The poller's operation is available in full to any of the methods of the Poller class
+     * and any class extending the Poller class.
+     */
+    protected operation: PollOperation<TState, TResult>;
+    /**
+     * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.
+     *
+     * When writing an implementation of a Poller, this implementation needs to deal with the initialization
+     * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's
+     * operation has already been defined, at least its basic properties. The code below shows how to approach
+     * the definition of the constructor of a new custom poller.
+     *
+     * ```ts
+     * export class MyPoller extends Poller<MyOperationState, MyResult> {
+     *   constructor({
+     *     // Anything you might need outside of the basics
+     *   }) {
+     *     let state: MyOperationState = {
+     *       privateProperty: private,
+     *       publicProperty: public,
+     *     };
+     *
+     *     const operation = {
+     *       state,
+     *       update,
+     *       cancel,
+     *       toString
+     *     }
+     *
+     *     // Sending the operation to the parent's constructor.
+     *     super(operation);
+     *
+     *     // You can assign more local properties here.
+     *   }
+     * }
+     * ```
+     *
+     * Inside of this constructor, a new promise is created. This will be used to
+     * tell the user when the poller finishes (see `pollUntilDone()`). The promise's
+     * resolve and reject methods are also used internally to control when to resolve
+     * or reject anyone waiting for the poller to finish.
+     *
+     * The constructor of a custom implementation of a poller is where any serialized version of
+     * a previous poller's operation should be deserialized into the operation sent to the
+     * base constructor.
For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation: PollOperation); + /** + * Defines how much to wait between each poll request. + * This has to be implemented by your custom poller. + * + * \@azure/core-http has a simple implementation of a delay function that waits as many milliseconds as specified. + * This can be used as follows: + * + * ```ts + * import { delay } from "@azure/core-http"; + * + * export class MyPoller extends Poller { + * // The other necessary definitions. + * + * async delay(): Promise { + * const milliseconds = 1000; + * return delay(milliseconds); + * } + * } + * ``` + * + */ + protected abstract delay(): Promise; + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + private startPolling; + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + private pollOnce; + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + private fireProgress; + /** + * Invokes the underlying operation's cancel method, and rejects the + * pollUntilDone promise. + */ + private cancelOnce; + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller from continuing to poll. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. 
+ */
+    cancelOperation(options?: {
+        abortSignal?: AbortSignalLike;
+    }): Promise<void>;
+    /**
+     * Returns the state of the operation.
+     *
+     * Even though TState will be the same type inside any of the methods of any extension of the Poller class,
+     * implementations of the pollers can customize what's shared with the public by writing their own
+     * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller
+     * and a public type representing a safe to share subset of the properties of the internal state.
+     * Their definition of getOperationState can then return their public type.
+     *
+     * Example:
+     *
+     * ```ts
+     * // Let's say we have our poller's operation state defined as:
+     * interface MyOperationState extends PollOperationState<MyResult> {
+     *   privateProperty?: string;
+     *   publicProperty?: string;
+     * }
+     *
+     * // To allow us to have a true separation of public and private state, we have to define another interface:
+     * interface PublicState extends PollOperationState<MyResult> {
+     *   publicProperty?: string;
+     * }
+     *
+     * // Then, we define our Poller as follows:
+     * export class MyPoller extends Poller<MyOperationState, MyResult> {
+     *   // ... More content is needed here ...
+     *
+     *   public getOperationState(): PublicState {
+     *     const state: PublicState = this.operation.state;
+     *     return {
+     *       // Properties from PollOperationState
+     *       isStarted: state.isStarted,
+     *       isCompleted: state.isCompleted,
+     *       isCancelled: state.isCancelled,
+     *       error: state.error,
+     *       result: state.result,
+     *
+     *       // The only other property needed by PublicState.
+     *       publicProperty: state.publicProperty
+     *     }
+     *   }
+     * }
+     * ```
+     *
+     * You can see this in the tests of this repository, go to the file:
+     * `../test/utils/testPoller.ts`
+     * and look for the getOperationState implementation.
+     */
+    getOperationState(): TState;
+    /**
+     * Returns the result value of the operation,
+     * regardless of the state of the poller.
+     * It can return undefined or an incomplete form of the final TResult value
+     * depending on the implementation.
+     */
+    getResult(): TResult | undefined;
+    /**
+     * Returns a serialized version of the poller's operation
+     * by invoking the operation's toString method.
+     */
+    toString(): string;
+}
+
+/**
+ * When a poller is cancelled through the `cancelOperation` method,
+ * the poller will be rejected with an instance of the PollerCancelledError.
+ */
+export declare class PollerCancelledError extends Error {
+    constructor(message: string);
+}
+
+/**
+ * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.
+ */
+export declare interface PollerLike<TState extends PollOperationState<TResult>, TResult> {
+    /**
+     * Returns a promise that will resolve once a single polling request finishes.
+     * It does this by calling the update method of the Poller's operation.
+     */
+    poll(options?: {
+        abortSignal?: AbortSignalLike;
+    }): Promise<void>;
+    /**
+     * Returns a promise that will resolve once the underlying operation is completed.
+     */
+    pollUntilDone(): Promise<TResult>;
+    /**
+     * Invokes the provided callback after each polling is completed,
+     * sending the current state of the poller's operation.
+     *
+     * It returns a method that can be used to stop receiving updates on the given callback function.
+     */
+    onProgress(callback: (state: TState) => void): CancelOnProgress;
+    /**
+     * Returns true if the poller has finished polling.
+     */
+    isDone(): boolean;
+    /**
+     * Stops the poller. After this, no manual or automated requests can be sent.
+     */
+    stopPolling(): void;
+    /**
+     * Returns true if the poller is stopped.
+     */
+    isStopped(): boolean;
+    /**
+     * Attempts to cancel the underlying operation.
+     */
+    cancelOperation(options?: {
+        abortSignal?: AbortSignalLike;
+    }): Promise<void>;
+    /**
+     * Returns the state of the operation.
+     * The TState defined in PollerLike can be a subset of the TState defined in
+     * the Poller implementation.
+     */
+    getOperationState(): TState;
+    /**
+     * Returns the result value of the operation,
+     * regardless of the state of the poller.
+     * It can return undefined or an incomplete form of the final TResult value
+     * depending on the implementation.
+     */
+    getResult(): TResult | undefined;
+    /**
+     * Returns a serialized version of the poller's operation
+     * by invoking the operation's toString method.
+     */
+    toString(): string;
+}
+
+/**
+ * When a poller is manually stopped through the `stopPolling` method,
+ * the poller will be rejected with an instance of the PollerStoppedError.
+ */
+export declare class PollerStoppedError extends Error {
+    constructor(message: string);
+}
+
+/**
+ * PollOperation is an interface that defines how to update the local reference of the state of the remote
+ * long running operation, just as well as how to request the cancellation of the same operation.
+ *
+ * It also has a method to serialize the operation so that it can be stored and resumed at any time.
+ */
+export declare interface PollOperation<TState, TResult> {
+    /**
+     * The state of the operation.
+     * It will be used to store the basic properties of PollOperationState,
+     * plus any custom property that the implementation may require.
+     */
+    state: TState;
+    /**
+     * Defines how to request the remote service for updates on the status of the long running operation.
+     *
+     * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike.
+     * Also optionally receives a "fireProgress" function, which, if called, is responsible for triggering the
+     * poller's onProgress callbacks.
+     *
+     * @param options - Optional properties passed to the operation's update method.
+     */
+    update(options?: {
+        abortSignal?: AbortSignalLike;
+        fireProgress?: (state: TState) => void;
+    }): Promise<PollOperation<TState, TResult>>;
+    /**
+     * Attempts to cancel the underlying operation.
+     *
+     * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike.
+     *
+     * It returns a promise that should be resolved with an updated version of the poller's operation.
+     *
+     * @param options - Optional properties passed to the operation's update method.
+     */
+    cancel(options?: {
+        abortSignal?: AbortSignalLike;
+    }): Promise<PollOperation<TState, TResult>>;
+    /**
+     * Serializes the operation.
+     * Useful when wanting to create a poller that monitors an existing operation.
+     */
+    toString(): string;
+}
+
+/**
+ * PollOperationState contains an opinionated list of the smallest set of properties needed
+ * to define any long running operation poller.
+ *
+ * While the Poller class works as the local control mechanism to start triggering, wait for,
+ * and potentially cancel a long running operation, the PollOperationState documents the status
+ * of the remote long running operation.
+ *
+ * It should be updated at least when the operation starts, when it's finished, and when it's cancelled.
+ * Though, implementations can have any other number of properties that can be updated by other reasons.
+ */
+export declare interface PollOperationState<TResult> {
+    /**
+     * True if the operation has started.
+ */
+    isStarted?: boolean;
+    /**
+     * True if the operation has been completed.
+     */
+    isCompleted?: boolean;
+    /**
+     * True if the operation has been cancelled.
+     */
+    isCancelled?: boolean;
+    /**
+     * Will exist if the operation encountered any error.
+     */
+    error?: Error;
+    /**
+     * Will exist if the operation concluded in a result of an expected type.
+     */
+    result?: TResult;
+}
+
+/**
+ * PollProgressCallback is the type of the callback functions sent to onProgress.
+ * These functions will receive a TState that is defined by your implementation of
+ * the Poller class.
+ */
+export declare type PollProgressCallback<TState> = (state: TState) => void;
+
+/**
+ * Simple type of the raw response.
+ */
+export declare interface RawResponse {
+    /** The HTTP status code */
+    statusCode: number;
+    /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */
+    headers: {
+        [headerName: string]: string;
+    };
+    /** The parsed response body */
+    body?: unknown;
+}
+
+export { }
diff --git a/node_modules/@azure/core-paging/LICENSE b/node_modules/@azure/core-paging/LICENSE
new file mode 100644
index 00000000..ea8fb151
--- /dev/null
+++ b/node_modules/@azure/core-paging/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2020 Microsoft
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/@azure/core-paging/README.md b/node_modules/@azure/core-paging/README.md
new file mode 100644
index 00000000..5fb49772
--- /dev/null
+++ b/node_modules/@azure/core-paging/README.md
@@ -0,0 +1,61 @@
+# Azure Core Paging client library for JavaScript
+
+This library provides core types for paging async iterable iterators.
+
+## Getting started
+
+### Installation
+
+If using this as part of another project in the [azure-sdk-for-js](https://github.com/Azure/azure-sdk-for-js) repo,
+then run `rush install` after cloning the repo.
+
+Otherwise, use npm to install this package in your application as follows
+
+```javascript
+npm install @azure/core-paging
+```
+
+## Key concepts
+
+You can find an explanation of how this repository's code works by going to our [architecture overview](https://github.com/Azure/ms-rest-js/blob/master/docs/architectureOverview.md).
+
+## Examples
+
+Example of building with the types:
+
+```typescript
+  public listSecrets(
+    options: ListSecretsOptions = {}
+  ): PagedAsyncIterableIterator {
+    const iter = this.listSecretsAll(options);
+    return {
+      async next() { return iter.next(); },
+      [Symbol.asyncIterator]() { return this; },
+      byPage: (settings: PageSettings = {}) => this.listSecretsPage(settings, options),
+    };
+  }
+```
+
+And using the types:
+
+```
+  for await (let page of client.listSecrets().byPage({ maxPageSize: 2 })) {
+    for (const secret of page) {
+      console.log("secret: ", secret);
+    }
+  }
+```
+
+## Next steps
+
+Try out this package in your application when dealing with async iterable iterators and provide feedback!
+
+## Troubleshooting
+
+Log an issue at https://github.com/Azure/azure-sdk-for-js/issues
+
+## Contributing
+
+If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code.
+
+![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-paging%2FREADME.png)
diff --git a/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js
new file mode 100644
index 00000000..d77960b2
--- /dev/null
+++ b/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js
@@ -0,0 +1,72 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib";
+/**
+ * returns an async iterator that iterates over results. It also has a `byPage`
+ * method that returns pages of items at once.
+ *
+ * @param pagedResult - an object that specifies how to get pages.
+ * @returns a paged async iterator that iterates over results.
+ */
+export function getPagedAsyncIterator(pagedResult) {
+    var _a;
+    const iter = getItemAsyncIterator(pagedResult);
+    return {
+        next() {
+            return iter.next();
+        },
+        [Symbol.asyncIterator]() {
+            return this;
+        },
+        byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => {
+            const { continuationToken, maxPageSize } = settings !== null && settings !== void 0 ? settings : {};
+            return getPageAsyncIterator(pagedResult, {
+                pageLink: continuationToken,
+                maxPageSize,
+            });
+        }),
+    };
+}
+function getItemAsyncIterator(pagedResult) {
+    return __asyncGenerator(this, arguments, function* getItemAsyncIterator_1() {
+        var e_1, _a;
+        const pages = getPageAsyncIterator(pagedResult);
+        const firstVal = yield __await(pages.next());
+        // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is
+        if (!Array.isArray(firstVal.value)) {
+            yield yield __await(firstVal.value);
+            // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case
+            yield __await(yield* __asyncDelegator(__asyncValues(pages)));
+        }
+        else {
+            yield __await(yield* __asyncDelegator(__asyncValues(firstVal.value)));
+            try {
+                for (var pages_1 = __asyncValues(pages), pages_1_1; pages_1_1 = yield __await(pages_1.next()), !pages_1_1.done;) {
+                    const page = pages_1_1.value;
+                    // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`.
In this branch, + // it must be the case that `TPage = TElement[]` + yield __await(yield* __asyncDelegator(__asyncValues(page))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (pages_1_1 && !pages_1_1.done && (_a = pages_1.return)) yield __await(_a.call(pages_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); +} +function getPageAsyncIterator(pagedResult, options = {}) { + return __asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { + const { pageLink, maxPageSize } = options; + let response = yield __await(pagedResult.getPage(pageLink !== null && pageLink !== void 0 ? pageLink : pagedResult.firstPageLink, maxPageSize)); + yield yield __await(response.page); + while (response.nextPageLink) { + response = yield __await(pagedResult.getPage(response.nextPageLink, maxPageSize)); + yield yield __await(response.page); + } + }); +} +//# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js.map b/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js.map new file mode 100644 index 00000000..daadfa97 --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.js","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAIlC;;;;;;GAMG;AACH,MAAM,UAAU,qBAAqB,CAMnC,WAAqD;;IAErD,MAAM,IAAI,GAAG,oBAAoB,CAAwC,WAAW,CAAC,CAAC;IACtF,OAAO;QACL,IAAI;YACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;QACrB,CAAC;QACD,CAAC,MAAM,CAAC,aAAa,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;QACD,MAAM,EACJ,MAAA,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,MAAM,mCACnB,CAAC,CAAC,QAAuB,EAAE,EAAE;YAC3B,MAAM,EAAE,iBAAiB,EAAE,WAAW,EAAE,GAAG,QAAQ,aAAR,QAAQ,cAAR,QAAQ,GAAI,EAAE,CAAC;YAC1D,OAAO,oBAAoB,CAAC,WAAsD,EAAE;gBAClF,QAAQ,EAAE,iBAAiD;gBAC3D,WAAW;aACZ,CAAC,CAAC;QACL,CAAC,CAAC;KACL,CAAC;AACJ,CAAC;AAED,SAAgB,oBAAoB,CAClC,WAAqD;;;QAErD,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;QAChD,MAAM,QAAQ,GAAG,cAAM,KAAK,CAAC,IAAI,EAAE,CAAA,CAAC;QACpC,6FAA6F;QAC7F,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;YAClC,oBAAM,QAAQ,CAAC,KAAK,CAAA,CAAC;YACrB,sFAAsF;YACtF,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,KAAmD,CAAA,CAAA,CAAA,CAAC;SAC5D;aAAM;YACL,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,QAAQ,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;;gBACtB,KAAyB,IAAA,UAAA,cAAA,KAAK,CAAA,WAAA;oBAAnB,MAAM,IAAI,kBAAA,CAAA;oBACnB,gGAAgG;oBAChG,gDAAgD;oBAChD,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,IAA6B,CAAA,CAAA,CAAA,CAAC;iBACtC;;;;;;;;;SACF;IACH,CAAC;CAAA;AAED,SAAgB,oBAAoB,CAClC,WAAqD,EACrD,UAGI,EAAE;;QAEN,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC;QAC1C,IAAI,QAAQ,GAAG,cAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,aAAR,QAAQ,cAAR,QAAQ,GAAI,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC,CAAA,CAAC;QAC7F,oBAAM,QAAQ,CAAC,IAAI,CAAA,CAAC;QACpB,OAAO,QAAQ,CAAC,YAAY,EAAE;YAC5B,QAAQ,GAAG,cAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,EAAE,WAAW,CAAC,CAAA,CAAC;YACzE,oBAAM,QAAQ,CAAC,IAAI,CAAA,CAAC;SACrB;IACH,CAAC;CAAA","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PageSettings, PagedAsyncIterableIterator, PagedResult } from \"./models\";\n\n/**\n * returns an async iterator that iterates over results. 
It also has a `byPage`\n * method that returns pages of items at once.\n *\n * @param pagedResult - an object that specifies how to get pages.\n * @returns a paged async iterator that iterates over results.\n */\nexport function getPagedAsyncIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n TLink = string\n>(\n pagedResult: PagedResult\n): PagedAsyncIterableIterator {\n const iter = getItemAsyncIterator(pagedResult);\n return {\n next() {\n return iter.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n byPage:\n pagedResult?.byPage ??\n ((settings?: PageSettings) => {\n const { continuationToken, maxPageSize } = settings ?? {};\n return getPageAsyncIterator(pagedResult as PagedResult, {\n pageLink: continuationToken as unknown as TLink | undefined,\n maxPageSize,\n });\n }),\n };\n}\n\nasync function* getItemAsyncIterator(\n pagedResult: PagedResult\n): AsyncIterableIterator {\n const pages = getPageAsyncIterator(pagedResult);\n const firstVal = await pages.next();\n // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is\n if (!Array.isArray(firstVal.value)) {\n yield firstVal.value;\n // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case\n yield* pages as unknown as AsyncIterableIterator;\n } else {\n yield* firstVal.value;\n for await (const page of pages) {\n // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch,\n // it must be the case that `TPage = TElement[]`\n yield* page as unknown as TElement[];\n }\n }\n}\n\nasync function* getPageAsyncIterator(\n pagedResult: PagedResult,\n options: {\n maxPageSize?: number;\n pageLink?: TLink;\n } = {}\n): AsyncIterableIterator {\n const { pageLink, maxPageSize } = options;\n let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize);\n yield response.page;\n while (response.nextPageLink) {\n response = await pagedResult.getPage(response.nextPageLink, maxPageSize);\n yield response.page;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/index.js b/node_modules/@azure/core-paging/dist-esm/src/index.js new file mode 100644 index 00000000..8af0a5d3 --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export * from "./models"; +export * from "./getPagedAsyncIterator"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/index.js.map b/node_modules/@azure/core-paging/dist-esm/src/index.js.map new file mode 100644 index 00000000..a0b2ac3f --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,UAAU,CAAC;AACzB,cAAc,yBAAyB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./models\";\nexport * from \"./getPagedAsyncIterator\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/models.js b/node_modules/@azure/core-paging/dist-esm/src/models.js new file mode 100644 index 00000000..63155a9f --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/models.js.map b/node_modules/@azure/core-paging/dist-esm/src/models.js.map new file mode 100644 index 00000000..8d3ed5cc --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * An interface that tracks the settings for paged iteration\n */\nexport interface PageSettings {\n /**\n * The token that keeps track of where to continue the iterator\n */\n continuationToken?: string;\n /**\n * The size of the page during paged iteration\n */\n maxPageSize?: number;\n}\n/**\n * An interface that allows async iterable iteration both to completion and by page.\n */\nexport interface PagedAsyncIterableIterator {\n /**\n * The next method, part of the iteration protocol\n */\n next(): Promise>;\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator](): PagedAsyncIterableIterator;\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings?: PageSettingsT) => AsyncIterableIterator;\n}\n\n/**\n * An interface that describes how to communicate with the service.\n */\nexport interface PagedResult {\n /**\n * Link to the first page of results.\n */\n firstPageLink: TLink;\n /**\n * A method that returns a page of results.\n */\n getPage: (\n pageLink: TLink,\n maxPageSize?: number\n ) => Promise<{ page: TPage; nextPageLink?: TLink }>;\n /**\n * a function to implement the `byPage` method on the paged async iterator. The default is\n * one that sets the `maxPageSizeParam` from `settings.maxPageSize`.\n */\n byPage?: (settings?: TPageSettings) => AsyncIterableIterator;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/index.js b/node_modules/@azure/core-paging/dist/index.js new file mode 100644 index 00000000..04275713 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/index.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var tslib = require('tslib'); + +// Copyright (c) Microsoft Corporation. +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +function getPagedAsyncIterator(pagedResult) { + var _a; + const iter = getItemAsyncIterator(pagedResult); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { + const { continuationToken, maxPageSize } = settings !== null && settings !== void 0 ? settings : {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), + }; +} +function getItemAsyncIterator(pagedResult) { + return tslib.__asyncGenerator(this, arguments, function* getItemAsyncIterator_1() { + var e_1, _a; + const pages = getPageAsyncIterator(pagedResult); + const firstVal = yield tslib.__await(pages.next()); + // if the result does not have an array shape, i.e. 
TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + yield yield tslib.__await(firstVal.value); + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(pages))); + } + else { + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(firstVal.value))); + try { + for (var pages_1 = tslib.__asyncValues(pages), pages_1_1; pages_1_1 = yield tslib.__await(pages_1.next()), !pages_1_1.done;) { + const page = pages_1_1.value; + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, + // it must be the case that `TPage = TElement[]` + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(page))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (pages_1_1 && !pages_1_1.done && (_a = pages_1.return)) yield tslib.__await(_a.call(pages_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); +} +function getPageAsyncIterator(pagedResult, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { + const { pageLink, maxPageSize } = options; + let response = yield tslib.__await(pagedResult.getPage(pageLink !== null && pageLink !== void 0 ? pageLink : pagedResult.firstPageLink, maxPageSize)); + yield yield tslib.__await(response.page); + while (response.nextPageLink) { + response = yield tslib.__await(pagedResult.getPage(response.nextPageLink, maxPageSize)); + yield yield tslib.__await(response.page); + } + }); +} + +exports.getPagedAsyncIterator = getPagedAsyncIterator; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-paging/dist/index.js.map b/node_modules/@azure/core-paging/dist/index.js.map new file mode 100644 index 00000000..f93de0c5 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/getPagedAsyncIterator.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PageSettings, PagedAsyncIterableIterator, PagedResult } from \"./models\";\n\n/**\n * returns an async iterator that iterates over results. It also has a `byPage`\n * method that returns pages of items at once.\n *\n * @param pagedResult - an object that specifies how to get pages.\n * @returns a paged async iterator that iterates over results.\n */\nexport function getPagedAsyncIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n TLink = string\n>(\n pagedResult: PagedResult\n): PagedAsyncIterableIterator {\n const iter = getItemAsyncIterator(pagedResult);\n return {\n next() {\n return iter.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n byPage:\n pagedResult?.byPage ??\n ((settings?: PageSettings) => {\n const { continuationToken, maxPageSize } = settings ?? {};\n return getPageAsyncIterator(pagedResult as PagedResult, {\n pageLink: continuationToken as unknown as TLink | undefined,\n maxPageSize,\n });\n }),\n };\n}\n\nasync function* getItemAsyncIterator(\n pagedResult: PagedResult\n): AsyncIterableIterator {\n const pages = getPageAsyncIterator(pagedResult);\n const firstVal = await pages.next();\n // if the result does not have an array shape, i.e. 
TPage = TElement, then we return it as is\n if (!Array.isArray(firstVal.value)) {\n yield firstVal.value;\n // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case\n yield* pages as unknown as AsyncIterableIterator;\n } else {\n yield* firstVal.value;\n for await (const page of pages) {\n // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch,\n // it must be the case that `TPage = TElement[]`\n yield* page as unknown as TElement[];\n }\n }\n}\n\nasync function* getPageAsyncIterator(\n pagedResult: PagedResult,\n options: {\n maxPageSize?: number;\n pageLink?: TLink;\n } = {}\n): AsyncIterableIterator {\n const { pageLink, maxPageSize } = options;\n let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize);\n yield response.page;\n while (response.nextPageLink) {\n response = await pagedResult.getPage(response.nextPageLink, maxPageSize);\n yield response.page;\n }\n}\n"],"names":["__await","__asyncDelegator","__asyncValues"],"mappings":";;;;;;AAAA;AAKA;;;;;;AAMG;AACG,SAAU,qBAAqB,CAMnC,WAAqD,EAAA;;AAErD,IAAA,MAAM,IAAI,GAAG,oBAAoB,CAAwC,WAAW,CAAC,CAAC;IACtF,OAAO;QACL,IAAI,GAAA;AACF,YAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;SACpB;QACD,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,YAAA,OAAO,IAAI,CAAC;SACb;AACD,QAAA,MAAM,EACJ,CAAA,EAAA,GAAA,WAAW,KAAA,IAAA,IAAX,WAAW,KAAX,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,WAAW,CAAE,MAAM,MACnB,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,IAAC,CAAC,QAAuB,KAAI;AAC3B,YAAA,MAAM,EAAE,iBAAiB,EAAE,WAAW,EAAE,GAAG,QAAQ,KAAA,IAAA,IAAR,QAAQ,KAAA,KAAA,CAAA,GAAR,QAAQ,GAAI,EAAE,CAAC;YAC1D,OAAO,oBAAoB,CAAC,WAAsD,EAAE;AAClF,gBAAA,QAAQ,EAAE,iBAAiD;gBAC3D,WAAW;AACZ,aAAA,CAAC,CAAC;AACL,SAAC,CAAC;KACL,CAAC;AACJ,CAAC;AAED,SAAgB,oBAAoB,CAClC,WAAqD,EAAA;;;AAErD,QAAA,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;QAChD,MAAM,QAAQ,GAAG,MAAMA,aAAA,CAAA,KAAK,CAAC,IAAI,EAAE,CAAA,CAAC;;QAEpC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;AAClC,YAAA,MAAA,MAAAA,aAAA,CAAM,QAAQ,CAAC,KAAK,CAAA,CAAC;;YAErB,MAAAA,aAAA,CAAA,OAAOC,sBAAA,CAAAC,oBAAA,KAAmD,CAAA,CAAA,CAAA,CAAC;AAC5D,SAAA;AAAM,aAAA;YACL,MAAAF,aAAA,CAAA,OAAOC,sBAAA,CAAAC,mBAAA,CAAA,QAAQ,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;;AACtB,gBAAA,KAAyB,IAAA,OAAA,GAAAA,mBAAA,CAAA,KAAK,CAAA,EAAA,SAAA,EAAA,SAAA,GAAA,MAAAF,aAAA,CAAA,OAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,SAAA,CAAA,IAAA,GAAA;oBAAnB,MAAM,IAAI,kBAAA,CAAA;;;oBAGnB,MAAAA,aAAA,CAAA,OAAOC,sBAAA,CAAAC,oBAAA,IAA6B,CAAA,CAAA,CAAA,CAAC;AACtC,iBAAA;;;;;;;;;AACF,SAAA;KACF,CAAA,CAAA;AAAA,CAAA;AAED,SAAgB,oBAAoB,CAClC,WAAqD,EACrD,UAGI,EAAE,EAAA;;AAEN,QAAA,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC;QAC1C,IAAI,QAAQ,GAAG,MAAMF,aAAA,CAAA,WAAW,CAAC,OAAO,CAAC,QAAQ,KAAR,IAAA,IAAA,QAAQ,cAAR,QAAQ,GAAI,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC,CAAA,CAAC;AAC7F,QAAA,MAAA,MAAAA,aAAA,CAAM,QAAQ,CAAC,IAAI,CAAA,CAAC;QACpB,OAAO,QAAQ,CAAC,YAAY,EAAE;AAC5B,YAAA,QAAQ,GAAG,MAAAA,aAAA,CAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,EAAE,WAAW,CAAC,CAAA,CAAC;AACzE,YAAA,MAAA,MAAAA,aAAA,CAAM,QAAQ,CAAC,IAAI,CAAA,CAAC;AACrB,SAAA;KACF,CAAA,CAAA;AAAA;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-paging/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 00000000..0e425423 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. 
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+***************************************************************************** */
+
diff --git a/node_modules/@azure/core-paging/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-paging/node_modules/tslib/LICENSE.txt
new file mode 100644
index 00000000..bfe6430c
--- /dev/null
+++ b/node_modules/@azure/core-paging/node_modules/tslib/LICENSE.txt
@@ -0,0 +1,12 @@
+Copyright (c) Microsoft Corporation.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
\ No newline at end of file
diff --git a/node_modules/@azure/core-paging/node_modules/tslib/README.md b/node_modules/@azure/core-paging/node_modules/tslib/README.md
new file mode 100644
index 00000000..72ff8e79
--- /dev/null
+++ b/node_modules/@azure/core-paging/node_modules/tslib/README.md
@@ -0,0 +1,164 @@
+# tslib
+
+This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions.
+
+This library is primarily used by the `--importHelpers` flag in TypeScript.
+When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file:
+
+```ts
+var __assign = (this && this.__assign) || Object.assign || function(t) {
+    for (var s, i = 1, n = arguments.length; i < n; i++) {
+        s = arguments[i];
+        for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
+            t[p] = s[p];
+    }
+    return t;
+};
+exports.x = {};
+exports.y = __assign({}, exports.x);
+
+```
+
+will instead be emitted as something like the following:
+
+```ts
+var tslib_1 = require("tslib");
+exports.x = {};
+exports.y = tslib_1.__assign({}, exports.x);
+```
+
+Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead.
+For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`.
+
+# Installing
+
+For the latest stable version, run:
+
+## npm
+
+```sh
+# TypeScript 3.9.2 or later
+npm install tslib
+
+# TypeScript 3.8.4 or earlier
+npm install tslib@^1
+
+# TypeScript 2.3.2 or earlier
+npm install tslib@1.6.1
+```
+
+## yarn
+
+```sh
+# TypeScript 3.9.2 or later
+yarn add tslib
+
+# TypeScript 3.8.4 or earlier
+yarn add tslib@^1
+
+# TypeScript 2.3.2 or earlier
+yarn add tslib@1.6.1
+```
+
+## bower
+
+```sh
+# TypeScript 3.9.2 or later
+bower install tslib
+
+# TypeScript 3.8.4 or earlier
+bower install tslib@^1
+
+# TypeScript 2.3.2 or earlier
+bower install tslib@1.6.1
+```
+
+## JSPM
+
+```sh
+# TypeScript 3.9.2 or later
+jspm install tslib
+
+# TypeScript 3.8.4 or earlier
+jspm install tslib@^1
+
+# TypeScript 2.3.2 or earlier
+jspm install tslib@1.6.1
+```
+
+# Usage
+
+Set the `importHelpers` compiler option on the command line:
+
+```
+tsc --importHelpers file.ts
+```
+
+or in your tsconfig.json:
+
+```json
+{
+    "compilerOptions": {
+        "importHelpers": true
+    }
+}
+```
+
+#### For bower and JSPM users
+
+You will need to add a `paths` mapping for `tslib`, e.g. For Bower users:
+
+```json
+{
+    "compilerOptions": {
+        "module": "amd",
+        "importHelpers": true,
+        "baseUrl": "./",
+        "paths": {
+            "tslib" : ["bower_components/tslib/tslib.d.ts"]
+        }
+    }
+}
+```
+
+For JSPM users:
+
+```json
+{
+    "compilerOptions": {
+        "module": "system",
+        "importHelpers": true,
+        "baseUrl": "./",
+        "paths": {
+            "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"]
+        }
+    }
+}
+```
+
+## Deployment
+
+- Choose your new version number
+- Set it in `package.json` and `bower.json`
+- Create a tag: `git tag [version]`
+- Push the tag: `git push --tags`
+- Create a [release in GitHub](https://github.com/microsoft/tslib/releases)
+- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow
+
+Done.
+
+# Contribute
+
+There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript.
+
+* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in.
+* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls).
+* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript).
+* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter.
+* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md).
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-paging/node_modules/tslib/modules/index.js b/node_modules/@azure/core-paging/node_modules/tslib/modules/index.js new file mode 100644 index 00000000..aaac8bf9 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-paging/node_modules/tslib/modules/package.json b/node_modules/@azure/core-paging/node_modules/tslib/modules/package.json new file mode 100644 index 00000000..aafa0e4b --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/node_modules/tslib/package.json b/node_modules/@azure/core-paging/node_modules/tslib/package.json new file mode 100644 index 00000000..0ec2c634 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-paging/node_modules/tslib/tslib.d.ts new file mode 100644 index 00000000..b8e49f08 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. 
+ */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. 
+ * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. 
+ * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. 
+ */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.html new file mode 100644 index 00000000..b122e41b --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.js new file mode 100644 index 00000000..e6d77771 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.html b/node_modules/@azure/core-paging/node_modules/tslib/tslib.html new file mode 100644 index 00000000..44c9ba51 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.js b/node_modules/@azure/core-paging/node_modules/tslib/tslib.js new file mode 100644 index 00000000..2b7885c1 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-paging/package.json b/node_modules/@azure/core-paging/package.json new file mode 100644 index 00000000..3bfdc6ea --- /dev/null +++ b/node_modules/@azure/core-paging/package.json @@ -0,0 +1,108 @@ +{ + "name": "@azure/core-paging", + "author": "Microsoft Corporation", + "sdk-type": "client", + "version": "1.3.0", + "description": "Core types for paging async iterable iterators", + "tags": [ + "microsoft", + "clientruntime" + ], + "keywords": [ + "microsoft", + "clientruntime", + "azure", + "cloud" + ], + "main": "dist/index.js", + "module": "dist-esm/src/index.js", + "types": "./types/latest/core-paging.d.ts", 
+ "typesVersions": { + "<3.6": { + "types/latest/*": [ + "types/3.1/*" + ] + } + }, + "files": [ + "types/latest/core-paging.d.ts", + "types/3.1", + "dist/", + "dist-esm/src/", + "LICENSE", + "README.md" + ], + "engines": { + "node": ">=12.0.0" + }, + "license": "MIT", + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-paging/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "echo skipped", + "build:types": "downlevel-dts types/latest/ types/3.1/", + "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp *.tgz types *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint": "eslint package.json src --ext .ts", + "lint:fix": "eslint package.json src --ext .ts --fix --fix-type [problem,suggestion]", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . 
&& npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 50000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "sideEffects": true, + "private": false, + "dependencies": { + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@microsoft/api-extractor": "7.18.11", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "downlevel-dts": "^0.8.0", + "eslint": "^7.15.0", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "prettier": "^2.5.1", + "rimraf": "^3.0.0", + "typescript": "~4.6.0" + }, + "//sampleConfiguration": { + "skipFolder": true, + "disableDocsMs": true, + "productName": "Azure SDK Core", + "productSlugs": [ + "azure" + ] + } +} diff --git a/node_modules/@azure/core-paging/types/3.1/core-paging.d.ts b/node_modules/@azure/core-paging/types/3.1/core-paging.d.ts new file mode 100644 index 00000000..72a7216c --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/core-paging.d.ts @@ -0,0 +1,60 @@ +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export declare interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: PageSettingsT) => AsyncIterableIterator; +} +/** + * An interface that describes how to communicate with the service. + */ +export declare interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + }>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. 
+ */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; +} +/** + * An interface that tracks the settings for paged iteration + */ +export declare interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} +export {}; diff --git a/node_modules/@azure/core-paging/types/3.1/samples-dev/getPagedAsyncIteratorSample.d.ts b/node_modules/@azure/core-paging/types/3.1/samples-dev/getPagedAsyncIteratorSample.d.ts new file mode 100644 index 00000000..deb1df2e --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/samples-dev/getPagedAsyncIteratorSample.d.ts @@ -0,0 +1,2 @@ +export declare function main(): Promise; +//# sourceMappingURL=getPagedAsyncIteratorSample.d.ts.map diff --git a/node_modules/@azure/core-paging/types/3.1/src/getPagedAsyncIterator.d.ts b/node_modules/@azure/core-paging/types/3.1/src/getPagedAsyncIterator.d.ts new file mode 100644 index 00000000..cc5517ee --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/src/getPagedAsyncIterator.d.ts @@ -0,0 +1,10 @@ +import { PageSettings, PagedAsyncIterableIterator, PagedResult } from "./models"; +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; +//# sourceMappingURL=getPagedAsyncIterator.d.ts.map diff --git a/node_modules/@azure/core-paging/types/3.1/src/index.d.ts b/node_modules/@azure/core-paging/types/3.1/src/index.d.ts new file mode 100644 index 00000000..b116c031 --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/src/index.d.ts @@ -0,0 +1,3 @@ +export * from "./models"; +export * from "./getPagedAsyncIterator"; +//# sourceMappingURL=index.d.ts.map diff --git a/node_modules/@azure/core-paging/types/3.1/src/models.d.ts b/node_modules/@azure/core-paging/types/3.1/src/models.d.ts new file mode 100644 index 00000000..f7cd3000 --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/src/models.d.ts @@ -0,0 +1,52 @@ +/** + * An interface that tracks the settings for paged iteration + */ +export interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: PageSettingsT) => AsyncIterableIterator; +} +/** + * An interface that describes how to communicate with the service. + */ +export interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. 
+ */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + }>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; +} +//# sourceMappingURL=models.d.ts.map diff --git a/node_modules/@azure/core-paging/types/3.1/test/getPagedAsyncIterator.spec.d.ts b/node_modules/@azure/core-paging/types/3.1/test/getPagedAsyncIterator.spec.d.ts new file mode 100644 index 00000000..5aba89c2 --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/test/getPagedAsyncIterator.spec.d.ts @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=getPagedAsyncIterator.spec.d.ts.map diff --git a/node_modules/@azure/core-paging/types/latest/core-paging.d.ts b/node_modules/@azure/core-paging/types/latest/core-paging.d.ts new file mode 100644 index 00000000..7759fe3c --- /dev/null +++ b/node_modules/@azure/core-paging/types/latest/core-paging.d.ts @@ -0,0 +1,64 @@ +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; + +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export declare interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: PageSettingsT) => AsyncIterableIterator; +} + +/** + * An interface that describes how to communicate with the service. + */ +export declare interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + }>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; +} + +/** + * An interface that tracks the settings for paged iteration + */ +export declare interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} + +export { } diff --git a/node_modules/@azure/core-tracing/CHANGELOG.md b/node_modules/@azure/core-tracing/CHANGELOG.md new file mode 100644 index 00000000..ee8b4b04 --- /dev/null +++ b/node_modules/@azure/core-tracing/CHANGELOG.md @@ -0,0 +1,81 @@ +# Release History + +## 1.0.0-preview.13 (2021-07-15) + +### Features Added + +- Added support for disabling distributed tracing using the AZURE_TRACING_DISABLED environment variable. + +### Breaking Changes + +- Removed `TestTracer` and `TestSpan` from public API and into `@azure/test-utils`. 
[PR #16315](https://github.com/Azure/azure-sdk-for-js/pull/16315)
+  - `TestTracer` and `TestSpan` are intended for test support when used by other Azure packages and not intended for use by end users.
+- Removed `setTracer`, @azure/core-tracing will now rely on the `trace` API to fetch a tracer from the global tracer provider. [PR #16347](https://github.com/Azure/azure-sdk-for-js/pull/16347)
+  - If you are using `setTracer`, please use `trace.setGlobalTracerProvider(provider)` instead as described in the OpenTelemetry documentation.
+- Removed `NoOpTracer` and `NoOpSpan` from the public API. Please use `trace.wrapSpanContext(INVALID_SPAN_CONTEXT)` from `@opentelemetry/api` instead. [PR #16315](https://github.com/Azure/azure-sdk-for-js/pull/16315)
+
+## 1.0.0-preview.12 (2021-06-30)
+
+- Update `@opentelemetry/api` to version 1.0.0 [PR #15883](https://github.com/Azure/azure-sdk-for-js/pull/15883)
+  - This version ships with ESM modules and fixes an issue where Angular projects would warn about optimization bailouts due to dependencies on CommonJS or AMD.
+
+### Breaking Changes
+
+- Removed `OpenCensusSpanWrapper` and `OpenCensusTracerWrapper` from the public API. Customers using these wrappers should migrate to using `OpenTelemetry` directly. [PR #15770](https://github.com/Azure/azure-sdk-for-js/pull/15770)
+- Update `@azure/core-tracing` to version 1.0.0-preview.12. This brings core-tracing up to date with `@opentelemetry/api@1.0.0`.
+  - `Span#context` was renamed to `Span#spanContext`. This change is supported in `@azure/core-http@1.2.7`.
+
+## 1.0.0-preview.11 (2021-03-30)
+
+### Breaking Changes
+
+- Update @azure/core-tracing to version 1.0.0-preview.11. This brings core-tracing up to date with @opentelemetry/api@1.0.0-rc.0.
+  There are two scenarios that will require changes if you are using tracing:
+  - Previously, you would pass a parent span using the `OperationOptions.tracingOptions.spanOptions.parentSpan` property. This has been
+    changed so that you now specify a parent `Context` using the `OperationOptions.tracingOptions.tracingContext` property instead.
+  - The status code for Spans is no longer of type `CanonicalCode`. Instead, it's now `SpanStatusCode`, which also has a smaller range of values.
+
+## 1.0.0-preview.10 (2021-03-04)
+
+- Internal improvements to make future opentelemetry updates simpler.
+
+## 1.0.0-preview.9 (2020-08-04)
+
+- Update `@opentelemetry/api` to version 0.10.2 [PR 10393](https://github.com/Azure/azure-sdk-for-js/pull/10393)
+
+## 1.0.0-preview.8 (2020-04-28)
+
+- Update `TestSpan` to allow setting span attributes [PR link](https://github.com/Azure/azure-sdk-for-js/pull/6565).
+- [BREAKING] Migrate to OpenTelemetry 0.6 using the new `@opentelemetry/api` package. There were a few breaking changes:
+  - `SpanContext` now requires traceFlags to be set.
+  - `Tracer` has removed `recordSpanData`, `getBinaryFormat`, and `getHttpTextFormat`.
+  - `Tracer.getCurrentSpan` returns `undefined` instead of `null` when unset.
+  - `Link` objects renamed `spanContext` property to `context`.
+
+## 1.0.0-preview.7 (2019-12-03)
+
+- Updated the behavior of how incompatible versions of OpenTelemetry Tracer are handled. Now, errors will be thrown only if the user has manually set a Tracer. This means that incompatible versions will be silently ignored when tracing is not enabled.
+- Updated to use OpenTelemetry 0.2 via the `@opentelemetry/types` package. There were two breaking changes in this update:
+  - `isRecordingEvents` on `Span` was renamed to `isRecording`.
[PR link](https://github.com/open-telemetry/opentelemetry-js/pull/454) + - `addLink` was removed from `Span` as links are now only allowed to be added during span creation. This is possible by specifying any necessary links inside `SpanOptions`. [PR link](https://github.com/open-telemetry/opentelemetry-js/pull/449) + +## 1.0.0-preview.5 (2019-10-22) + +- Fixes issue where loading multiple copies of this module could result in the tracer set by `setTracer()` being reset. + +## 1.0.0-preview.4 (2019-10-08) + +- Remove dependency on the `debug` module to ensure compatibility with IE11 + +## 1.0.0-preview.3 (2019-10-07) + +- Updated to use the latest types from OpenTelemetry (PR [#5182](https://github.com/Azure/azure-sdk-for-js/pull/5182)) +- Clean up and refactored code for easier usage and testability. (PR [#5233](https://github.com/Azure/azure-sdk-for-js/pull/5233) and PR [#5283](https://github.com/Azure/azure-sdk-for-js/pull/5283)) + +## 1.0.0-preview.2 (2019-09-09) + +Updated the `OpenCensusSpanPlugin` & the `NoOpSpanPlugin` to support for retrieving span context. This allows updating of request headers with the right [span context](https://www.w3.org/TR/trace-context/#trace-context-http-headers-format). (PR [#4712](https://github.com/Azure/azure-sdk-for-js/pull/4712)) + +## 1.0.0-preview.1 (2019-08-05) + +Provides low-level interfaces and helper methods for tracing in Azure SDK diff --git a/node_modules/@azure/core-tracing/LICENSE b/node_modules/@azure/core-tracing/LICENSE new file mode 100644 index 00000000..ea8fb151 --- /dev/null +++ b/node_modules/@azure/core-tracing/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-tracing/README.md b/node_modules/@azure/core-tracing/README.md new file mode 100644 index 00000000..dc088cba --- /dev/null +++ b/node_modules/@azure/core-tracing/README.md @@ -0,0 +1,72 @@ +# Azure Core tracing library for JavaScript + +This is the core tracing library that provides low-level interfaces and helper methods for tracing in Azure SDK JavaScript libraries which work in the browser and Node.js. + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/core-tracing +``` + +## Key Concepts + +The `@azure/core-tracing` package supports enabling tracing for Azure SDK packages, using an [OpenTelemetry](https://opentelemetry.io/) `Tracer`. + +By default, all libraries log with a `NoOpTracer` that takes no action. 
+To change this, you have to use `setTracer` to set a new default `Tracer`. + +## Examples + +### Example 1 - Setting an OpenTelemetry Tracer + +```js +const opentelemetry = require("@opentelemetry/api"); +const { BasicTracer, SimpleSpanProcessor } = require("@opentelemetry/tracing"); +const { ZipkinExporter } = require("@opentelemetry/exporter-zipkin"); +const { setTracer } = require("@azure/core-tracing"); + +const exporter = new ZipkinExporter({ + serviceName: "azure-tracing-sample" +}); +const tracer = new BasicTracer(); +tracer.addSpanProcessor(new SimpleSpanProcessor(exporter)); + +setTracer(tracer); + +const rootSpan = tracer.startSpan("root"); +const context = opentelemetry.setSpan(opentelemetry.context.active(), rootSpan); + +// Call some client library methods and pass rootSpan via tracingOptions. + +rootSpan.end(); +exporter.shutdown(); +``` + +### Example 2 - Passing current Context to library operations + +```js +// Given a BlobClient from @azure/storage-blob +const result = await blobClient.download(undefined, undefined, { + tracingOptions: { + tracingContext: context + } +}); +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-tracing%2FREADME.png) diff --git a/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js new file mode 100644 index 00000000..b021cccc --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { SpanKind, setSpan, context as otContext, getTracer } from "./interfaces"; +import { trace, INVALID_SPAN_CONTEXT } from "@opentelemetry/api"; +export function isTracingDisabled() { + var _a; + if (typeof process === "undefined") { + // not supported in browser for now without polyfills + return false; + } + const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { + return false; + } + return Boolean(azureTracingDisabledValue); +} +/** + * Creates a function that can be used to create spans using the global tracer. + * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +export function createSpanFunction(args) { + return function (operationName, operationOptions) { + const tracer = getTracer(); + const tracingOptions = (operationOptions === null || operationOptions === void 0 ? 
void 0 : operationOptions.tracingOptions) || {}; + const spanOptions = Object.assign({ kind: SpanKind.INTERNAL }, tracingOptions.spanOptions); + const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; + let span; + if (isTracingDisabled()) { + span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT); + } + else { + span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); + } + if (args.namespace) { + span.setAttribute("az.namespace", args.namespace); + } + let newSpanOptions = tracingOptions.spanOptions || {}; + if (span.isRecording() && args.namespace) { + newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + } + const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span) }); + const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); + return { + span, + updatedOptions: newOperationOptions + }; + }; +} +//# sourceMappingURL=createSpan.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map new file mode 100644 index 00000000..ab64b617 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createSpan.js","sourceRoot":"","sources":["../../src/createSpan.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAIL,QAAQ,EACR,OAAO,EACP,OAAO,IAAI,SAAS,EACpB,SAAS,EACV,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,KAAK,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAuBjE,MAAM,UAAU,iBAAiB;;IAC/B,IAAI,OAAO,OAAO,KAAK,WAAW,EAAE;QAClC,qDAAqD;QACrD,OAAO,KAAK,CAAC;KACd;IAED,MAAM,yBAAyB,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,sBAAsB,0CAAE,WAAW,EAAE,CAAC;IAEpF,IAAI,yBAAyB,KAAK,OAAO,IAAI,yBAAyB,KAAK,GAAG,EAAE;QAC9E,OAAO,KAAK,CAAC;KACd;IAED,OAAO,OAAO,CAAC,yBAAyB,CAAC,CAAC;AAC5C,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,kBAAkB,CAAC,IAA4B;IAC7D,OAAO,UACL,aAAqB,EACrB,gBAA+B;QAE/B,MAAM,MAAM,GAAG,SAAS,EAAE,CAAC;QAC3B,MAAM,cAAc,GAAG,CAAA,gBAAgB,aAAhB,gBAAgB,uBAAhB,gBAAgB,CAAE,cAAc,KAAI,EAAE,CAAC;QAC9D,MAAM,WAAW,mBACf,IAAI,EAAE,QAAQ,CAAC,QAAQ,IACpB,cAAc,CAAC,WAAW,CAC9B,CAAC;QAEF,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,aAAa,IAAI,aAAa,EAAE,CAAC,CAAC,CAAC,aAAa,CAAC;QAE/F,IAAI,IAAU,CAAC;QACf,IAAI,iBAAiB,EAAE,EAAE;YACvB,IAAI,GAAG,KAAK,CAAC,eAAe,CAAC,oBAAoB,CAAC,CAAC;SACpD;aAAM;YACL,IAAI,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE,WAAW,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;SAC/E;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;SACnD;QAED,IAAI,cAAc,GAAG,cAAc,CAAC,WAAW,IAAI,EAAE,CAAC;QAEtD,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,IAAI,CAAC,SAAS,EAAE;YACxC,cAAc,mCACT,cAAc,CAAC,WAAW,KAC7B,UAAU,kCACL,WAAW,CAAC,UAAU,KACzB,cAAc,EAAE,IAAI,CAAC,SAAS,MAEjC,CAAC;SACH;QAED,MAAM,iBAAiB,mCAClB,cAAc,KACjB,WAAW,EAAE,cAAc,EAC3B,cAAc,EAAE,OAAO,CAAC,cAAc,CAAC,cAAc,IAAI,SAAS,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC,GACnF,CAAC;QAEF,MAAM,mBAAmB,GAAG,gCACvB,gBAAgB,KACnB,cAAc,EAAE,iBAAiB,GAC2B,CAAC;QAE/D,OAAO;YACL,IAAI;YACJ,cAAc,EAAE,mBAAmB;SACpC,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n OperationTracingOptions,\n Span,\n SpanOptions,\n SpanKind,\n setSpan,\n context as otContext,\n 
getTracer\n} from \"./interfaces\";\nimport { trace, INVALID_SPAN_CONTEXT } from \"@opentelemetry/api\";\n\n/**\n * Arguments for `createSpanFunction` that allow you to specify the\n * prefix for each created span as well as the `az.namespace` attribute.\n *\n * @hidden\n */\nexport interface CreateSpanFunctionArgs {\n /**\n * Package name prefix.\n *\n * NOTE: if this is empty no prefix will be applied to created Span names.\n */\n packagePrefix: string;\n /**\n * Service namespace\n *\n * NOTE: if this is empty no `az.namespace` attribute will be added to created Spans.\n */\n namespace: string;\n}\n\nexport function isTracingDisabled(): boolean {\n if (typeof process === \"undefined\") {\n // not supported in browser for now without polyfills\n return false;\n }\n\n const azureTracingDisabledValue = process.env.AZURE_TRACING_DISABLED?.toLowerCase();\n\n if (azureTracingDisabledValue === \"false\" || azureTracingDisabledValue === \"0\") {\n return false;\n }\n\n return Boolean(azureTracingDisabledValue);\n}\n\n/**\n * Creates a function that can be used to create spans using the global tracer.\n *\n * Usage:\n *\n * ```typescript\n * // once\n * const createSpan = createSpanFunction({ packagePrefix: \"Azure.Data.AppConfiguration\", namespace: \"Microsoft.AppConfiguration\" });\n *\n * // in each operation\n * const span = createSpan(\"deleteConfigurationSetting\", operationOptions);\n * // code...\n * span.end();\n * ```\n *\n * @hidden\n * @param args - allows configuration of the prefix for each span as well as the az.namespace field.\n */\nexport function createSpanFunction(args: CreateSpanFunctionArgs) {\n return function(\n operationName: string,\n operationOptions: T | undefined\n ): { span: Span; updatedOptions: T } {\n const tracer = getTracer();\n const tracingOptions = operationOptions?.tracingOptions || {};\n const spanOptions: SpanOptions = {\n kind: SpanKind.INTERNAL,\n ...tracingOptions.spanOptions\n };\n\n const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName;\n\n let span: Span;\n if (isTracingDisabled()) {\n span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT);\n } else {\n span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext);\n }\n\n if (args.namespace) {\n span.setAttribute(\"az.namespace\", args.namespace);\n }\n\n let newSpanOptions = tracingOptions.spanOptions || {};\n\n if (span.isRecording() && args.namespace) {\n newSpanOptions = {\n ...tracingOptions.spanOptions,\n attributes: {\n ...spanOptions.attributes,\n \"az.namespace\": args.namespace\n }\n };\n }\n\n const newTracingOptions: Required = {\n ...tracingOptions,\n spanOptions: newSpanOptions,\n tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span)\n };\n\n const newOperationOptions = {\n ...operationOptions,\n tracingOptions: newTracingOptions\n } as T & { tracingOptions: Required };\n\n return {\n span,\n updatedOptions: newOperationOptions\n };\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/index.js b/node_modules/@azure/core-tracing/dist-esm/src/index.js new file mode 100644 index 00000000..065b66c9 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/index.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// Tracers and wrappers +export { createSpanFunction } from "./createSpan"; +// Shared interfaces +export { context, getSpan, getSpanContext, getTracer, isSpanContextValid, setSpan, setSpanContext, SpanKind, SpanStatusCode } from "./interfaces"; +// Utilities +export { extractSpanContextFromTraceParentHeader, getTraceParentHeader } from "./utils/traceParentHeader"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/index.js.map b/node_modules/@azure/core-tracing/dist-esm/src/index.js.map new file mode 100644 index 00000000..37bcb747 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,uBAAuB;AACvB,OAAO,EAAE,kBAAkB,EAA0B,MAAM,cAAc,CAAC;AAE1E,oBAAoB;AACpB,OAAO,EACL,OAAO,EAOP,OAAO,EACP,cAAc,EACd,SAAS,EAET,kBAAkB,EAGlB,OAAO,EACP,cAAc,EAKd,QAAQ,EAGR,cAAc,EAKf,MAAM,cAAc,CAAC;AAEtB,YAAY;AACZ,OAAO,EACL,uCAAuC,EACvC,oBAAoB,EACrB,MAAM,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// Tracers and wrappers\nexport { createSpanFunction, CreateSpanFunctionArgs } from \"./createSpan\";\n\n// Shared interfaces\nexport {\n context,\n Context,\n ContextAPI,\n Exception,\n ExceptionWithCode,\n ExceptionWithMessage,\n ExceptionWithName,\n getSpan,\n getSpanContext,\n getTracer,\n HrTime,\n isSpanContextValid,\n Link,\n OperationTracingOptions,\n setSpan,\n setSpanContext,\n Span,\n SpanAttributes,\n SpanAttributeValue,\n SpanContext,\n SpanKind,\n SpanOptions,\n SpanStatus,\n SpanStatusCode,\n TimeInput,\n TraceFlags,\n Tracer,\n TraceState\n} from \"./interfaces\";\n\n// Utilities\nexport {\n extractSpanContextFromTraceParentHeader,\n getTraceParentHeader\n} from \"./utils/traceParentHeader\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js new file mode 100644 index 00000000..e251b1ff --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { context as otContext, trace as otTrace } from "@opentelemetry/api"; +/** + * The kind of span. + */ +export var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind || (SpanKind = {})); +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +export function getSpan(context) { + return otTrace.getSpan(context); +} +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +export function setSpan(context, span) { + return otTrace.setSpan(context, span); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +export function setSpanContext(context, spanContext) { + return otTrace.setSpanContext(context, spanContext); +} +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +export function getSpanContext(context) { + return otTrace.getSpanContext(context); +} +/** + * Returns true of the given {@link SpanContext} is valid. + * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. + * + * @param context - the {@link SpanContext} to validate. + * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +export function isSpanContextValid(context) { + return otTrace.isSpanContextValid(context); +} +export function getTracer(name, version) { + return otTrace.getTracer(name || "azure/core-tracing", version); +} +/** Entrypoint for context API */ +export const context = otContext; +/** SpanStatusCode */ +export var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. 
+ */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode || (SpanStatusCode = {})); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map new file mode 100644 index 00000000..cd371cd9 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../../src/interfaces.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,KAAK,IAAI,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAoF5E;;GAEG;AACH,MAAM,CAAN,IAAY,QAyBX;AAzBD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IACZ;;;OAGG;IACH,2CAAU,CAAA;IACV;;;OAGG;IACH,2CAAU,CAAA;IACV;;;;OAIG;IACH,+CAAY,CAAA;IACZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EAzBW,QAAQ,KAAR,QAAQ,QAyBnB;AAqDD;;;;GAIG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB;IACtC,OAAO,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAClC,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACxC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB,EAAE,WAAwB;IACvE,OAAO,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACtD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB;IAC7C,OAAO,OAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;AACzC,CAAC;AAYD;;;;;;;GAOG;AACH,MAAM,UAAU,kBAAkB,CAAC,OAAoB;IACrD,OAAO,OAAO,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;AAC7C,CAAC;AAUD,MAAM,UAAU,SAAS,CAAC,IAAa,EAAE,OAAgB;IACvD,OAAO,OAAO,CAAC,SAAS,CAAC,IAAI,IAAI,oBAAoB,EAAE,OAAO,CAAC,CAAC;AAClE,CAAC;AAED,iCAAiC;AACjC,MAAM,CAAC,MAAM,OAAO,GAAe,SAAS,CAAC;AAE7C,qBAAqB;AACrB,MAAM,CAAN,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,KAAd,cAAc,QAczB","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { context as otContext, trace as otTrace } from \"@opentelemetry/api\";\n\n/**\n * A Tracer.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. 
Start the span without setting it on context.\n *\n * This method does NOT modify the current Context.\n *\n * @param name - The name of the span\n * @param options - SpanOptions used for span creation\n * @param context - Context to use to extract parent\n * @returns The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n}\n\n/**\n * TraceState.\n */\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key - key of the TraceState entry.\n * @param value - value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key - the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key - with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n\n/**\n * Represents high resolution time.\n */\nexport declare type HrTime = [number, number];\n\n/**\n * Used to represent a Time.\n */\nexport type TimeInput = HrTime | number | Date;\n\n/**\n * The status for a span.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. */\n message?: string;\n}\n\n/**\n * The kind of span.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4\n}\n\n/**\n * An Exception for a Span.\n */\nexport declare type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n\n/**\n * An Exception with a code.\n */\nexport interface ExceptionWithCode {\n /** The code. */\n code: string | number;\n /** The name. */\n name?: string;\n /** The message. */\n message?: string;\n /** The stack. 
*/\n stack?: string;\n}\n\n/**\n * An Exception with a message.\n */\nexport interface ExceptionWithMessage {\n /** The code. */\n code?: string | number;\n /** The message. */\n message: string;\n /** The name. */\n name?: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * An Exception with a name.\n */\nexport interface ExceptionWithName {\n /** The code. */\n code?: string | number;\n /** The message. */\n message?: string;\n /** The name. */\n name: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * Return the span if one exists\n *\n * @param context - context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return otTrace.getSpan(context);\n}\n\n/**\n * Set the span on a context\n *\n * @param context - context to use as parent\n * @param span - span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return otTrace.setSpan(context, span);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context - context to set active span on\n * @param spanContext - span context to be wrapped\n */\nexport function setSpanContext(context: Context, spanContext: SpanContext): Context {\n return otTrace.setSpanContext(context, spanContext);\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context - context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return otTrace.getSpanContext(context);\n}\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport interface ContextAPI {\n /**\n * Get the currently active context\n */\n active(): Context;\n}\n\n/**\n * Returns true of the given {@link SpanContext} is valid.\n * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec.\n *\n * @param context - the {@link SpanContext} to validate.\n *\n * @returns true if the {@link SpanContext} is valid, false otherwise.\n */\nexport function isSpanContextValid(context: SpanContext): boolean {\n return otTrace.isSpanContextValid(context);\n}\n\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(): Tracer;\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(name: string, version?: string): Tracer;\nexport function getTracer(name?: string, version?: string): Tracer {\n return otTrace.getTracer(name || \"azure/core-tracing\", version);\n}\n\n/** Entrypoint for context API */\nexport const context: ContextAPI = otContext;\n\n/** SpanStatusCode */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2\n}\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. 
Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key - the key for this attribute.\n * @param value - the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n /**\n * Sets attributes to the span.\n *\n * @param attributes - the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n /**\n * Adds an event to the Span.\n *\n * @param name - the name of the event.\n * @param attributesOrStartTime - the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is TimeInput and 3rd param is undefined\n * @param startTime - start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status - the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param endTime - the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception - the exception the only accepted values are string or Error\n * @param time - the time to set as Span's event time. If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name - the Span name.\n */\n updateName(name: string): this;\n}\n\n/**\n * Shorthand enum for common traceFlags values inside SpanContext\n */\nexport const enum TraceFlags {\n /** No flag set. */\n NONE = 0x0,\n /** Caller is collecting trace information. */\n SAMPLED = 0x1\n}\n\n/**\n * A light interface that tries to be structurally compatible with OpenTelemetry\n */\nexport interface SpanContext {\n /**\n * UUID of a trace.\n */\n traceId: string;\n /**\n * UUID of a Span.\n */\n spanId: string;\n /**\n * https://www.w3.org/TR/trace-context/#trace-flags\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. 
The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n\n/**\n * Used to specify a span that is linked to another.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n\n /** A set of {@link SpanAttributes} on the link. */\n attributes?: SpanAttributes;\n}\n\n/**\n * Attributes for a Span.\n */\nexport interface SpanAttributes {\n /**\n * Attributes for a Span.\n */\n [attributeKey: string]: SpanAttributeValue | undefined;\n}\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport declare type SpanAttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n\n/**\n * An interface that enables manual propagation of Spans\n */\nexport interface SpanOptions {\n /**\n * Attributes to set on the Span\n */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /**\n * The type of Span. Default to SpanKind.INTERNAL\n */\n kind?: SpanKind;\n\n /**\n * A manually specified start time for the created `Span` object.\n */\n startTime?: TimeInput;\n}\n\n/**\n * Tracing options to set on an operation.\n */\nexport interface OperationTracingOptions {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context to use for created Spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * OpenTelemetry compatible interface for Context\n */\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js new file mode 100644 index 00000000..90657baf --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const VERSION = "00"; +/** + * Generates a `SpanContext` given a `traceparent` header value. + * @param traceParent - Serialized span context data as a `traceparent` header value. + * @returns The `SpanContext` generated from the `traceparent` value. 
+ */ +export function extractSpanContextFromTraceParentHeader(traceParentHeader) { + const parts = traceParentHeader.split("-"); + if (parts.length !== 4) { + return; + } + const [version, traceId, spanId, traceOptions] = parts; + if (version !== VERSION) { + return; + } + const traceFlags = parseInt(traceOptions, 16); + const spanContext = { + spanId, + traceId, + traceFlags + }; + return spanContext; +} +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +export function getTraceParentHeader(spanContext) { + const missingFields = []; + if (!spanContext.traceId) { + missingFields.push("traceId"); + } + if (!spanContext.spanId) { + missingFields.push("spanId"); + } + if (missingFields.length) { + return; + } + const flags = spanContext.traceFlags || 0 /* NONE */; + const hexFlags = flags.toString(16); + const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; + // https://www.w3.org/TR/trace-context/#traceparent-header-field-values + return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; +} +//# sourceMappingURL=traceParentHeader.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map new file mode 100644 index 00000000..d63dd9d2 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map @@ -0,0 +1 @@ +{"version":3,"file":"traceParentHeader.js","sourceRoot":"","sources":["../../../src/utils/traceParentHeader.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,MAAM,OAAO,GAAG,IAAI,CAAC;AAErB;;;;GAIG;AACH,MAAM,UAAU,uCAAuC,CACrD,iBAAyB;IAEzB,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAE3C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,OAAO;KACR;IAED,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,YAAY,CAAC,GAAG,KAAK,CAAC;IAEvD,IAAI,OAAO,KAAK,OAAO,EAAE;QACvB,OAAO;KACR;IAED,MAAM,UAAU,GAAG,QAAQ,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;IAE9C,MAAM,WAAW,GAAgB;QAC/B,MAAM;QACN,OAAO;QACP,UAAU;KACX,CAAC;IAEF,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAAC,WAAwB;IAC3D,MAAM,aAAa,GAAa,EAAE,CAAC;IACnC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;QACxB,aAAa,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;KAC/B;IACD,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;QACvB,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC9B;IAED,IAAI,aAAa,CAAC,MAAM,EAAE;QACxB,OAAO;KACR;IAED,MAAM,KAAK,GAAG,WAAW,CAAC,UAAU,gBAAmB,CAAC;IACxD,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IACpC,MAAM,UAAU,GAAG,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,QAAQ,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC;IAErE,uEAAuE;IACvE,OAAO,GAAG,OAAO,IAAI,WAAW,CAAC,OAAO,IAAI,WAAW,CAAC,MAAM,IAAI,UAAU,EAAE,CAAC;AACjF,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SpanContext, TraceFlags } from \"../interfaces\";\n\nconst VERSION = \"00\";\n\n/**\n * Generates a `SpanContext` given a `traceparent` header value.\n * @param traceParent - Serialized span context data as a `traceparent` header value.\n * @returns The `SpanContext` generated from the `traceparent` value.\n */\nexport function extractSpanContextFromTraceParentHeader(\n traceParentHeader: string\n): SpanContext | undefined {\n const parts = traceParentHeader.split(\"-\");\n\n if (parts.length !== 4) {\n return;\n }\n\n const [version, traceId, spanId, traceOptions] = parts;\n\n if (version !== VERSION) {\n return;\n }\n\n 
const traceFlags = parseInt(traceOptions, 16);\n\n const spanContext: SpanContext = {\n spanId,\n traceId,\n traceFlags\n };\n\n return spanContext;\n}\n\n/**\n * Generates a `traceparent` value given a span context.\n * @param spanContext - Contains context for a specific span.\n * @returns The `spanContext` represented as a `traceparent` value.\n */\nexport function getTraceParentHeader(spanContext: SpanContext): string | undefined {\n const missingFields: string[] = [];\n if (!spanContext.traceId) {\n missingFields.push(\"traceId\");\n }\n if (!spanContext.spanId) {\n missingFields.push(\"spanId\");\n }\n\n if (missingFields.length) {\n return;\n }\n\n const flags = spanContext.traceFlags || TraceFlags.NONE;\n const hexFlags = flags.toString(16);\n const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags;\n\n // https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/index.js b/node_modules/@azure/core-tracing/dist/index.js new file mode 100644 index 00000000..527bbb4f --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/index.js @@ -0,0 +1,219 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var api = require('@opentelemetry/api'); + +// Copyright (c) Microsoft Corporation. +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(exports.SpanKind || (exports.SpanKind = {})); +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +function getSpan(context) { + return api.trace.getSpan(context); +} +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +function setSpan(context, span) { + return api.trace.setSpan(context, span); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return api.trace.setSpanContext(context, spanContext); +} +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +function getSpanContext(context) { + return api.trace.getSpanContext(context); +} +/** + * Returns true of the given {@link SpanContext} is valid. + * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. 
+ * + * @param context - the {@link SpanContext} to validate. + * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +function isSpanContextValid(context) { + return api.trace.isSpanContextValid(context); +} +function getTracer(name, version) { + return api.trace.getTracer(name || "azure/core-tracing", version); +} +/** Entrypoint for context API */ +const context = api.context; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(exports.SpanStatusCode || (exports.SpanStatusCode = {})); + +// Copyright (c) Microsoft Corporation. +function isTracingDisabled() { + var _a; + if (typeof process === "undefined") { + // not supported in browser for now without polyfills + return false; + } + const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { + return false; + } + return Boolean(azureTracingDisabledValue); +} +/** + * Creates a function that can be used to create spans using the global tracer. + * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +function createSpanFunction(args) { + return function (operationName, operationOptions) { + const tracer = getTracer(); + const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; + const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); + const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; + let span; + if (isTracingDisabled()) { + span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); + } + else { + span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); + } + if (args.namespace) { + span.setAttribute("az.namespace", args.namespace); + } + let newSpanOptions = tracingOptions.spanOptions || {}; + if (span.isRecording() && args.namespace) { + newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + } + const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); + const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); + return { + span, + updatedOptions: newOperationOptions + }; + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const VERSION = "00"; +/** + * Generates a `SpanContext` given a `traceparent` header value. 
+ * @param traceParent - Serialized span context data as a `traceparent` header value. + * @returns The `SpanContext` generated from the `traceparent` value. + */ +function extractSpanContextFromTraceParentHeader(traceParentHeader) { + const parts = traceParentHeader.split("-"); + if (parts.length !== 4) { + return; + } + const [version, traceId, spanId, traceOptions] = parts; + if (version !== VERSION) { + return; + } + const traceFlags = parseInt(traceOptions, 16); + const spanContext = { + spanId, + traceId, + traceFlags + }; + return spanContext; +} +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +function getTraceParentHeader(spanContext) { + const missingFields = []; + if (!spanContext.traceId) { + missingFields.push("traceId"); + } + if (!spanContext.spanId) { + missingFields.push("spanId"); + } + if (missingFields.length) { + return; + } + const flags = spanContext.traceFlags || 0 /* NONE */; + const hexFlags = flags.toString(16); + const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; + // https://www.w3.org/TR/trace-context/#traceparent-header-field-values + return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; +} + +exports.context = context; +exports.createSpanFunction = createSpanFunction; +exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; +exports.getSpan = getSpan; +exports.getSpanContext = getSpanContext; +exports.getTraceParentHeader = getTraceParentHeader; +exports.getTracer = getTracer; +exports.isSpanContextValid = isSpanContextValid; +exports.setSpan = setSpan; +exports.setSpanContext = setSpanContext; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-tracing/dist/index.js.map b/node_modules/@azure/core-tracing/dist/index.js.map new file mode 100644 index 00000000..c1b0268e --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/interfaces.ts","../src/createSpan.ts","../src/utils/traceParentHeader.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { context as otContext, trace as otTrace } from \"@opentelemetry/api\";\n\n/**\n * A Tracer.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. 
Start the span without setting it on context.\n *\n * This method does NOT modify the current Context.\n *\n * @param name - The name of the span\n * @param options - SpanOptions used for span creation\n * @param context - Context to use to extract parent\n * @returns The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n}\n\n/**\n * TraceState.\n */\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key - key of the TraceState entry.\n * @param value - value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key - the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key - with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n\n/**\n * Represents high resolution time.\n */\nexport declare type HrTime = [number, number];\n\n/**\n * Used to represent a Time.\n */\nexport type TimeInput = HrTime | number | Date;\n\n/**\n * The status for a span.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. */\n message?: string;\n}\n\n/**\n * The kind of span.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4\n}\n\n/**\n * An Exception for a Span.\n */\nexport declare type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n\n/**\n * An Exception with a code.\n */\nexport interface ExceptionWithCode {\n /** The code. */\n code: string | number;\n /** The name. */\n name?: string;\n /** The message. */\n message?: string;\n /** The stack. 
*/\n stack?: string;\n}\n\n/**\n * An Exception with a message.\n */\nexport interface ExceptionWithMessage {\n /** The code. */\n code?: string | number;\n /** The message. */\n message: string;\n /** The name. */\n name?: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * An Exception with a name.\n */\nexport interface ExceptionWithName {\n /** The code. */\n code?: string | number;\n /** The message. */\n message?: string;\n /** The name. */\n name: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * Return the span if one exists\n *\n * @param context - context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return otTrace.getSpan(context);\n}\n\n/**\n * Set the span on a context\n *\n * @param context - context to use as parent\n * @param span - span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return otTrace.setSpan(context, span);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context - context to set active span on\n * @param spanContext - span context to be wrapped\n */\nexport function setSpanContext(context: Context, spanContext: SpanContext): Context {\n return otTrace.setSpanContext(context, spanContext);\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context - context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return otTrace.getSpanContext(context);\n}\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport interface ContextAPI {\n /**\n * Get the currently active context\n */\n active(): Context;\n}\n\n/**\n * Returns true of the given {@link SpanContext} is valid.\n * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec.\n *\n * @param context - the {@link SpanContext} to validate.\n *\n * @returns true if the {@link SpanContext} is valid, false otherwise.\n */\nexport function isSpanContextValid(context: SpanContext): boolean {\n return otTrace.isSpanContextValid(context);\n}\n\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(): Tracer;\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(name: string, version?: string): Tracer;\nexport function getTracer(name?: string, version?: string): Tracer {\n return otTrace.getTracer(name || \"azure/core-tracing\", version);\n}\n\n/** Entrypoint for context API */\nexport const context: ContextAPI = otContext;\n\n/** SpanStatusCode */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2\n}\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. 
Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key - the key for this attribute.\n * @param value - the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n /**\n * Sets attributes to the span.\n *\n * @param attributes - the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n /**\n * Adds an event to the Span.\n *\n * @param name - the name of the event.\n * @param attributesOrStartTime - the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is TimeInput and 3rd param is undefined\n * @param startTime - start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status - the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param endTime - the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception - the exception the only accepted values are string or Error\n * @param time - the time to set as Span's event time. If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name - the Span name.\n */\n updateName(name: string): this;\n}\n\n/**\n * Shorthand enum for common traceFlags values inside SpanContext\n */\nexport const enum TraceFlags {\n /** No flag set. */\n NONE = 0x0,\n /** Caller is collecting trace information. */\n SAMPLED = 0x1\n}\n\n/**\n * A light interface that tries to be structurally compatible with OpenTelemetry\n */\nexport interface SpanContext {\n /**\n * UUID of a trace.\n */\n traceId: string;\n /**\n * UUID of a Span.\n */\n spanId: string;\n /**\n * https://www.w3.org/TR/trace-context/#trace-flags\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. 
The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n\n/**\n * Used to specify a span that is linked to another.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n\n /** A set of {@link SpanAttributes} on the link. */\n attributes?: SpanAttributes;\n}\n\n/**\n * Attributes for a Span.\n */\nexport interface SpanAttributes {\n /**\n * Attributes for a Span.\n */\n [attributeKey: string]: SpanAttributeValue | undefined;\n}\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport declare type SpanAttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n\n/**\n * An interface that enables manual propagation of Spans\n */\nexport interface SpanOptions {\n /**\n * Attributes to set on the Span\n */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /**\n * The type of Span. Default to SpanKind.INTERNAL\n */\n kind?: SpanKind;\n\n /**\n * A manually specified start time for the created `Span` object.\n */\n startTime?: TimeInput;\n}\n\n/**\n * Tracing options to set on an operation.\n */\nexport interface OperationTracingOptions {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context to use for created Spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * OpenTelemetry compatible interface for Context\n */\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n OperationTracingOptions,\n Span,\n SpanOptions,\n SpanKind,\n setSpan,\n context as otContext,\n getTracer\n} from \"./interfaces\";\nimport { trace, INVALID_SPAN_CONTEXT } from \"@opentelemetry/api\";\n\n/**\n * Arguments for `createSpanFunction` that allow you to specify the\n * prefix for each created span as well as the `az.namespace` attribute.\n *\n * @hidden\n */\nexport interface CreateSpanFunctionArgs {\n /**\n * Package name prefix.\n *\n * NOTE: if this is empty no prefix will be applied to created Span names.\n */\n packagePrefix: string;\n /**\n * Service namespace\n *\n * NOTE: if this is empty no `az.namespace` attribute will be added to created 
Spans.\n */\n namespace: string;\n}\n\nexport function isTracingDisabled(): boolean {\n if (typeof process === \"undefined\") {\n // not supported in browser for now without polyfills\n return false;\n }\n\n const azureTracingDisabledValue = process.env.AZURE_TRACING_DISABLED?.toLowerCase();\n\n if (azureTracingDisabledValue === \"false\" || azureTracingDisabledValue === \"0\") {\n return false;\n }\n\n return Boolean(azureTracingDisabledValue);\n}\n\n/**\n * Creates a function that can be used to create spans using the global tracer.\n *\n * Usage:\n *\n * ```typescript\n * // once\n * const createSpan = createSpanFunction({ packagePrefix: \"Azure.Data.AppConfiguration\", namespace: \"Microsoft.AppConfiguration\" });\n *\n * // in each operation\n * const span = createSpan(\"deleteConfigurationSetting\", operationOptions);\n * // code...\n * span.end();\n * ```\n *\n * @hidden\n * @param args - allows configuration of the prefix for each span as well as the az.namespace field.\n */\nexport function createSpanFunction(args: CreateSpanFunctionArgs) {\n return function(\n operationName: string,\n operationOptions: T | undefined\n ): { span: Span; updatedOptions: T } {\n const tracer = getTracer();\n const tracingOptions = operationOptions?.tracingOptions || {};\n const spanOptions: SpanOptions = {\n kind: SpanKind.INTERNAL,\n ...tracingOptions.spanOptions\n };\n\n const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName;\n\n let span: Span;\n if (isTracingDisabled()) {\n span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT);\n } else {\n span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext);\n }\n\n if (args.namespace) {\n span.setAttribute(\"az.namespace\", args.namespace);\n }\n\n let newSpanOptions = tracingOptions.spanOptions || {};\n\n if (span.isRecording() && args.namespace) {\n newSpanOptions = {\n ...tracingOptions.spanOptions,\n attributes: {\n ...spanOptions.attributes,\n \"az.namespace\": args.namespace\n }\n };\n }\n\n const newTracingOptions: Required = {\n ...tracingOptions,\n spanOptions: newSpanOptions,\n tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span)\n };\n\n const newOperationOptions = {\n ...operationOptions,\n tracingOptions: newTracingOptions\n } as T & { tracingOptions: Required };\n\n return {\n span,\n updatedOptions: newOperationOptions\n };\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SpanContext, TraceFlags } from \"../interfaces\";\n\nconst VERSION = \"00\";\n\n/**\n * Generates a `SpanContext` given a `traceparent` header value.\n * @param traceParent - Serialized span context data as a `traceparent` header value.\n * @returns The `SpanContext` generated from the `traceparent` value.\n */\nexport function extractSpanContextFromTraceParentHeader(\n traceParentHeader: string\n): SpanContext | undefined {\n const parts = traceParentHeader.split(\"-\");\n\n if (parts.length !== 4) {\n return;\n }\n\n const [version, traceId, spanId, traceOptions] = parts;\n\n if (version !== VERSION) {\n return;\n }\n\n const traceFlags = parseInt(traceOptions, 16);\n\n const spanContext: SpanContext = {\n spanId,\n traceId,\n traceFlags\n };\n\n return spanContext;\n}\n\n/**\n * Generates a `traceparent` value given a span context.\n * @param spanContext - Contains context for a specific span.\n * @returns The `spanContext` represented as a `traceparent` value.\n */\nexport function getTraceParentHeader(spanContext: SpanContext): 
string | undefined {\n const missingFields: string[] = [];\n if (!spanContext.traceId) {\n missingFields.push(\"traceId\");\n }\n if (!spanContext.spanId) {\n missingFields.push(\"spanId\");\n }\n\n if (missingFields.length) {\n return;\n }\n\n const flags = spanContext.traceFlags || TraceFlags.NONE;\n const hexFlags = flags.toString(16);\n const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags;\n\n // https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`;\n}\n"],"names":["SpanKind","otTrace","otContext","SpanStatusCode","trace","INVALID_SPAN_CONTEXT"],"mappings":";;;;;;AAAA;AA0FA,WAAY,QAAQ;;IAElB,+CAAY,CAAA;;;;;IAKZ,2CAAU,CAAA;;;;;IAKV,2CAAU,CAAA;;;;;;IAMV,+CAAY,CAAA;;;;;;IAMZ,+CAAY,CAAA;AACd,CAAC,EAzBWA,gBAAQ,KAARA,gBAAQ,QAyBnB;AAqDD;;;;;SAKgB,OAAO,CAAC,OAAgB;IACtC,OAAOC,SAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAClC,CAAC;AAED;;;;;;SAMgB,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAOA,SAAO,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACxC,CAAC;AAED;;;;;;;SAOgB,cAAc,CAAC,OAAgB,EAAE,WAAwB;IACvE,OAAOA,SAAO,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACtD,CAAC;AAED;;;;;SAKgB,cAAc,CAAC,OAAgB;IAC7C,OAAOA,SAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;AACzC,CAAC;AAYD;;;;;;;;SAQgB,kBAAkB,CAAC,OAAoB;IACrD,OAAOA,SAAO,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;AAC7C,CAAC;SAUe,SAAS,CAAC,IAAa,EAAE,OAAgB;IACvD,OAAOA,SAAO,CAAC,SAAS,CAAC,IAAI,IAAI,oBAAoB,EAAE,OAAO,CAAC,CAAC;AAClE,CAAC;AAED;MACa,OAAO,GAAeC,YAAU;AAG7C,WAAY,cAAc;;;;IAIxB,qDAAS,CAAA;;;;;IAKT,+CAAM,CAAA;;;;IAIN,qDAAS,CAAA;AACX,CAAC,EAdWC,sBAAc,KAAdA,sBAAc;;ACrP1B;AACA,SAkCgB,iBAAiB;;IAC/B,IAAI,OAAO,OAAO,KAAK,WAAW,EAAE;;QAElC,OAAO,KAAK,CAAC;KACd;IAED,MAAM,yBAAyB,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,sBAAsB,0CAAE,WAAW,EAAE,CAAC;IAEpF,IAAI,yBAAyB,KAAK,OAAO,IAAI,yBAAyB,KAAK,GAAG,EAAE;QAC9E,OAAO,KAAK,CAAC;KACd;IAED,OAAO,OAAO,CAAC,yBAAyB,CAAC,CAAC;AAC5C,CAAC;AAED;;;;;;;;;;;;;;;;;;AAkBA,SAAgB,kBAAkB,CAAC,IAA4B;IAC7D,OAAO,UACL,aAAqB,EACrB,gBAA+B;QAE/B,MAAM,MAAM,GAAG,SAAS,EAAE,CAAC;QAC3B,MAAM,cAAc,GAAG,CAAA,gBAAgB,aAAhB,gBAAgB,uBAAhB,gBAAgB,CAAE,cAAc,KAAI,EAAE,CAAC;QAC9D,MAAM,WAAW,mBACf,IAAI,EAAEH,gBAAQ,CAAC,QAAQ,IACpB,cAAc,CAAC,WAAW,CAC9B,CAAC;QAEF,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,GAAG,GAAG,IAAI,CAAC,aAAa,IAAI,aAAa,EAAE,GAAG,aAAa,CAAC;QAE/F,IAAI,IAAU,CAAC;QACf,IAAI,iBAAiB,EAAE,EAAE;YACvB,IAAI,GAAGI,SAAK,CAAC,eAAe,CAACC,wBAAoB,CAAC,CAAC;SACpD;aAAM;YACL,IAAI,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE,WAAW,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;SAC/E;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;SACnD;QAED,IAAI,cAAc,GAAG,cAAc,CAAC,WAAW,IAAI,EAAE,CAAC;QAEtD,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,IAAI,CAAC,SAAS,EAAE;YACxC,cAAc,mCACT,cAAc,CAAC,WAAW,KAC7B,UAAU,kCACL,WAAW,CAAC,UAAU,KACzB,cAAc,EAAE,IAAI,CAAC,SAAS,MAEjC,CAAC;SACH;QAED,MAAM,iBAAiB,mCAClB,cAAc,KACjB,WAAW,EAAE,cAAc,EAC3B,cAAc,EAAE,OAAO,CAAC,cAAc,CAAC,cAAc,IAAIH,OAAS,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC,GACnF,CAAC;QAEF,MAAM,mBAAmB,GAAG,gCACvB,gBAAgB,KACnB,cAAc,EAAE,iBAAiB,GAC2B,CAAC;QAE/D,OAAO;YACL,IAAI;YACJ,cAAc,EAAE,mBAAmB;SACpC,CAAC;KACH,CAAC;AACJ,CAAC;;ACzHD;AACA;AAIA,MAAM,OAAO,GAAG,IAAI,CAAC;AAErB;;;;;AAKA,SAAgB,uCAAuC,CACrD,iBAAyB;IAEzB,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAE3C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,OAAO;KACR;IAED,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,YAAY,CAAC,GAAG,KAAK,CAAC;IAEvD,IAAI,OAAO,KAAK,OAAO,EAAE;QACvB,OAAO;KACR;IAED,MAAM,UAAU,GAAG,QAAQ,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;IAE9C,MAAM,WAAW,GAAgB;QAC/B,MAAM;QACN,OAAO;QACP,UAAU;KACX,CAAC;IAEF,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;;;;AAKA,SAAgB,oBA
AoB,CAAC,WAAwB;IAC3D,MAAM,aAAa,GAAa,EAAE,CAAC;IACnC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;QACxB,aAAa,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;KAC/B;IACD,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;QACvB,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC9B;IAED,IAAI,aAAa,CAAC,MAAM,EAAE;QACxB,OAAO;KACR;IAED,MAAM,KAAK,GAAG,WAAW,CAAC,UAAU,iBAAoB;IACxD,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IACpC,MAAM,UAAU,GAAG,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,IAAI,QAAQ,EAAE,GAAG,QAAQ,CAAC;;IAGrE,OAAO,GAAG,OAAO,IAAI,WAAW,CAAC,OAAO,IAAI,WAAW,CAAC,MAAM,IAAI,UAAU,EAAE,CAAC;AACjF,CAAC;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-tracing/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 00000000..0e425423 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-tracing/node_modules/tslib/LICENSE.txt new file mode 100644 index 00000000..bfe6430c --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/README.md b/node_modules/@azure/core-tracing/node_modules/tslib/README.md new file mode 100644 index 00000000..72ff8e79 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. 
+When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/modules/index.js b/node_modules/@azure/core-tracing/node_modules/tslib/modules/index.js new file mode 100644 index 00000000..aaac8bf9 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/modules/package.json b/node_modules/@azure/core-tracing/node_modules/tslib/modules/package.json new file mode 100644 index 00000000..aafa0e4b --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/package.json b/node_modules/@azure/core-tracing/node_modules/tslib/package.json new file mode 100644 index 00000000..0ec2c634 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.d.ts new file mode 100644 index 00000000..b8e49f08 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. 
+ */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. 
+ * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. 
+ * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. 
+ */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.html new file mode 100644 index 00000000..b122e41b --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.js new file mode 100644 index 00000000..e6d77771 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.html b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.html new file mode 100644 index 00000000..44c9ba51 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.js b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.js new file mode 100644 index 00000000..2b7885c1 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-tracing/package.json b/node_modules/@azure/core-tracing/package.json new file mode 100644 index 00000000..1cd7235e --- /dev/null +++ b/node_modules/@azure/core-tracing/package.json @@ -0,0 +1,99 @@ +{ + "name": "@azure/core-tracing", + "version": "1.0.0-preview.13", + "description": "Provides low-level interfaces and helper methods for tracing in Azure SDK", + "sdk-type": "client", + "main": "dist/index.js", + "module": "dist-esm/src/index.js", + "browser": { + "./dist-esm/src/utils/global.js": "./dist-esm/src/utils/global.browser.js" + }, + "types": "types/core-tracing.d.ts", + "scripts": { + "audit": "node 
../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p . && rollup -c 2>&1", + "build": "tsc -p . && rollup -c 2>&1 && api-extractor run --local", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "prebuild": "npm run clean", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . && npm run unit-test:node && rollup -c 2>&1 && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "files": [ + "dist/", + "dist-esm/src/", + "types/core-tracing.d.ts", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "tracing", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=12.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/README.md", + "sideEffects": false, + "dependencies": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/core-auth": "^1.3.0", + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "7.7.11", + "@opentelemetry/tracing": "^0.22.0", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "eslint": "^7.15.0", + "inherits": "^2.0.3", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^1.18.0", + "prettier": "^1.16.4", + "rimraf": "^3.0.0", + "rollup": "^1.16.3", + "typescript": "~4.2.0", + "util": "^0.12.1", + 
"typedoc": "0.15.2", + "sinon": "^9.0.2", + "@types/sinon": "^9.0.4" + } +} diff --git a/node_modules/@azure/core-tracing/types/core-tracing.d.ts b/node_modules/@azure/core-tracing/types/core-tracing.d.ts new file mode 100644 index 00000000..bdae78d0 --- /dev/null +++ b/node_modules/@azure/core-tracing/types/core-tracing.d.ts @@ -0,0 +1,531 @@ + +/** + * OpenTelemetry compatible interface for Context + */ +export declare interface Context { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} + +/** Entrypoint for context API */ +export declare const context: ContextAPI; + +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare interface ContextAPI { + /** + * Get the currently active context + */ + active(): Context; +} + +/** + * Creates a function that can be used to create spans using the global tracer. + * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +export declare function createSpanFunction(args: CreateSpanFunctionArgs): (operationName: string, operationOptions: T | undefined) => { + span: Span; + updatedOptions: T; +}; + +/** + * Arguments for `createSpanFunction` that allow you to specify the + * prefix for each created span as well as the `az.namespace` attribute. + * + * @hidden + */ +export declare interface CreateSpanFunctionArgs { + /** + * Package name prefix. + * + * NOTE: if this is empty no prefix will be applied to created Span names. + */ + packagePrefix: string; + /** + * Service namespace + * + * NOTE: if this is empty no `az.namespace` attribute will be added to created Spans. + */ + namespace: string; +} + +/** + * An Exception for a Span. + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; + +/** + * An Exception with a code. + */ +export declare interface ExceptionWithCode { + /** The code. */ + code: string | number; + /** The name. */ + name?: string; + /** The message. */ + message?: string; + /** The stack. */ + stack?: string; +} + +/** + * An Exception with a message. + */ +export declare interface ExceptionWithMessage { + /** The code. */ + code?: string | number; + /** The message. */ + message: string; + /** The name. */ + name?: string; + /** The stack. */ + stack?: string; +} + +/** + * An Exception with a name. + */ +export declare interface ExceptionWithName { + /** The code. */ + code?: string | number; + /** The message. */ + message?: string; + /** The name. */ + name: string; + /** The stack. 
*/ + stack?: string; +} + +/** + * Generates a `SpanContext` given a `traceparent` header value. + * @param traceParent - Serialized span context data as a `traceparent` header value. + * @returns The `SpanContext` generated from the `traceparent` value. + */ +export declare function extractSpanContextFromTraceParentHeader(traceParentHeader: string): SpanContext | undefined; + +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; + +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; + +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +export declare function getTraceParentHeader(spanContext: SpanContext): string | undefined; + +/** + * Retrieves a tracer from the global tracer provider. + */ +export declare function getTracer(): Tracer; + +/** + * Retrieves a tracer from the global tracer provider. + */ +export declare function getTracer(name: string, version?: string): Tracer; + +/** + * Represents high resolution time. + */ +export declare type HrTime = [number, number]; + +/** + * Returns true of the given {@link SpanContext} is valid. + * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. + * + * @param context - the {@link SpanContext} to validate. + * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +export declare function isSpanContextValid(context: SpanContext): boolean; + +/** + * Used to specify a span that is linked to another. + */ +export declare interface Link { + /** The {@link SpanContext} of a linked span. */ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. */ + attributes?: SpanAttributes; +} + +/** + * Tracing options to set on an operation. + */ +export declare interface OperationTracingOptions { + /** + * OpenTelemetry SpanOptions used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * OpenTelemetry context to use for created Spans. + */ + tracingContext?: Context; +} + +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; + +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; + +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export declare interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. 
+ * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key - the key for this attribute. + * @param value - the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes - the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name - the name of the event. + * @param attributesOrStartTime - the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is TimeInput and 3rd param is undefined + * @param startTime - start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status - the SpanStatus to set. + */ + setStatus(status: SpanStatus): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param endTime - the time to set as Span's end time. If not provided, + * use the current time as the span's end time. + */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception - the exception the only accepted values are string or Error + * @param time - the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name - the Span name. + */ + updateName(name: string): this; +} + +/** + * Attributes for a Span. + */ +export declare interface SpanAttributes { + /** + * Attributes for a Span. + */ + [attributeKey: string]: SpanAttributeValue | undefined; +} + +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type SpanAttributeValue = string | number | boolean | Array | Array | Array; + +/** + * A light interface that tries to be structurally compatible with OpenTelemetry + */ +export declare interface SpanContext { + /** + * UUID of a trace. + */ + traceId: string; + /** + * UUID of a Span. 
+ */ + spanId: string; + /** + * https://www.w3.org/TR/trace-context/#trace-flags + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} + +/** + * The kind of span. + */ +export declare enum SpanKind { + /** Default value. Indicates that the span is used internally. */ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} + +/** + * An interface that enables manual propagation of Spans + */ +export declare interface SpanOptions { + /** + * Attributes to set on the Span + */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** + * The type of Span. Default to SpanKind.INTERNAL + */ + kind?: SpanKind; + /** + * A manually specified start time for the created `Span` object. + */ + startTime?: TimeInput; +} + +/** + * The status for a span. + */ +export declare interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} + +/** SpanStatusCode */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} + +/** + * Used to represent a Time. + */ +export declare type TimeInput = HrTime | number | Date; + +/** + * Shorthand enum for common traceFlags values inside SpanContext + */ +export declare const enum TraceFlags { + /** No flag set. */ + NONE = 0, + /** Caller is collecting trace information. */ + SAMPLED = 1 +} + +/** + * A Tracer. + */ +export declare interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method does NOT modify the current Context. 
+ * + * @param name - The name of the span + * @param options - SpanOptions used for span creation + * @param context - Context to use to extract parent + * @returns The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; +} + +/** + * TraceState. + */ +export declare interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key - key of the TraceState entry. + * @param value - value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key - the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key - with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. + */ + serialize(): string; +} + +export { } diff --git a/node_modules/@azure/logger/CHANGELOG.md b/node_modules/@azure/logger/CHANGELOG.md new file mode 100644 index 00000000..85958736 --- /dev/null +++ b/node_modules/@azure/logger/CHANGELOG.md @@ -0,0 +1,27 @@ +# Release History + +## 1.0.3 (2021-09-30) + +### Other Changes + +- Updates package to work with the react native bundler. [PR #17783](https://github.com/Azure/azure-sdk-for-js/pull/17783) + +## 1.0.2 (2021-03-04) + +- When logging in the browser, the default log function will now log messages to the corresponding console log function (e.g. `info` level is sent to `console.info()`.) PR [#14103](https://github.com/Azure/azure-sdk-for-js/pull/14103) + +## 1.0.1 (2021-01-07) + +- Updates the `tslib` dependency to version 2.x. + +## 1.0.0 (2019-10-29) + +This release marks the general availability of the `@azure/logging` package. + +- Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel. + ([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860)) + +## 1.0.0-preview.1 (2019-10-22) + +Provides methods to set log levels that enable logs in Azure SDKs that support logging. +Also supports redirecting log outputs via a method override. 
diff --git a/node_modules/@azure/logger/LICENSE b/node_modules/@azure/logger/LICENSE new file mode 100644 index 00000000..ea8fb151 --- /dev/null +++ b/node_modules/@azure/logger/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/logger/README.md b/node_modules/@azure/logger/README.md new file mode 100644 index 00000000..b1b3b5a2 --- /dev/null +++ b/node_modules/@azure/logger/README.md @@ -0,0 +1,88 @@ +# Azure Logger client library for JavaScript + +The `@azure/logger` package can be used to enable logging in the Azure SDKs for JavaScript. + +Logging can be enabled for the Azure SDK in the following ways: + +- Setting the AZURE_LOG_LEVEL environment variable +- Calling setLogLevel imported from "@azure/logger" +- Calling enable() on specific loggers +- Using the `DEBUG` environment variable. + +Note that AZURE_LOG_LEVEL, if set, takes precedence over DEBUG. Only use DEBUG without specifying AZURE_LOG_LEVEL or calling setLogLevel. + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/logger +``` + +## Key Concepts + +The `@azure/logger` package supports the following log levels +specified in order of most verbose to least verbose: + +- verbose +- info +- warning +- error + +When setting a log level, either programmatically or via the `AZURE_LOG_LEVEL` environment variable, +any logs that are written using a log level equal to or less than the one you choose +will be emitted. + +For example, setting the log level to `warning` will cause all logs that have the log +level `warning` or `error` to be emitted. + +## Examples + +### Example 1 - basic usage + +```js +const { EventHubClient } = require('@azure/event-hubs'); + +const logger = require('@azure/logger'); +logger.setLogLevel('info'); + +// operations will now emit info, warning, and error logs +const client = new EventHubClient(/* params */); +client.getPartitionIds() + .then(ids => { /* do work */ }) + .catch(e => { /* do work */ }); +}); +``` + +### Example 2 - redirect log output + +```js +const { AzureLogger, setLogLevel } = require("@azure/logger"); + +setLogLevel("verbose"); + +// override logging to output to console.log (default location is stderr) +AzureLogger.log = (...args) => { + console.log(...args); +}; +``` + +Using `AzureLogger`, it is possible to redirect the logging output from the Azure SDKs by +overriding the `AzureLogger.log` method. 
This may be useful if you want to redirect logs to +a location other than stderr. + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Flogger%2FREADME.png) diff --git a/node_modules/@azure/logger/dist-esm/src/debug.js b/node_modules/@azure/logger/dist-esm/src/debug.js new file mode 100644 index 00000000..fecbff47 --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/debug.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { log } from "./log"; +const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} +export default debugObj; +//# sourceMappingURL=debug.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/debug.js.map b/node_modules/@azure/logger/dist-esm/src/debug.js.map new file mode 100644 index 00000000..63124740 --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/debug.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"debug.js","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAgE5B,MAAM,gBAAgB,GACpB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,SAAS,CAAC;AAEpF,IAAI,aAAiC,CAAC;AACtC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,MAAM,SAAS,GAAe,EAAE,CAAC;AAEjC,IAAI,gBAAgB,EAAE;IACpB,MAAM,CAAC,gBAAgB,CAAC,CAAC;CAC1B;AAED,MAAM,QAAQ,GAAU,MAAM,CAAC,MAAM,CACnC,CAAC,SAAiB,EAAY,EAAE;IAC9B,OAAO,cAAc,CAAC,SAAS,CAAC,CAAC;AACnC,CAAC,EACD;IACE,MAAM;IACN,OAAO;IACP,OAAO;IACP,GAAG;CACJ,CACF,CAAC;AAEF,SAAS,MAAM,CAAC,UAAkB;IAChC,aAAa,GAAG,UAAU,CAAC;IAC3B,iBAAiB,GAAG,EAAE,CAAC;IACvB,iBAAiB,GAAG,EAAE,CAAC;IACvB,MAAM,QAAQ,GAAG,KAAK,CAAC;IACvB,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IAC5F,KAAK,MAAM,EAAE,IAAI,aAAa,EAAE;QAC9B,IAAI,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACtB,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;SACzD;aAAM;YACL,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;SAC/C;KACF;IACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;QAChC,QAAQ,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;KAChD;AACH,CAAC;AAED,SAAS,OAAO,CAAC,SAAiB;IAChC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;QAC3B,OAAO,IAAI,CAAC;KACb;IAED,KAAK,MAAM,OAAO,IAAI,iBAAiB,EAAE;QACvC,IAAI,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;YAC3B,OAAO,KAAK,CAAC;SACd;KACF;IACD,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE;QAChD,IAAI,gBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;YACpC,OAAO,IAAI,CAAC;SACb;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,OAAO;IACd,MAAM,MAAM,GAAG,aAAa,IAAI,EAAE,CAAC;IACnC,MAAM,CAAC,EAAE,CAAC,CAAC;IACX,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,WAAW,GAAa,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;QACjD,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC;QAC3B,OAAO;QACP,GAAG,EAAE,QAAQ,CAAC,GAAG;QACjB,SAAS;QACT,MAAM;KACP,CAAC,CAAC;IAEH,SAAS,KAAK,CAAC,GAAG,IAAW;QAC3B,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;YACxB,OAAO;SACR;QACD,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;YACnB,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;SACrC;QACD,WAAW,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IAC3B,CAAC;IAED,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;IAE5B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,OAAO;IACd,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,KAAK,IAAI,CAAC,EAAE;QACd,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,MAAM,CAAiB,SAAiB;IAC/C,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,IAAI,CAAC,SAAS,IAAI,SAAS,EAAE,CAAC,CAAC;IACrE,WAAW,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC;IAC3B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,eAAe,QAAQ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { log } from \"./log\";\n\n/**\n * A simple mechanism for enabling logging.\n * Intended to mimic the publicly available `debug` package.\n */\nexport interface Debug {\n /**\n * Creates a new logger with the given namespace.\n */\n (namespace: string): Debugger;\n /**\n * The default log method (defaults to console)\n */\n log: (...args: any[]) => void;\n /**\n * Enables a particular set of namespaces.\n * To enable multiple separate them with commas, e.g. \"info,debug\".\n * Supports wildcards, e.g. \"azure:*\"\n * Supports skip syntax, e.g. 
\"azure:*,-azure:storage:*\" will enable\n * everything under azure except for things under azure:storage.\n */\n enable: (namespaces: string) => void;\n /**\n * Checks if a particular namespace is enabled.\n */\n enabled: (namespace: string) => boolean;\n /**\n * Disables all logging, returns what was previously enabled.\n */\n disable: () => string;\n}\n\n/**\n * A log function that can be dynamically enabled and redirected.\n */\nexport interface Debugger {\n /**\n * Logs the given arguments to the `log` method.\n */\n (...args: any[]): void;\n /**\n * True if this logger is active and logging.\n */\n enabled: boolean;\n /**\n * Used to cleanup/remove this logger.\n */\n destroy: () => boolean;\n /**\n * The current log method. Can be overridden to redirect output.\n */\n log: (...args: any[]) => void;\n /**\n * The namespace of this logger.\n */\n namespace: string;\n /**\n * Extends this logger with a child namespace.\n * Namespaces are separated with a ':' character.\n */\n extend: (namespace: string) => Debugger;\n}\n\nconst debugEnvVariable =\n (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\n\nlet enabledString: string | undefined;\nlet enabledNamespaces: RegExp[] = [];\nlet skippedNamespaces: RegExp[] = [];\nconst debuggers: Debugger[] = [];\n\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\n\nconst debugObj: Debug = Object.assign(\n (namespace: string): Debugger => {\n return createDebugger(namespace);\n },\n {\n enable,\n enabled,\n disable,\n log\n }\n);\n\nfunction enable(namespaces: string): void {\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n const wildcard = /\\*/g;\n const namespaceList = namespaces.split(\",\").map((ns) => ns.trim().replace(wildcard, \".*?\"));\n for (const ns of namespaceList) {\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`));\n } else {\n enabledNamespaces.push(new RegExp(`^${ns}$`));\n }\n }\n for (const instance of debuggers) {\n instance.enabled = enabled(instance.namespace);\n }\n}\n\nfunction enabled(namespace: string): boolean {\n if (namespace.endsWith(\"*\")) {\n return true;\n }\n\n for (const skipped of skippedNamespaces) {\n if (skipped.test(namespace)) {\n return false;\n }\n }\n for (const enabledNamespace of enabledNamespaces) {\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n return false;\n}\n\nfunction disable(): string {\n const result = enabledString || \"\";\n enable(\"\");\n return result;\n}\n\nfunction createDebugger(namespace: string): Debugger {\n const newDebugger: Debugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy,\n log: debugObj.log,\n namespace,\n extend\n });\n\n function debug(...args: any[]): void {\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = `${namespace} ${args[0]}`;\n }\n newDebugger.log(...args);\n }\n\n debuggers.push(newDebugger);\n\n return newDebugger;\n}\n\nfunction destroy(this: Debugger): boolean {\n const index = debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\n\nfunction extend(this: Debugger, namespace: string): Debugger {\n const newDebugger = createDebugger(`${this.namespace}:${namespace}`);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\nexport default debugObj;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/index.js b/node_modules/@azure/logger/dist-esm/src/index.js new file mode 100644 index 
00000000..116b59e7 --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/index.js @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import debug from "./debug"; +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export const AzureLogger = debug("azure"); +AzureLogger.log = (...args) => { + debug.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debug.enable(enabledNamespaces.join(",")); +} +/** + * Retrieves the currently specified log level. + */ +export function getLogLevel() { + return azureLogLevel; +} +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100 +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
+ * @hidden + */ +export function createClientLogger(namespace) { + const clientRootLogger = AzureLogger.extend(namespace); + patchLogMethod(AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose") + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug.disable(); + debug.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) { + return true; + } + else { + return false; + } +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/index.js.map b/node_modules/@azure/logger/dist-esm/src/index.js.map new file mode 100644 index 00000000..a9d7880e --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAmB,MAAM,SAAS,CAAC;AAG1C,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAiB,CAAC;AACnD,MAAM,eAAe,GACnB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,SAAS,CAAC;AAE9F,IAAI,aAAwC,CAAC;AAE7C;;;;GAIG;AACH,MAAM,CAAC,MAAM,WAAW,GAAsB,KAAK,CAAC,OAAO,CAAC,CAAC;AAC7D,WAAW,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;IAC5B,KAAK,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;AACrB,CAAC,CAAC;AAWF,MAAM,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;AASjE,IAAI,eAAe,EAAE;IACnB,0FAA0F;IAC1F,IAAI,eAAe,CAAC,eAAe,CAAC,EAAE;QACpC,WAAW,CAAC,eAAe,CAAC,CAAC;KAC9B;SAAM;QACL,OAAO,CAAC,KAAK,CACX,6CAA6C,eAAe,iDAAiD,gBAAgB,CAAC,IAAI,CAChI,IAAI,CACL,GAAG,CACL,CAAC;KACH;CACF;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,WAAW,CAAC,KAAqB;IAC/C,IAAI,KAAK,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,EAAE;QACpC,MAAM,IAAI,KAAK,CACb,sBAAsB,KAAK,yBAAyB,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CACjF,CAAC;KACH;IACD,aAAa,GAAG,KAAK,CAAC;IAEtB,MAAM,iBAAiB,GAAG,EAAE,CAAC;IAC7B,KAAK,MAAM,MAAM,IAAI,iBAAiB,EAAE;QACtC,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE;YACxB,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SAC1C;KACF;IAED,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,OAAO,aAAa,CAAC;AACvB,CAAC;AAED,MAAM,QAAQ,GAAG;IACf,OAAO,EAAE,GAAG;IACZ,IAAI,EAAE,GAAG;IACT,OAAO,EAAE,GAAG;IACZ,KAAK,EAAE,GAAG;CACX,CAAC;AA8BF;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,SAAiB;IAClD,MAAM,gBAAgB,GAAsB,WAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC1E,cAAc,CAAC,WAAW,EAAE,gBAAgB,CAAC,CAAC;IAC9C,OAAO;QACL,KAAK,EAAE,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC;QAC9C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;QAClD,IAAI,EAAE,YAAY,CAAC,gBAAgB,EAAE,MAAM,CAAC;QAC5C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;KACnD,CAAC;AACJ,CAAC;AAED,SAAS,cAAc,CAAC,MAAyB,EAAE,KAAwC;IACzF,KAAK,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;QACtB,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IACtB,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,MAAyB,EAAE,KAAoB;IACnE,MAAM,MAAM,GAAkB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;QAChE,KAAK;KACN,CAAC,CAAC;IAEH,cAAc,CAAC,MAAM,EAAE,MAA
M,CAAC,CAAC;IAE/B,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE;QACxB,MAAM,iBAAiB,GAAG,KAAK,CAAC,OAAO,EAAE,CAAC;QAC1C,KAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC;KAC1D;IAED,iBAAiB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAE9B,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,YAAY,CAAC,MAAqB;IACzC,IAAI,aAAa,IAAI,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,aAAa,CAAC,EAAE;QACtE,OAAO,IAAI,CAAC;KACb;SAAM;QACL,OAAO,KAAK,CAAC;KACd;AACH,CAAC;AAED,SAAS,eAAe,CAAC,QAAgB;IACvC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,QAAe,CAAC,CAAC;AACpD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport debug, { Debugger } from \"./debug\";\nexport { Debugger } from \"./debug\";\n\nconst registeredLoggers = new Set();\nconst logLevelFromEnv =\n (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\n\nlet azureLogLevel: AzureLogLevel | undefined;\n\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nexport const AzureLogger: AzureClientLogger = debug(\"azure\");\nAzureLogger.log = (...args) => {\n debug.log(...args);\n};\n\n/**\n * The log levels supported by the logger.\n * The log levels in order of most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport type AzureLogLevel = \"verbose\" | \"info\" | \"warning\" | \"error\";\nconst AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\n\ntype AzureDebugger = Debugger & { level: AzureLogLevel };\n\n/**\n * An AzureClientLogger is a function that can log to an appropriate severity level.\n */\nexport type AzureClientLogger = Debugger;\n\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n } else {\n console.error(\n `AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(\n \", \"\n )}.`\n );\n }\n}\n\n/**\n * Immediately enables logging at the specified log level.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport function setLogLevel(level?: AzureLogLevel): void {\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\n `Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(\",\")}`\n );\n }\n azureLogLevel = level;\n\n const enabledNamespaces = [];\n for (const logger of registeredLoggers) {\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n\n debug.enable(enabledNamespaces.join(\",\"));\n}\n\n/**\n * Retrieves the currently specified log level.\n */\nexport function getLogLevel(): AzureLogLevel | undefined {\n return azureLogLevel;\n}\n\nconst levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100\n};\n\n/**\n * Defines the methods available on the SDK-facing logger.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface AzureLogger {\n /**\n * Used for failures the program is unlikely to recover from,\n * such as Out of Memory.\n */\n error: Debugger;\n /**\n * Used when a function fails to perform its intended task.\n * Usually this means the function will throw an exception.\n * Not used for self-healing events (e.g. 
automatic retry)\n */\n warning: Debugger;\n /**\n * Used when a function operates normally.\n */\n info: Debugger;\n /**\n * Used for detailed trbouleshooting scenarios. This is\n * intended for use by developers / system administrators\n * for diagnosing specific failures.\n */\n verbose: Debugger;\n}\n\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nexport function createClientLogger(namespace: string): AzureLogger {\n const clientRootLogger: AzureClientLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\")\n };\n}\n\nfunction patchLogMethod(parent: AzureClientLogger, child: AzureClientLogger | AzureDebugger): void {\n child.log = (...args) => {\n parent.log(...args);\n };\n}\n\nfunction createLogger(parent: AzureClientLogger, level: AzureLogLevel): AzureDebugger {\n const logger: AzureDebugger = Object.assign(parent.extend(level), {\n level\n });\n\n patchLogMethod(parent, logger);\n\n if (shouldEnable(logger)) {\n const enabledNamespaces = debug.disable();\n debug.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n\n registeredLoggers.add(logger);\n\n return logger;\n}\n\nfunction shouldEnable(logger: AzureDebugger): boolean {\n if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) {\n return true;\n } else {\n return false;\n }\n}\n\nfunction isAzureLogLevel(logLevel: string): logLevel is AzureLogLevel {\n return AZURE_LOG_LEVELS.includes(logLevel as any);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/log.browser.js b/node_modules/@azure/logger/dist-esm/src/log.browser.js new file mode 100644 index 00000000..3f69bb23 --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/log.browser.js @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export function log(...args) { + if (args.length > 0) { + const firstArg = String(args[0]); + if (firstArg.includes(":error")) { + console.error(...args); + } + else if (firstArg.includes(":warning")) { + console.warn(...args); + } + else if (firstArg.includes(":info")) { + console.info(...args); + } + else if (firstArg.includes(":verbose")) { + console.debug(...args); + } + else { + console.debug(...args); + } + } +} +//# sourceMappingURL=log.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/log.browser.js.map b/node_modules/@azure/logger/dist-esm/src/log.browser.js.map new file mode 100644 index 00000000..4542a4e8 --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/log.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.browser.js","sourceRoot":"","sources":["../../src/log.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,UAAU,GAAG,CAAC,GAAG,IAAW;IAChC,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;QACnB,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;QACjC,IAAI,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YAC/B,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;SACxB;aAAM,IAAI,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;SACvB;aAAM,IAAI,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;YACrC,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;SACvB;aAAM,IAAI,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;YACxC,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;SACxB;aAAM;YACL,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;SACxB;KACF;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport function log(...args: any[]): void {\n if (args.length > 0) {\n const firstArg = String(args[0]);\n if (firstArg.includes(\":error\")) {\n console.error(...args);\n } else if (firstArg.includes(\":warning\")) {\n console.warn(...args);\n } else if (firstArg.includes(\":info\")) {\n console.info(...args);\n } else if (firstArg.includes(\":verbose\")) {\n console.debug(...args);\n } else {\n console.debug(...args);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/log.js b/node_modules/@azure/logger/dist-esm/src/log.js new file mode 100644 index 00000000..36c93344 --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/log.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import util from "util"; +import { EOL } from "os"; +export function log(message, ...args) { + process.stderr.write(`${util.format(message, ...args)}${EOL}`); +} +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/log.js.map b/node_modules/@azure/logger/dist-esm/src/log.js.map new file mode 100644 index 00000000..4b178616 --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAE,GAAG,EAAE,MAAM,IAAI,CAAC;AAEzB,MAAM,UAAU,GAAG,CAAC,OAAgB,EAAE,GAAG,IAAW;IAClD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC;AACjE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport util from \"util\";\nimport { EOL } from \"os\";\n\nexport function log(message: unknown, ...args: any[]): void {\n process.stderr.write(`${util.format(message, ...args)}${EOL}`);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/index.js b/node_modules/@azure/logger/dist/index.js new file mode 100644 index 00000000..327fbdbe --- /dev/null +++ b/node_modules/@azure/logger/dist/index.js @@ -0,0 +1,210 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var util = _interopDefault(require('util')); +var os = require('os'); + +// Copyright (c) Microsoft Corporation. +function log(message, ...args) { + process.stderr.write(`${util.format(message, ...args)}${os.EOL}`); +} + +// Copyright (c) Microsoft Corporation. 
+const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} + +// Copyright (c) Microsoft Corporation. +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +const AzureLogger = debugObj("azure"); +AzureLogger.log = (...args) => { + debugObj.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. 
Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debugObj.enable(enabledNamespaces.join(",")); +} +/** + * Retrieves the currently specified log level. + */ +function getLogLevel() { + return azureLogLevel; +} +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100 +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. + * @hidden + */ +function createClientLogger(namespace) { + const clientRootLogger = AzureLogger.extend(namespace); + patchLogMethod(AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose") + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debugObj.disable(); + debugObj.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) { + return true; + } + else { + return false; + } +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} + +exports.AzureLogger = AzureLogger; +exports.createClientLogger = createClientLogger; +exports.getLogLevel = getLogLevel; +exports.setLogLevel = setLogLevel; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/logger/dist/index.js.map b/node_modules/@azure/logger/dist/index.js.map new file mode 100644 index 00000000..9c744a9d --- /dev/null +++ b/node_modules/@azure/logger/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/log.ts","../src/debug.ts","../src/index.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport util from \"util\";\nimport { EOL } from \"os\";\n\nexport function log(message: unknown, ...args: any[]): void {\n process.stderr.write(`${util.format(message, ...args)}${EOL}`);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { log } from \"./log\";\n\n/**\n * A simple mechanism for enabling logging.\n * Intended to mimic the publicly available `debug` package.\n */\nexport interface Debug {\n /**\n * Creates a new logger with the given namespace.\n */\n (namespace: string): Debugger;\n /**\n * The default log method (defaults to console)\n */\n log: (...args: any[]) => void;\n /**\n * Enables a particular set of namespaces.\n * To enable multiple separate them with commas, e.g. \"info,debug\".\n * Supports wildcards, e.g. \"azure:*\"\n * Supports skip syntax, e.g. 
\"azure:*,-azure:storage:*\" will enable\n * everything under azure except for things under azure:storage.\n */\n enable: (namespaces: string) => void;\n /**\n * Checks if a particular namespace is enabled.\n */\n enabled: (namespace: string) => boolean;\n /**\n * Disables all logging, returns what was previously enabled.\n */\n disable: () => string;\n}\n\n/**\n * A log function that can be dynamically enabled and redirected.\n */\nexport interface Debugger {\n /**\n * Logs the given arguments to the `log` method.\n */\n (...args: any[]): void;\n /**\n * True if this logger is active and logging.\n */\n enabled: boolean;\n /**\n * Used to cleanup/remove this logger.\n */\n destroy: () => boolean;\n /**\n * The current log method. Can be overridden to redirect output.\n */\n log: (...args: any[]) => void;\n /**\n * The namespace of this logger.\n */\n namespace: string;\n /**\n * Extends this logger with a child namespace.\n * Namespaces are separated with a ':' character.\n */\n extend: (namespace: string) => Debugger;\n}\n\nconst debugEnvVariable =\n (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\n\nlet enabledString: string | undefined;\nlet enabledNamespaces: RegExp[] = [];\nlet skippedNamespaces: RegExp[] = [];\nconst debuggers: Debugger[] = [];\n\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\n\nconst debugObj: Debug = Object.assign(\n (namespace: string): Debugger => {\n return createDebugger(namespace);\n },\n {\n enable,\n enabled,\n disable,\n log\n }\n);\n\nfunction enable(namespaces: string): void {\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n const wildcard = /\\*/g;\n const namespaceList = namespaces.split(\",\").map((ns) => ns.trim().replace(wildcard, \".*?\"));\n for (const ns of namespaceList) {\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`));\n } else {\n enabledNamespaces.push(new RegExp(`^${ns}$`));\n }\n }\n for (const instance of debuggers) {\n instance.enabled = enabled(instance.namespace);\n }\n}\n\nfunction enabled(namespace: string): boolean {\n if (namespace.endsWith(\"*\")) {\n return true;\n }\n\n for (const skipped of skippedNamespaces) {\n if (skipped.test(namespace)) {\n return false;\n }\n }\n for (const enabledNamespace of enabledNamespaces) {\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n return false;\n}\n\nfunction disable(): string {\n const result = enabledString || \"\";\n enable(\"\");\n return result;\n}\n\nfunction createDebugger(namespace: string): Debugger {\n const newDebugger: Debugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy,\n log: debugObj.log,\n namespace,\n extend\n });\n\n function debug(...args: any[]): void {\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = `${namespace} ${args[0]}`;\n }\n newDebugger.log(...args);\n }\n\n debuggers.push(newDebugger);\n\n return newDebugger;\n}\n\nfunction destroy(this: Debugger): boolean {\n const index = debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\n\nfunction extend(this: Debugger, namespace: string): Debugger {\n const newDebugger = createDebugger(`${this.namespace}:${namespace}`);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\nexport default debugObj;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport debug, { Debugger } from \"./debug\";\nexport { Debugger } from 
\"./debug\";\n\nconst registeredLoggers = new Set();\nconst logLevelFromEnv =\n (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\n\nlet azureLogLevel: AzureLogLevel | undefined;\n\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nexport const AzureLogger: AzureClientLogger = debug(\"azure\");\nAzureLogger.log = (...args) => {\n debug.log(...args);\n};\n\n/**\n * The log levels supported by the logger.\n * The log levels in order of most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport type AzureLogLevel = \"verbose\" | \"info\" | \"warning\" | \"error\";\nconst AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\n\ntype AzureDebugger = Debugger & { level: AzureLogLevel };\n\n/**\n * An AzureClientLogger is a function that can log to an appropriate severity level.\n */\nexport type AzureClientLogger = Debugger;\n\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n } else {\n console.error(\n `AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(\n \", \"\n )}.`\n );\n }\n}\n\n/**\n * Immediately enables logging at the specified log level.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport function setLogLevel(level?: AzureLogLevel): void {\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\n `Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(\",\")}`\n );\n }\n azureLogLevel = level;\n\n const enabledNamespaces = [];\n for (const logger of registeredLoggers) {\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n\n debug.enable(enabledNamespaces.join(\",\"));\n}\n\n/**\n * Retrieves the currently specified log level.\n */\nexport function getLogLevel(): AzureLogLevel | undefined {\n return azureLogLevel;\n}\n\nconst levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100\n};\n\n/**\n * Defines the methods available on the SDK-facing logger.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface AzureLogger {\n /**\n * Used for failures the program is unlikely to recover from,\n * such as Out of Memory.\n */\n error: Debugger;\n /**\n * Used when a function fails to perform its intended task.\n * Usually this means the function will throw an exception.\n * Not used for self-healing events (e.g. automatic retry)\n */\n warning: Debugger;\n /**\n * Used when a function operates normally.\n */\n info: Debugger;\n /**\n * Used for detailed trbouleshooting scenarios. 
This is\n * intended for use by developers / system administrators\n * for diagnosing specific failures.\n */\n verbose: Debugger;\n}\n\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nexport function createClientLogger(namespace: string): AzureLogger {\n const clientRootLogger: AzureClientLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\")\n };\n}\n\nfunction patchLogMethod(parent: AzureClientLogger, child: AzureClientLogger | AzureDebugger): void {\n child.log = (...args) => {\n parent.log(...args);\n };\n}\n\nfunction createLogger(parent: AzureClientLogger, level: AzureLogLevel): AzureDebugger {\n const logger: AzureDebugger = Object.assign(parent.extend(level), {\n level\n });\n\n patchLogMethod(parent, logger);\n\n if (shouldEnable(logger)) {\n const enabledNamespaces = debug.disable();\n debug.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n\n registeredLoggers.add(logger);\n\n return logger;\n}\n\nfunction shouldEnable(logger: AzureDebugger): boolean {\n if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) {\n return true;\n } else {\n return false;\n }\n}\n\nfunction isAzureLogLevel(logLevel: string): logLevel is AzureLogLevel {\n return AZURE_LOG_LEVELS.includes(logLevel as any);\n}\n"],"names":["EOL","debug"],"mappings":";;;;;;;;;AAAA;SAMgB,GAAG,CAAC,OAAgB,EAAE,GAAG,IAAW;IAClD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,GAAGA,MAAG,EAAE,CAAC,CAAC;AACjE;;ACRA;AACA,AAkEA,MAAM,gBAAgB,GACpB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,KAAK,KAAK,SAAS,CAAC;AAEpF,IAAI,aAAiC,CAAC;AACtC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,MAAM,SAAS,GAAe,EAAE,CAAC;AAEjC,IAAI,gBAAgB,EAAE;IACpB,MAAM,CAAC,gBAAgB,CAAC,CAAC;CAC1B;AAED,MAAM,QAAQ,GAAU,MAAM,CAAC,MAAM,CACnC,CAAC,SAAiB;IAChB,OAAO,cAAc,CAAC,SAAS,CAAC,CAAC;AACnC,CAAC,EACD;IACE,MAAM;IACN,OAAO;IACP,OAAO;IACP,GAAG;CACJ,CACF,CAAC;AAEF,SAAS,MAAM,CAAC,UAAkB;IAChC,aAAa,GAAG,UAAU,CAAC;IAC3B,iBAAiB,GAAG,EAAE,CAAC;IACvB,iBAAiB,GAAG,EAAE,CAAC;IACvB,MAAM,QAAQ,GAAG,KAAK,CAAC;IACvB,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IAC5F,KAAK,MAAM,EAAE,IAAI,aAAa,EAAE;QAC9B,IAAI,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACtB,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;SACzD;aAAM;YACL,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;SAC/C;KACF;IACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;QAChC,QAAQ,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;KAChD;AACH,CAAC;AAED,SAAS,OAAO,CAAC,SAAiB;IAChC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;QAC3B,OAAO,IAAI,CAAC;KACb;IAED,KAAK,MAAM,OAAO,IAAI,iBAAiB,EAAE;QACvC,IAAI,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;YAC3B,OAAO,KAAK,CAAC;SACd;KACF;IACD,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE;QAChD,IAAI,gBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;YACpC,OAAO,IAAI,CAAC;SACb;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,OAAO;IACd,MAAM,MAAM,GAAG,aAAa,IAAI,EAAE,CAAC;IACnC,MAAM,CAAC,EAAE,CAAC,CAAC;IACX,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,WAAW,GAAa,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;QACjD,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC;QAC3B,OAAO;
QACP,GAAG,EAAE,QAAQ,CAAC,GAAG;QACjB,SAAS;QACT,MAAM;KACP,CAAC,CAAC;IAEH,SAAS,KAAK,CAAC,GAAG,IAAW;QAC3B,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;YACxB,OAAO;SACR;QACD,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;YACnB,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;SACrC;QACD,WAAW,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;KAC1B;IAED,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;IAE5B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,OAAO;IACd,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,KAAK,IAAI,CAAC,EAAE;QACd,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,MAAM,CAAiB,SAAiB;IAC/C,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,IAAI,CAAC,SAAS,IAAI,SAAS,EAAE,CAAC,CAAC;IACrE,WAAW,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC;IAC3B,OAAO,WAAW,CAAC;AACrB,CAAC;;AC1KD;AACA,AAKA,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAiB,CAAC;AACnD,MAAM,eAAe,GACnB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,eAAe,KAAK,SAAS,CAAC;AAE9F,IAAI,aAAwC,CAAC;AAE7C;;;;;AAKA,MAAa,WAAW,GAAsBC,QAAK,CAAC,OAAO,CAAC,CAAC;AAC7D,WAAW,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI;IACxBA,QAAK,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;AACrB,CAAC,CAAC;AAWF,MAAM,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;AASjE,IAAI,eAAe,EAAE;;IAEnB,IAAI,eAAe,CAAC,eAAe,CAAC,EAAE;QACpC,WAAW,CAAC,eAAe,CAAC,CAAC;KAC9B;SAAM;QACL,OAAO,CAAC,KAAK,CACX,6CAA6C,eAAe,iDAAiD,gBAAgB,CAAC,IAAI,CAChI,IAAI,CACL,GAAG,CACL,CAAC;KACH;CACF;AAED;;;;;;;;;AASA,SAAgB,WAAW,CAAC,KAAqB;IAC/C,IAAI,KAAK,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,EAAE;QACpC,MAAM,IAAI,KAAK,CACb,sBAAsB,KAAK,yBAAyB,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CACjF,CAAC;KACH;IACD,aAAa,GAAG,KAAK,CAAC;IAEtB,MAAM,iBAAiB,GAAG,EAAE,CAAC;IAC7B,KAAK,MAAM,MAAM,IAAI,iBAAiB,EAAE;QACtC,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE;YACxB,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SAC1C;KACF;IAEDA,QAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,CAAC;AAED;;;AAGA,SAAgB,WAAW;IACzB,OAAO,aAAa,CAAC;AACvB,CAAC;AAED,MAAM,QAAQ,GAAG;IACf,OAAO,EAAE,GAAG;IACZ,IAAI,EAAE,GAAG;IACT,OAAO,EAAE,GAAG;IACZ,KAAK,EAAE,GAAG;CACX,CAAC;AA8BF;;;;;AAKA,SAAgB,kBAAkB,CAAC,SAAiB;IAClD,MAAM,gBAAgB,GAAsB,WAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC1E,cAAc,CAAC,WAAW,EAAE,gBAAgB,CAAC,CAAC;IAC9C,OAAO;QACL,KAAK,EAAE,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC;QAC9C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;QAClD,IAAI,EAAE,YAAY,CAAC,gBAAgB,EAAE,MAAM,CAAC;QAC5C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;KACnD,CAAC;AACJ,CAAC;AAED,SAAS,cAAc,CAAC,MAAyB,EAAE,KAAwC;IACzF,KAAK,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI;QAClB,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;KACrB,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,MAAyB,EAAE,KAAoB;IACnE,MAAM,MAAM,GAAkB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;QAChE,KAAK;KACN,CAAC,CAAC;IAEH,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/B,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE;QACxB,MAAM,iBAAiB,GAAGA,QAAK,CAAC,OAAO,EAAE,CAAC;QAC1CA,QAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC;KAC1D;IAED,iBAAiB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAE9B,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,YAAY,CAAC,MAAqB;IACzC,IAAI,aAAa,IAAI,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,aAAa,CAAC,EAAE;QACtE,OAAO,IAAI,CAAC;KACb;SAAM;QACL,OAAO,KAAK,CAAC;KACd;AACH,CAAC;AAED,SAAS,eAAe,CAAC,QAAgB;IACvC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,QAAe,CAAC,CAAC;AACpD,CAAC;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/logger/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 00000000..0e425423 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ 
+/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/logger/node_modules/tslib/LICENSE.txt b/node_modules/@azure/logger/node_modules/tslib/LICENSE.txt new file mode 100644 index 00000000..bfe6430c --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/README.md b/node_modules/@azure/logger/node_modules/tslib/README.md new file mode 100644 index 00000000..72ff8e79 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. 
+ +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/logger/node_modules/tslib/modules/index.js b/node_modules/@azure/logger/node_modules/tslib/modules/index.js new file mode 100644 index 00000000..aaac8bf9 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/logger/node_modules/tslib/modules/package.json b/node_modules/@azure/logger/node_modules/tslib/modules/package.json new file mode 100644 index 00000000..aafa0e4b --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/package.json b/node_modules/@azure/logger/node_modules/tslib/package.json new file mode 100644 index 00000000..0ec2c634 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.d.ts b/node_modules/@azure/logger/node_modules/tslib/tslib.d.ts new file mode 100644 index 00000000..b8e49f08 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. 
+ */
+export declare function __values(o: any): any;
+
+/**
+ * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array.
+ *
+ * @param o The object to read from.
+ * @param n The maximum number of arguments to read, defaults to `Infinity`.
+ */
+export declare function __read(o: any, n?: number): any[];
+
+/**
+ * Creates an array from iterable spread.
+ *
+ * @param args The Iterable objects to spread.
+ * @deprecated since TypeScript 4.2 - Use `__spreadArray`
+ */
+export declare function __spread(...args: any[][]): any[];
+
+/**
+ * Creates an array from array spread.
+ *
+ * @param args The ArrayLikes to spread into the resulting array.
+ * @deprecated since TypeScript 4.2 - Use `__spreadArray`
+ */
+export declare function __spreadArrays(...args: any[][]): any[];
+
+/**
+ * Spreads the `from` array into the `to` array.
+ *
+ * @param pack Replace empty elements with `undefined`.
+ */
+export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[];
+
+/**
+ * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded,
+ * and instead should be awaited and the resulting value passed back to the generator.
+ *
+ * @param v The value to await.
+ */
+export declare function __await(v: any): any;
+
+/**
+ * Converts a generator function into an async generator function, by using `yield __await`
+ * in place of normal `await`.
+ *
+ * @param thisArg The reference to use as the `this` value in the generator function
+ * @param _arguments The optional arguments array
+ * @param generator The generator function
+ */
+export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any;
+
+/**
+ * Used to wrap a potentially async iterator in such a way so that it wraps the result
+ * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values.
+ *
+ * @param o The potentially async iterator.
+ * @returns A synchronous iterator yielding `__await` instances on every odd invocation
+ * and returning the awaited `IteratorResult` passed to `next` every even invocation.
+ */
+export declare function __asyncDelegator(o: any): any;
+
+/**
+ * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object.
+ *
+ * @param o The object.
+ * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`.
+ */
+export declare function __asyncValues(o: any): any;
+
+/**
+ * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays.
+ *
+ * @param cooked The cooked possibly-sparse array.
+ * @param raw The raw string content.
+ */
+export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray;
+
+/**
+ * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS.
+ *
+ * ```js
+ * import Default, { Named, Other } from "mod";
+ * // or
+ * import { default as Default, Named, Other } from "mod";
+ * ```
+ *
+ * @param mod The CommonJS module exports object.
+ */
+export declare function __importStar<T>(mod: T): T;
+
+/**
+ * Used to shim default imports in ECMAScript Modules transpiled to CommonJS.
+ *
+ * ```js
+ * import Default from "mod";
+ * ```
+ *
+ * @param mod The CommonJS module exports object.
+ */
+export declare function __importDefault<T>(mod: T): T | { default: T };
+
+/**
+ * Emulates reading a private instance field.
+ *
+ * @param receiver The instance from which to read the private field.
+ * @param state A WeakMap containing the private field value for an instance.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, get(o: T): V | undefined },
+    kind?: "f"
+): V;
+
+/**
+ * Emulates reading a private static field.
+ *
+ * @param receiver The object from which to read the private static field.
+ * @param state The class constructor containing the definition of the static field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates evaluating a private instance "get" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private "get" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates evaluating a private static "get" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "get" accessor.
+ * @param state The class constructor containing the definition of the static "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates reading a private instance method.
+ *
+ * @param receiver The instance from which to read a private method.
+ * @param state A WeakSet used to verify an instance supports the private method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private instance method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates reading a private static method.
+ *
+ * @param receiver The object from which to read the private static method.
+ * @param state The class constructor containing the definition of the static method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private static method.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: T,
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates writing to a private instance field.
+ *
+ * @param receiver The instance on which to set a private field value.
+ * @param state A WeakMap used to store the private field value for an instance.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, set(o: T, value: V): unknown },
+    value: V,
+    kind?: "f"
+): V;
+
+/**
+ * Emulates writing to a private static field.
+ *
+ * @param receiver The object on which to set the private static field.
+ * @param state The class constructor containing the definition of the private static field.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    value: V,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates writing to a private instance "set" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private instance "set" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "set" accessor.
+ * @param value The value to store in the private accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "set" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    value: V,
+    kind: "a",
+    f: (v: V) => void
+): V;
+
+/**
+ * Emulates writing to a private static "set" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "set" accessor.
+ * @param state The class constructor containing the definition of the static "set" accessor.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "set" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    value: V,
+    kind: "a",
+    f: (v: V) => void
+): V;
+
+/**
+ * Checks for the existence of a private field/method/accessor.
+ *
+ * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member.
+ * @param receiver The object for which to test the presence of the private member.
+ */
+export declare function __classPrivateFieldIn(
+    state: (new (...args: any[]) => unknown) | { has(o: any): boolean },
+    receiver: unknown,
+): boolean;
+
+/**
+ * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`.
+ *
+ * @param object The local `exports` object.
+ * @param target The object to re-export from.
+ * @param key The property key of `target` to re-export.
+ * @param objectKey The property key to re-export as. Defaults to `key`.
+ */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.es6.html b/node_modules/@azure/logger/node_modules/tslib/tslib.es6.html new file mode 100644 index 00000000..b122e41b --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.es6.js b/node_modules/@azure/logger/node_modules/tslib/tslib.es6.js new file mode 100644 index 00000000..e6d77771 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.html b/node_modules/@azure/logger/node_modules/tslib/tslib.html new file mode 100644 index 00000000..44c9ba51 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.js b/node_modules/@azure/logger/node_modules/tslib/tslib.js new file mode 100644 index 00000000..2b7885c1 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/logger/package.json b/node_modules/@azure/logger/package.json new file mode 100644 index 00000000..6ca97736 --- /dev/null +++ b/node_modules/@azure/logger/package.json @@ -0,0 +1,109 @@ +{ + "name": "@azure/logger", + "sdk-type": "client", + "version": "1.0.3", + "description": "Microsoft Azure SDK for JavaScript - Logger", + "main": "./dist/index.js", + "module": "dist-esm/src/index.js", + "browser": { + "./dist-esm/src/log.js": "./dist-esm/src/log.browser.js", + "process": false + }, + "react-native": { + "./dist/index.js": "./dist-esm/src/index.js" + }, + "engines": { + "node": ">=12.0.0" + }, + "scripts": { + 
"audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p . && rollup -c 2>&1", + "build": "npm run clean && tsc -p . && rollup -c 2>&1 && api-extractor run --local --local", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "pretest": "npm run build:test", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . && npm run unit-test:node && rollup -c 2>&1 && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "types": "./types/logger.d.ts", + "files": [ + "dist/", + "dist-esm/src/", + "types/logger.d.ts", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "log", + "logger", + "logging", + "node.js", + "typescript", + "javascript", + "browser", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger/README.md", + "sideEffects": false, + "dependencies": { + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "^7.18.11", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "@types/sinon": "^9.0.4", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "delay": "^4.2.0", + "dotenv": "^8.2.0", + "eslint": "^7.15.0", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^1.18.0", + "nyc": "^14.0.0", + "prettier": "^1.16.4", + "puppeteer": "^10.2.0", + 
"rimraf": "^3.0.0", + "rollup": "^1.16.3", + "sinon": "^9.0.2", + "ts-node": "^10.0.0", + "typescript": "~4.2.0", + "typedoc": "0.15.2" + } +} diff --git a/node_modules/@azure/logger/types/logger.d.ts b/node_modules/@azure/logger/types/logger.d.ts new file mode 100644 index 00000000..9e2a63dc --- /dev/null +++ b/node_modules/@azure/logger/types/logger.d.ts @@ -0,0 +1,104 @@ +/** + * An AzureClientLogger is a function that can log to an appropriate severity level. + */ +export declare type AzureClientLogger = Debugger; + +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export declare const AzureLogger: AzureClientLogger; + +/** + * Defines the methods available on the SDK-facing logger. + */ +export declare interface AzureLogger { + /** + * Used for failures the program is unlikely to recover from, + * such as Out of Memory. + */ + error: Debugger; + /** + * Used when a function fails to perform its intended task. + * Usually this means the function will throw an exception. + * Not used for self-healing events (e.g. automatic retry) + */ + warning: Debugger; + /** + * Used when a function operates normally. + */ + info: Debugger; + /** + * Used for detailed trbouleshooting scenarios. This is + * intended for use by developers / system administrators + * for diagnosing specific failures. + */ + verbose: Debugger; +} + +/** + * The log levels supported by the logger. + * The log levels in order of most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare type AzureLogLevel = "verbose" | "info" | "warning" | "error"; + +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. + * @hidden + */ +export declare function createClientLogger(namespace: string): AzureLogger; + +/** + * A log function that can be dynamically enabled and redirected. + */ +export declare interface Debugger { + /** + * Logs the given arguments to the `log` method. + */ + (...args: any[]): void; + /** + * True if this logger is active and logging. + */ + enabled: boolean; + /** + * Used to cleanup/remove this logger. + */ + destroy: () => boolean; + /** + * The current log method. Can be overridden to redirect output. + */ + log: (...args: any[]) => void; + /** + * The namespace of this logger. + */ + namespace: string; + /** + * Extends this logger with a child namespace. + * Namespaces are separated with a ':' character. + */ + extend: (namespace: string) => Debugger; +} + +/** + * Retrieves the currently specified log level. + */ +export declare function getLogLevel(): AzureLogLevel | undefined; + +/** + * Immediately enables logging at the specified log level. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare function setLogLevel(level?: AzureLogLevel): void; + +export { } diff --git a/node_modules/@azure/ms-rest-js/LICENSE b/node_modules/@azure/ms-rest-js/LICENSE new file mode 100644 index 00000000..21071075 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@azure/ms-rest-js/README.md b/node_modules/@azure/ms-rest-js/README.md new file mode 100644 index 00000000..7beda9cd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/README.md @@ -0,0 +1,41 @@ +# ms-rest-js + +[![Build Status](https://dev.azure.com/azure-public/azsdk/_apis/build/status/public.Azure.ms-rest-js%20-%20CI)](https://dev.azure.com/azure-public/azsdk/_build/latest?definitionId=39) + +Runtime for isomorphic javascript libraries (that work in the browser and node.js environment) generated via [Autorest](https://github.com/Azure/Autorest). + +## Requirements +- Node.js version > 6.x +- `npm install -g typescript` + +## Installation +- After cloning the repo, execute `npm install` + +## Execution + +### Node.js +- Set the subscriptionId and token as instructed in `samples/node-samples.ts` +- Run `npx ts-node samples/node-sample.js` + +### In the browser +- Run `npm run build` +- Set the subscriptionId and token then +- Open index.html file in the browser. It should show the response from GET request on the storage account. From Chrome type Ctrl + Shift + I and you can see the logs in console. + +## Architecture Overview + +You can find an explanation of how this repository's code works by going to our [architecture overview](https://github.com/Azure/ms-rest-js/blob/master/docs/architectureOverview.md). + +# Contributing + +This project welcomes contributions and suggestions. Most contributions require you to agree to a +Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us +the rights to use your contribution. For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide +a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions +provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). +For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 
diff --git a/node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt b/node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt new file mode 100644 index 00000000..8c622df8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt @@ -0,0 +1,35 @@ +Third Party Notices for ms-rest-js + +This project incorporates material from the project(s) listed below (collectively, Third Party Code). +Microsoft, Inc. Microsoft is not the original author of the Third Party Code. +The original copyright notice and license, under which Microsoft received such Third Party Code, +are set out below. This Third Party Code is licensed to you under their original license terms set forth below. +Microsoft reserves all other rights not expressly granted, whether by implication, estoppel or otherwise. + +1. uuid (https://github.com/kelektiv/node-uuid) + +%% uuid NOTICES AND INFORMATION BEGIN HERE +========================================= +The MIT License (MIT) + +Copyright (c) 2010-2016 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +========================================= +END OF uuid NOTICES AND INFORMATION \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.browser.js b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js new file mode 100644 index 00000000..8366361b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js @@ -0,0 +1,4405 @@ +/** @license ms-rest-js + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt and ThirdPartyNotices.txt in the project root for license information. + */ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (global = global || self, factory(global.msRest = {})); +}(this, (function (exports) { 'use strict'; + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + /** + * A collection of HttpHeaders that can be sent with a HTTP request. 
+ */ + function getHeaderKey(headerName) { + return headerName.toLowerCase(); + } + function isHttpHeadersLike(object) { + if (!object || typeof object !== "object") { + return false; + } + if (typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function") { + return true; + } + return false; + } + /** + * A collection of HTTP header key/value pairs. + */ + var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (var headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + HttpHeaders.prototype.set = function (headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + }; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + }; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + HttpHeaders.prototype.contains = function (headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + }; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + }; + /** + * Get the headers that are contained this collection as an object. + */ + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + }; + /** + * Get the headers that are contained in this collection as an array. + */ + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + }; + /** + * Get the header names that are contained in this collection. 
+ */ + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + }; + /** + * Get the JSON object representation of this HTTP header collection. + */ + HttpHeaders.prototype.toJson = function () { + return this.rawHeaders(); + }; + /** + * Get the string representation of this HTTP header collection. + */ + HttpHeaders.prototype.toString = function () { + return JSON.stringify(this.toJson()); + }; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + HttpHeaders.prototype.clone = function () { + return new HttpHeaders(this.rawHeaders()); + }; + return HttpHeaders; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + /** + * Encodes a string in base64 format. + * @param value the string to encode + */ + function encodeString(value) { + return btoa(value); + } + /** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ + function encodeByteArray(value) { + var str = ""; + for (var i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); + } + /** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ + function decodeString(value) { + var byteString = atob(value); + var arr = new Uint8Array(byteString.length); + for (var i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } + + // Unique ID creation requires a high quality random # generator. In the browser we therefore + // require the crypto API and do not support built-in fallback to lower quality random number + // generators (like Math.random()). + var getRandomValues; + var rnds8 = new Uint8Array(16); + function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also, + // find the complete implementation of crypto (msCrypto) on IE11. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); + } + + var REGEX = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; + + function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); + } + + /** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + + var byteToHex = []; + + for (var i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); + } + + function stringify(arr) { + var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; + } + + function v4(options, buf, offset) { + options = options || {}; + var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (var i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return stringify(rnds); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + var Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.6.1", + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, + }; + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * A constant that indicates whether the environment is node.js or browser based. + */ + var isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; + /** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. 
+ */ + function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); + } + /** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ + function stripResponse(response) { + var strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; + } + /** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ + function stripRequest(request) { + var strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; + } + /** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ + function isValidUuid(uuid) { + var validUuidRegex = new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", "ig"); + return validUuidRegex.test(uuid); + } + /** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ + function generateUuid() { + return v4(); + } + /** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ + function executePromisesSequentially(promiseFactories, kickstart) { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; + } + /** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ + function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); + } + /** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ + function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + cb(undefined, data); + }, function (err) { + cb(err); + }); + }; + } + /** + * Converts a Promise to a service callback. 
+ * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ + function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }, function (err) { + process.nextTick(cb, err); + }); + }; + } + function prepareXMLRootList(obj, elementName) { + var _a; + if (!Array.isArray(obj)) { + obj = [obj]; + } + return _a = {}, _a[elementName] = obj, _a; + } + /** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ + function applyMixins(targetCtor, sourceCtors) { + sourceCtors.forEach(function (sourceCtors) { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach(function (name) { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); + } + var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; + /** + * Indicates whether the given string is in ISO 8601 format. + * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ + function isDuration(value) { + return validateISODuration.test(value); + } + /** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ + function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); + } + /** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ + function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
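The `HttpHeaders` collection and the UUID helper bundled above are usable on their own. A minimal sketch, assuming `HttpHeaders` and `generateUuid` are re-exported from the package entry point (only their definitions appear in this bundle):

```ts
import { HttpHeaders, generateUuid } from "@azure/ms-rest-js";

// Header names are case-insensitive: keys are normalized to lower case internally,
// so lookups succeed regardless of the casing used when the header was set.
const headers = new HttpHeaders({ "Content-Type": "application/json" });
headers.set("x-ms-client-request-id", generateUuid());

console.log(headers.get("content-type"));                // "application/json"
console.log(headers.contains("X-MS-CLIENT-REQUEST-ID")); // true
console.log(headers.toJson());                           // lower-cased name -> value map

// clone() copies the raw headers, so mutating the copy leaves the original intact.
const copy = headers.clone();
copy.remove("content-type");
console.log(headers.contains("content-type"));           // still true
```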
+ var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } + this.modelMappers = modelMappers; + this.isXML = isXML; + } + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); + }; + if (mapper.constraints && value != undefined) { + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + var pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { + failValidation("UniqueItems", UniqueItems); + } + } + }; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + Serializer.prototype.serialize = function (mapper, object, objectName) { + var payload = {}; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + var required = mapper.required, nullable = mapper.nullable; + if (required && nullable && object === undefined) { + throw new Error(objectName + " cannot be undefined."); + } + if (required && !nullable && object == undefined) { + throw new Error(objectName + " cannot be null or undefined."); + } + if (!required && nullable === false && object === null) { + throw new Error(objectName + " cannot be null."); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/gi) !== null) { + var enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName); + } + } + return payload; + }; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + Serializer.prototype.deserialize = function (mapper, responseBody, objectName) { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + var payload; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName); + } + else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + }; + return Serializer; + }()); + function trimEnd(str, ch) { + var len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); + } + function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); + } + // Uint8Array to Base64. + var str = encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); + } + function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. 
+ return decodeString(str); + } + function splitSerializeName(prop) { + var classes = []; + var partialclass = ""; + if (prop) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; + } + function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); + } + function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); + } + function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(objectName + " with value " + value + " must be of type number."); + } + } + else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); + } + } + else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); + } + } + else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(objectName + " with value " + value + " must be of type boolean."); + } + } + else if (typeName.match(/^Stream$/gi) !== null) { + var objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob)) { + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); + } + } + } + return value; + } + function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); + } + var isPresent = allowedValues.some(function (item) { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(value + " is not a valid value for " + objectName + ". The valid values are: " + JSON.stringify(allowedValues) + "."); + } + return value; + } + function serializeByteArrayType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = encodeByteArray(value); + } + return value; + } + function serializeBase64UrlType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = bufferToBase64Url(value); + } + return value; + } + function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!isDuration(value)) { + throw new Error(objectName + " must be a string in ISO 8601 format. Instead was \"" + value + "\"."); + } + value = value; + } + } + return value; + } + function serializeSequenceType(serializer, mapper, object, objectName) { + if (!Array.isArray(object)) { + throw new Error(objectName + " must be of type Array."); + } + var elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempArray = []; + for (var i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; + } + function serializeDictionaryType(serializer, mapper, object, objectName) { + if (typeof object !== "object") { + throw new Error(objectName + " must be of type object."); + } + var valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; + } + /** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ + function resolveModelProperties(serializer, mapper, objectName) { + var modelProps = mapper.type.modelProperties; + if (!modelProps) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + var modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + } + } + return modelProps; + } + function serializeCompositeType(serializer, mapper, object, objectName) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + var propName = void 0; + var parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + var paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { + var pathName = paths_1[_c]; + var childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + var propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } + else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + } + else { + parentObject[propName] = serializedValue; + } + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]'); + } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); + } + } + return payload; + } + return object; + } + function isSpecialXmlProperty(propertyName) { + return ["$", "_"].includes(propertyName); + } + function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { + var key = _a[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + var dictionary = {}; + for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { + var headerKey = _c[_b]; + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize(propertyMapper, responseBody.$[xmlName], propertyObjectName); + } + else { + var propertyName = xmlElementName || xmlName || serializedName; + var unwrappedProperty = responseBody[propertyName]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; + var isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + var propertyInstance = void 0; + var res = responseBody; + // traversing the object step by step. 
+ for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { + var item = paths_2[_d]; + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + var serializedValue = void 0; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (var _e = 0, _f = Object.entries(instance); _e < _f.length; _e++) { + var _g = _f[_e], key_1 = _g[0], value = _g[1]; + if (!arrayInstance.hasOwnProperty(key_1)) { + arrayInstance[key_1] = value; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + instance[key] = serializedValue; + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (var responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]'); + } + } + } + else if (responseBody) { + for (var _h = 0, _j = Object.keys(responseBody); _h < _j.length; _h++) { + var key = _j[_h]; + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key)) { + instance[key] = responseBody[key]; + } + } + } + return instance; + } + function deserializeDictionaryType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.deserialize(value, 
responseBody[key], objectName); + } + return tempDictionary; + } + return responseBody; + } + function deserializeSequenceType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]"); + } + return tempArray; + } + return responseBody; + } + function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + var discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; + } + function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); + } + function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); + } + // TODO: why is this here? + function serializeObject(toSerialize) { + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + var dictionary = {}; + for (var property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; + } + /** + * Utility function to create a K:V from a list of strings + */ + function strEnum(o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; + result[key] = key; + } + return result; + } + var MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", + ]); + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ function isWebResourceLike(object) { + if (typeof object !== "object") { + return false; + } + if (typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function") { + return true; + } + return false; + } + /** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ + var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, agentSettings, redirectLimit) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + WebResource.prototype.validateRequestProperties = function () { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + }; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + WebResource.prototype.prepare = function (options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. 
Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + var baseUrl = options.baseUrl; + var url_1 = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate_1.startsWith("/") ? pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); + } + segments.forEach(function (item) { + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in " + pathParameters_1 + " - " + JSON.stringify(pathParameters_1, undefined, 2) + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); + } + if (typeof pathParam.valueOf() === "string") { + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (pathParam.skipUrlEncoding) { + url_1 = url_1.replace(item, pathParam.value); + } + else { + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url_1; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + var queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + var queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + return this; + }; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.agentSettings, this.redirectLimit); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + }; + return WebResource; + }()); + + /*! ***************************************************************************** + Copyright (c) Microsoft Corporation. + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. + ***************************************************************************** */ + /* global Reflect, Promise */ + + var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + + function __extends(d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + } + + var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); + }; + + function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + } + + function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? 
y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + } + + function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + var RestError = /** @class */ (function (_super) { + __extends(RestError, _super); + function RestError(message, code, statusCode, request, response, body) { + var _this = _super.call(this, message) || this; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + _this.body = body; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; + } + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.REQUEST_ABORTED_ERROR = "REQUEST_ABORTED_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; + }(Error)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ + var XhrHttpClient = /** @class */ (function () { + function XhrHttpClient() { + } + XhrHttpClient.prototype.sendRequest = function (request) { + var xhr = new XMLHttpRequest(); + if (request.agentSettings) { + throw new Error("HTTP agent settings not supported in browser environment"); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + var abortSignal = request.abortSignal; + if (abortSignal) { + var listener_1 = function () { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener_1); + xhr.addEventListener("readystatechange", function () { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener_1); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + if (request.formData) { + var formData = request.formData; + var requestForm_1 = new FormData(); + var appendFormValue = function (key, value) { + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (var _i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + var formKey = _a[_i]; + var formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (var j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + request.body = requestForm_1; + request.formData = undefined; + var contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (var _b = 0, _c = request.headers.headersArray(); _b < _c.length; _b++) { + var header = _c[_b]; + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = request.streamResponseBody ? "blob" : "text"; + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? 
null : request.body); + if (request.streamResponseBody) { + return new Promise(function (resolve, reject) { + xhr.addEventListener("readystatechange", function () { + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + var blobBody = new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody: blobBody, + }); + } + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + return resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + }); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + }; + return XhrHttpClient; + }()); + function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", function (rawEvent) { + return listener({ + loadedBytes: rawEvent.loaded, + }); + }); + } + } + // exported locally for testing + function parseHeaders(xhr) { + var responseHeaders = new HttpHeaders(); + var headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (var _i = 0, headerLines_1 = headerLines; _i < headerLines_1.length; _i++) { + var line = headerLines_1[_i]; + var index = line.indexOf(":"); + var headerName = line.slice(0, index); + var headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; + } + function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", function () { + return reject(new RestError("Failed to send request to " + request.url, RestError.REQUEST_SEND_ERROR, undefined, request)); + }); + xhr.addEventListener("abort", function () { + return reject(new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, request)); + }); + xhr.addEventListener("timeout", function () { + return reject(new RestError("timeout of " + xhr.timeout + "ms exceeded", RestError.REQUEST_SEND_ERROR, undefined, request)); + }); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + (function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; + })(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); + + // Copyright (c) Microsoft Corporation. + // Licensed under the MIT license. + /** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ + function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential;
+ return (castCredential &&
+ typeof castCredential.getToken === "function" &&
+ (castCredential.signRequest === undefined || castCredential.getToken.length > 0));
+ }
+
+ // Copyright (c) Microsoft Corporation. All rights reserved.
+ // Licensed under the MIT License. See License.txt in the project root for license information.
+ /**
+ * Get the path to this parameter's value as a dotted string (a.b.c).
+ * @param parameter The parameter to get the path string for.
+ * @returns The path to this parameter's value as a dotted string.
+ */
+ function getPathStringFromParameter(parameter) {
+ return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);
+ }
+ function getPathStringFromParameterPath(parameterPath, mapper) {
+ var result;
+ if (typeof parameterPath === "string") {
+ result = parameterPath;
+ }
+ else if (Array.isArray(parameterPath)) {
+ result = parameterPath.join(".");
+ }
+ else {
+ result = mapper.serializedName;
+ }
+ return result;
+ }
+
+ // Copyright (c) Microsoft Corporation. All rights reserved.
+ function isStreamOperation(operationSpec) {
+ var result = false;
+ for (var statusCode in operationSpec.responses) {
+ var operationResponse = operationSpec.responses[statusCode];
+ if (operationResponse.bodyMapper &&
+ operationResponse.bodyMapper.type.name === MapperType.Stream) {
+ result = true;
+ break;
+ }
+ }
+ return result;
+ }
+
+ // Copyright (c) Microsoft Corporation. All rights reserved.
+ // Licensed under the MIT License. See License.txt in the project root for license information.
+ var parser = new DOMParser();
+ function parseXML(str) {
+ try {
+ var dom = parser.parseFromString(str, "application/xml");
+ throwIfError(dom);
+ var obj = domToObject(dom.childNodes[0]);
+ return Promise.resolve(obj);
+ }
+ catch (err) {
+ return Promise.reject(err);
+ }
+ }
+ var errorNS = "";
+ try {
+ errorNS = parser.parseFromString("INVALID", "text/xml").getElementsByTagName("parsererror")[0]
+ .namespaceURI;
+ }
+ catch (ignored) {
+ // Most browsers will return a document containing <parsererror>, but IE will throw.
+ }
+ function throwIfError(dom) {
+ if (errorNS) {
+ var parserErrors = dom.getElementsByTagNameNS(errorNS, "parsererror");
+ if (parserErrors.length) {
+ throw new Error(parserErrors.item(0).innerHTML);
+ }
+ }
+ }
+ function isElement(node) {
+ return !!node.attributes;
+ }
+ /**
+ * Get the Element-typed version of the provided Node if the provided node is an element with
+ * attributes. If it isn't, then undefined is returned.
+ */
+ function asElementWithAttributes(node) {
+ return isElement(node) && node.hasAttributes() ? 
node : undefined;
+ }
+ function domToObject(node) {
+ var result = {};
+ var childNodeCount = node.childNodes.length;
+ var firstChildNode = node.childNodes[0];
+ var onlyChildTextValue = (firstChildNode &&
+ childNodeCount === 1 &&
+ firstChildNode.nodeType === Node.TEXT_NODE &&
+ firstChildNode.nodeValue) ||
+ undefined;
+ var elementWithAttributes = asElementWithAttributes(node);
+ if (elementWithAttributes) {
+ result["$"] = {};
+ for (var i = 0; i < elementWithAttributes.attributes.length; i++) {
+ var attr = elementWithAttributes.attributes[i];
+ result["$"][attr.nodeName] = attr.nodeValue;
+ }
+ if (onlyChildTextValue) {
+ result["_"] = onlyChildTextValue;
+ }
+ }
+ else if (childNodeCount === 0) {
+ result = "";
+ }
+ else if (onlyChildTextValue) {
+ result = onlyChildTextValue;
+ }
+ if (!onlyChildTextValue) {
+ for (var i = 0; i < childNodeCount; i++) {
+ var child = node.childNodes[i];
+ // Ignore leading/trailing whitespace nodes
+ if (child.nodeType !== Node.TEXT_NODE) {
+ var childObject = domToObject(child);
+ if (!result[child.nodeName]) {
+ result[child.nodeName] = childObject;
+ }
+ else if (Array.isArray(result[child.nodeName])) {
+ result[child.nodeName].push(childObject);
+ }
+ else {
+ result[child.nodeName] = [result[child.nodeName], childObject];
+ }
+ }
+ }
+ }
+ return result;
+ }
+ // tslint:disable-next-line:no-null-keyword
+ var doc = document.implementation.createDocument(null, null, null);
+ var serializer = new XMLSerializer();
+ function stringifyXML(obj, opts) {
+ var rootName = (opts && opts.rootName) || "root";
+ var dom = buildNode(obj, rootName)[0];
+ return ('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' + serializer.serializeToString(dom));
+ }
+ function buildAttributes(attrs) {
+ var result = [];
+ for (var _i = 0, _a = Object.keys(attrs); _i < _a.length; _i++) {
+ var key = _a[_i];
+ var attr = doc.createAttribute(key);
+ attr.value = attrs[key].toString();
+ result.push(attr);
+ }
+ return result;
+ }
+ function buildNode(obj, elementName) {
+ if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") {
+ var elem = doc.createElement(elementName);
+ elem.textContent = obj.toString();
+ return [elem];
+ }
+ else if (Array.isArray(obj)) {
+ var result = [];
+ for (var _i = 0, obj_1 = obj; _i < obj_1.length; _i++) {
+ var arrayElem = obj_1[_i];
+ for (var _a = 0, _b = buildNode(arrayElem, elementName); _a < _b.length; _a++) {
+ var child = _b[_a];
+ result.push(child);
+ }
+ }
+ return result;
+ }
+ else if (typeof obj === "object") {
+ var elem = doc.createElement(elementName);
+ for (var _c = 0, _d = Object.keys(obj); _c < _d.length; _c++) {
+ var key = _d[_c];
+ if (key === "$") {
+ for (var _e = 0, _f = buildAttributes(obj[key]); _e < _f.length; _e++) {
+ var attr = _f[_e];
+ elem.attributes.setNamedItem(attr);
+ }
+ }
+ else {
+ for (var _g = 0, _h = buildNode(obj[key], key); _g < _h.length; _g++) {
+ var child = _h[_g];
+ elem.appendChild(child);
+ }
+ }
+ }
+ return [elem];
+ }
+ else {
+ throw new Error("Illegal value passed to buildObject: " + obj);
+ }
+ }
+
+ // Copyright (c) Microsoft Corporation. All rights reserved.
+ var BaseRequestPolicy = /** @class */ (function () {
+ function BaseRequestPolicy(_nextPolicy, _options) {
+ this._nextPolicy = _nextPolicy;
+ this._options = _options;
+ }
+ /**
+ * Get whether or not a log with the provided log level should be logged.
+ * @param logLevel The log level of the log that will be logged.
+ * @returns Whether or not a log with the provided log level should be logged. 
+ */ + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { + return this._options.shouldLog(logLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + BaseRequestPolicy.prototype.log = function (logLevel, message) { + this._options.log(logLevel, message); + }; + return BaseRequestPolicy; + }()); + /** + * Optional properties that can be used when creating a RequestPolicy. + */ + var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { + return (!!this._logger && + logLevel !== exports.HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + RequestPolicyOptions.prototype.log = function (logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + }; + return RequestPolicyOptions; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ + function deserializationPolicy(deserializationContentTypes) { + return { + create: function (nextPolicy, options) { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; + } + var defaultJsonContentTypes = ["application/json", "text/json"]; + var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; + /** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. 
+ */ + var DeserializationPolicy = /** @class */ (function (_super) { + __extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, deserializationContentTypes, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + _this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + return _this; + } + DeserializationPolicy.prototype.sendRequest = function (request) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy + .sendRequest(request) + .then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response); + })]; + }); + }); + }; + return DeserializationPolicy; + }(BaseRequestPolicy)); + function getOperationResponse(parsedResponse) { + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; + if (operationSpec) { + var operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; + } + function shouldDeserializeResponse(parsedResponse) { + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; + } + function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response) { + return parse(jsonContentTypes, xmlContentTypes, response).then(function (parsedResponse) { + var shouldDeserialize = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + var operationSpec = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + var statusCode = parsedResponse.status; + var expectedStatusCodes = Object.keys(operationSpec.responses); + var hasNoExpectedStatusCodes = expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + var responseSpec = getOperationResponse(parsedResponse); + var isExpectedStatusCode = hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + var defaultResponseSpec = operationSpec.responses.default; + if (defaultResponseSpec) { + var initialErrorMessage = isStreamOperation(operationSpec) + ? 
"Unexpected status code: " + statusCode + : parsedResponse.bodyAsText; + var error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = stripRequest(parsedResponse.request); + error.response = stripResponse(parsedResponse); + var parsedErrorResponse = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + var defaultResponseBodyMapper = defaultResponseSpec.bodyMapper; + if (defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError") { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } + else { + var internalError = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + if (defaultResponseBodyMapper) { + var valueToDeserialize = parsedErrorResponse; + if (operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName] + : []; + } + error.body = operationSpec.serializer.deserialize(defaultResponseBodyMapper, valueToDeserialize, "error.body"); + } + } + } + catch (defaultError) { + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; + } + return Promise.reject(error); + } + } + else if (responseSpec) { + if (responseSpec.bodyMapper) { + var valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody"); + } + catch (error) { + var restError = new RestError("Error " + error + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText); + restError.request = stripRequest(parsedResponse.request); + restError.response = stripResponse(parsedResponse); + return Promise.reject(restError); + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + } + } + return Promise.resolve(parsedResponse); + }); + } + function parse(jsonContentTypes, xmlContentTypes, operationResponse) { + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse, operationResponse.bodyAsText); + return Promise.reject(e); + }; + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType + ? [] + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); + if (contentComponents.length === 0 || + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1) + .then(function (body) { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function exponentialRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; + } + var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + var DEFAULT_CLIENT_RETRY_COUNT = 3; + var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + /** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ + var ExponentialRetryPolicy = /** @class */ (function (_super) { + __extends(ExponentialRetryPolicy, _super); + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. 
+ * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + function isNumber(n) { + return typeof n === "number"; + } + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; + }(BaseRequestPolicy)); + /** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ + function shouldRetry(policy, statusCode, retryData) { + if (statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + var currentCount; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; + } + /** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. 
+ */ + function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; + } + function retry(policy, request, response, retryData, requestError) { + retryData = updateRetryData(policy, retryData, requestError); + var isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return delay(retryData.retryInterval) + .then(function () { return policy._nextPolicy.sendRequest(request.clone()); }) + .then(function (res) { return retry(policy, request, res, retryData, undefined); }) + .catch(function (err) { return retry(policy, request, response, retryData, err); }); + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + var err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + return Promise.reject(err); + } + else { + return Promise.resolve(response); + } + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } + return { + create: function (nextPolicy, options) { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; + } + var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + __extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; + } + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, generateUuid()); + } + return this._nextPolicy.sendRequest(request); + }; + return GenerateClientRequestIdPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + function getDefaultUserAgentKey() { + return "x-ms-command-name"; + } + function getPlatformSpecificData() { + var navigator = self.navigator; + var osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + return [osInfo]; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ function getRuntimeInfo() { + var msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + return [msRestRuntime]; + } + function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } + return telemetryInfo + .map(function (info) { + var value = info.value ? "" + valueSeparator + info.value : ""; + return "" + info.key + value; + }) + .join(keySeparator); + } + var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; + function getDefaultUserAgentValue() { + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; + } + function userAgentPolicy(userAgentData) { + var key = !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + var value = !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: function (nextPolicy, options) { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; + } + var UserAgentPolicy = /** @class */ (function (_super) { + __extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; + } + UserAgentPolicy.prototype.sendRequest = function (request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + }; + return UserAgentPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * A class that handles the query portion of a URLBuilder. + */ + var URLQuery = /** @class */ (function () { + function URLQuery() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + URLQuery.prototype.any = function () { + return Object.keys(this._rawQuery).length > 0; + }; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + URLQuery.prototype.set = function (parameterName, parameterValue) { + if (parameterName) { + if (parameterValue != undefined) { + var newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + }; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + URLQuery.prototype.get = function (parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + }; + /** + * Get the string representation of this query. The return value will not start with a "?". 
+ */ + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + var parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); + } + result += parameterStrings.join("&"); + } + else { + result += parameterName + "=" + parameterValue; + } + } + return result; + }; + /** + * Parse a URLQuery from the provided text. + */ + URLQuery.parse = function (text) { + var result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + }; + return URLQuery; + }()); + /** + * A class that handles creating, modifying, and parsing URLs. + */ + var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setScheme = function (scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + }; + /** + * Get the scheme that has been set in this URL. + */ + URLBuilder.prototype.getScheme = function () { + return this._scheme; + }; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setHost = function (host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + }; + /** + * Get the host that has been set in this URL. + */ + URLBuilder.prototype.getHost = function () { + return this._host; + }; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setPort = function (port) { + if (port == undefined || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + }; + /** + * Get the port that has been set in this URL. + */ + URLBuilder.prototype.getPort = function () { + return this._port; + }; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. 
+ */ + URLBuilder.prototype.setPath = function (path) { + if (!path) { + this._path = undefined; + } + else { + var schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + var schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + }; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + URLBuilder.prototype.appendPath = function (path) { + if (path) { + var currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + }; + /** + * Get the path that has been set in this URL. + */ + URLBuilder.prototype.getPath = function () { + return this._path; + }; + /** + * Set the query in this URL. + */ + URLBuilder.prototype.setQuery = function (query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + }; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + }; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + }; + /** + * Get the query in this URL. + */ + URLBuilder.prototype.getQuery = function () { + return this._query ? this._query.toString() : undefined; + }; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + var token = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + var tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error("Unrecognized URLTokenType: " + token.type); + } + } + } + }; + URLBuilder.prototype.toString = function () { + var result = ""; + if (this._scheme) { + result += this._scheme + "://"; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += ":" + this._port; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += "?" 
+ this._query.toString(); + } + return result; + }; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + }; + return URLBuilder; + }()); + var URLToken = /** @class */ (function () { + function URLToken(text, type) { + this.text = text; + this.type = type; + } + URLToken.scheme = function (text) { + return new URLToken(text, "SCHEME"); + }; + URLToken.host = function (text) { + return new URLToken(text, "HOST"); + }; + URLToken.port = function (text) { + return new URLToken(text, "PORT"); + }; + URLToken.path = function (text) { + return new URLToken(text, "PATH"); + }; + URLToken.query = function (text) { + return new URLToken(text, "QUERY"); + }; + return URLToken; + }()); + /** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ + function isAlphaNumericCharacter(character) { + var characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); + } + /** + * A class that tokenizes URL strings. + */ + var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + URLTokenizer.prototype.current = function () { + return this._currentToken; + }; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + URLTokenizer.prototype.next = function () { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); + } + } + return !!this._currentToken; + }; + return URLTokenizer; + }()); + /** + * Read the remaining characters from this Tokenizer's character stream. + */ + function readRemaining(tokenizer) { + var result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; + } + /** + * Whether or not this URLTokenizer has a current character. 
+ */ + function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; + } + /** + * Get the character in the text string at the current index. + */ + function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; + } + /** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ + function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } + } + /** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ + function peekCharacters(tokenizer, charactersToPeek) { + var endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); + } + /** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ + function readWhile(tokenizer, condition) { + var result = ""; + while (hasCurrentCharacter(tokenizer)) { + var currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; + } + /** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ + function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); + } + /** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. 
+ */ + function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); + } + function nextScheme(tokenizer) { + var scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } + } + function nextSchemeOrHost(tokenizer) { + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + } + function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + var host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + var port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + function nextPath(tokenizer) { + var path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + var query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ var DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, + }; + function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } + return { + create: function (nextPolicy, options) { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; + } + var RedirectPolicy = /** @class */ (function (_super) { + __extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; + } + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request) + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; + }(BaseRequestPolicy)); + function handleRedirect(policy, response, currentRetries) { + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries))) { + var builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }) + .then(function (res) { return recordRedirect(res, request.url); }); + } + return Promise.resolve(response); + } + function recordRedirect(response, redirect) { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; + } + + function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } + return { + create: function (nextPolicy, options) { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; + } + var RPRegistrationPolicy = /** @class */ (function (_super) { + __extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; + } + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; + }(BaseRequestPolicy)); + function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + var rpName = 
checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + var urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(function () { return false; }) + .then(function (registrationStatus) { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); + } + /** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. + */ + function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; + } + /** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. + */ + function checkRPNotRegisteredError(body) { + var result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; + } + /** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ + function extractSubscriptionUrl(url) { + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); + } + return result; + } + /** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. 
+ * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ + function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); + } + /** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. + */ + function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function signingPolicy(authenticationProvider) { + return { + create: function (nextPolicy, options) { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; + } + var SigningPolicy = /** @class */ (function (_super) { + __extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; + } + SigningPolicy.prototype.signRequest = function (request) { + return this.authenticationProvider.signRequest(request); + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; + } + /** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. 
+ * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ + var SystemErrorRetryPolicy = /** @class */ (function (_super) { + __extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + _this.DEFAULT_CLIENT_RETRY_COUNT = 3; + _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + _this.retryCount = typeof retryCount === "number" ? retryCount : _this.DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = + typeof retryInterval === "number" ? retryInterval : _this.DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .catch(function (error) { return retry$1(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; + }(BaseRequestPolicy)); + /** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ + function shouldRetry$1(policy, retryData) { + var currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; + } + /** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ + function updateRetryData$1(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; + } + function retry$1(policy, request, operationResponse, err, retryData) { + return __awaiter(this, void 0, void 0, function () { + var error_1; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData$1(policy, retryData, err); + if (!(err && + err.code && + shouldRetry$1(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT"))) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + error_1 = _a.sent(); + return [2 /*return*/, retry$1(policy, request, operationResponse, error_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + (function (QueryCollectionFormat) { + QueryCollectionFormat["Csv"] = ","; + QueryCollectionFormat["Ssv"] = " "; + QueryCollectionFormat["Tsv"] = "\t"; + QueryCollectionFormat["Pipes"] = "|"; + QueryCollectionFormat["Multi"] = "Multi"; + })(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var agentNotSupportedInBrowser = new Error("AgentPolicy is not supported in browser environment"); + function agentPolicy(_agentSettings) { + return { + create: function (_nextPolicy, _options) { + throw agentNotSupportedInBrowser; + }, + }; + } + var AgentPolicy = /** @class */ (function (_super) { + __extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw agentNotSupportedInBrowser; + } + AgentPolicy.prototype.sendRequest = function (_request) { + throw agentNotSupportedInBrowser; + }; + return AgentPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. 
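
For readers following the retry logic above: `updateRetryData$1` implements exponential backoff with jitter. The sketch below is a standalone illustration, not part of the vendored bundle; the function name `nextRetryInterval` is ours, and the default values in the comments are the ones set in the `SystemErrorRetryPolicy` constructor shown earlier.

```ts
// Standalone sketch of the jittered backoff computed by updateRetryData$1.
// Illustrative only; the real policy keeps this state on a RetryData object.
function nextRetryInterval(
  retryCount: number,       // attempts that have already failed
  retryInterval: number,    // base interval, default 30_000 ms
  minRetryInterval: number, // default 3_000 ms
  maxRetryInterval: number  // default 90_000 ms
): number {
  // 2^retryCount - 1 grows the delay exponentially with each failure.
  let incrementDelta = Math.pow(2, retryCount) - 1;
  // Jitter: a value roughly in [0.8, 1.2) * retryInterval.
  const boundedRandDelta =
    retryInterval * 0.8 + Math.floor(Math.random() * (retryInterval * 0.4));
  incrementDelta *= boundedRandDelta;
  // Clamp to the configured maximum.
  return Math.min(minRetryInterval + incrementDelta, maxRetryInterval);
}

// With the defaults (30s base, 3s min, 90s max), the first retry waits roughly
// 27-39 seconds and the second roughly 75-90 seconds (hitting the cap).
console.log(nextRetryInterval(1, 30_000, 3_000, 90_000));
```
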
+ var proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); + function getDefaultProxySettings(_proxyUrl) { + return undefined; + } + function proxyPolicy(_proxySettings) { + return { + create: function (_nextPolicy, _options) { + throw proxyNotSupportedInBrowser; + }, + }; + } + var ProxyPolicy = /** @class */ (function (_super) { + __extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw proxyNotSupportedInBrowser; + } + ProxyPolicy.prototype.sendRequest = function (_request) { + throw proxyNotSupportedInBrowser; + }; + return ProxyPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var StatusCodes = Constants.HttpConstants.StatusCodes; + var DEFAULT_RETRY_COUNT = 3; + function throttlingRetryPolicy(maxRetries) { + if (maxRetries === void 0) { maxRetries = DEFAULT_RETRY_COUNT; } + return { + create: function (nextPolicy, options) { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; + } + /** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ + var ThrottlingRetryPolicy = /** @class */ (function (_super) { + __extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, retryLimit) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryLimit = retryLimit; + return _this; + } + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + return _this.retry(httpRequest, response, 0); + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype.retry = function (httpRequest, httpResponse, retryCount) { + return __awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs, res; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return [2 /*return*/, httpResponse]; + } + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (!(retryAfterHeader && retryCount < this.retryLimit)) return [3 /*break*/, 3]; + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (!delayInMs) return [3 /*break*/, 3]; + return [4 /*yield*/, delay(delayInMs)]; + case 1: + _a.sent(); + return [4 /*yield*/, this._nextPolicy.sendRequest(httpRequest)]; + case 2: + res = _a.sent(); + return [2 /*return*/, this.retry(httpRequest, res, retryCount + 1)]; + case 3: return [2 /*return*/, httpResponse]; + } + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { + try { + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; + return Number.isNaN(diff) ? 
undefined : diff; + } + catch (error) { + return undefined; + } + }; + return ThrottlingRetryPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; + /** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ + var azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", + ]; + /** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. + */ + var AzureIdentityCredentialAdapter = /** @class */ (function () { + function AzureIdentityCredentialAdapter(azureTokenCredential, scopes) { + if (scopes === void 0) { scopes = "https://management.azure.com/.default"; } + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + AzureIdentityCredentialAdapter.prototype.getToken = function () { + return __awaiter(this, void 0, void 0, function () { + var accessToken, result; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.azureTokenCredential.getToken(this.scopes)]; + case 1: + accessToken = _a.sent(); + if (accessToken !== null) { + result = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return [2 /*return*/, result]; + } + else { + throw new Error("Could find token for scope"); + } + } + }); + }); + }; + AzureIdentityCredentialAdapter.prototype.signRequest = function (webResource) { + return __awaiter(this, void 0, void 0, function () { + var tokenResponse; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.getToken()]; + case 1: + tokenResponse = _a.sent(); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, tokenResponse.tokenType + " " + tokenResponse.accessToken); + return [2 /*return*/, Promise.resolve(webResource)]; + } + }); + }); + }; + return AzureIdentityCredentialAdapter; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * @class + * Initializes a new instance of the ServiceClient. + */ + var ServiceClient = /** @class */ (function () { + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + function ServiceClient(credentials, options) { + if (!options) { + options = {}; + } + if (options.baseUri) { + this.baseUri = options.baseUri; + } + var serviceClientCredentials; + if (isTokenCredential(credentials)) { + var scope = undefined; + if ((options === null || options === void 0 ? void 0 : options.baseUri) && azureResourceManagerEndpoints.includes(options === null || options === void 0 ? 
void 0 : options.baseUri)) { + scope = options.baseUri + "/.default"; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } + else { + serviceClientCredentials = credentials; + } + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new XhrHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + var requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } + else { + requestPolicyFactories = createDefaultRequestPolicyFactories(serviceClientCredentials, options); + if (options.requestPolicyFactories) { + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + ServiceClient.prototype.sendRequest = function (options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + var httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + var httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + }; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. 
+ */ + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + var httpRequest = new WebResource(); + var result; + try { + var baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + var requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (var _i = 0, _a = operationSpec.urlParameters; _i < _a.length; _i++) { + var urlParameter = _a[_i]; + var urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter)); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (var _b = 0, _c = operationSpec.queryParameters; _b < _c.length; _b++) { + var queryParameter = _c[_b]; + var queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } + else { + for (var index in queryParameterValue) { + var item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (var index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + var contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (var _d = 0, _e = operationSpec.headerParameters; _d < _e.length; _d++) { + var headerParameter = _e[_d]; + var headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + var headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (var _f = 0, _g = Object.keys(headerValue); _f < _g.length; _f++) { + var key = _g[_f]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + var options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (var customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + result = this.sendRequest(httpRequest).then(function (res) { + return flattenResponse(res, operationSpec.responses[res.status]); + }); + } + catch (error) { + result = Promise.reject(error); + } + var cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) 
{ return cb(err); }); + } + return result; + }; + return ServiceClient; + }()); + function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); + var isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (var _i = 0, _a = operationSpec.formDataParameters; _i < _a.length; _i++) { + var formDataParameter = _a[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue != undefined) { + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter)); + } + } + } + } + function isRequestPolicyFactory(instance) { + return typeof instance.create === "function"; + } + function getValueOrFunctionResult(value, defaultValueCreator) { + var result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; + } + function createDefaultRequestPolicyFactories(credentials, options) { + var factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } + else { + factories.push(signingPolicy(credentials)); + } + } + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + var redirectOptions = __assign(__assign({}, 
DefaultRedirectOptions), options.redirectOptions); + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + var proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy()); + } + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + return factories; + } + function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); + } + function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + var useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; + } + function getPropertyFromParameterPath(parent, parameterPath) { + var result = { propertyFound: false }; + var i = 0; + for (; i < parameterPath.length; ++i) { + var parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; + } + function flattenResponse(_response, responseSpec) { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + var typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + var parsedBody = Array.isArray(_response.parsedBody) ? _response.parsedBody : []; + var arrayResponse = __spreadArrays(parsedBody); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(__assign(__assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function logPolicy(logger) { + if (logger === void 0) { logger = console.log; } + return { + create: function (nextPolicy, options) { + return new LogPolicy(nextPolicy, options, logger); + }, + }; + } + var LogPolicy = /** @class */ (function (_super) { + __extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, logger) { + if (logger === void 0) { logger = console.log; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger; + return _this; + } + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy.sendRequest(request).then(function (response) { return logResponse(_this, response); }); + }; + return LogPolicy; + }(BaseRequestPolicy)); + function logResponse(policy, response) { + policy.logger(">> Request: " + JSON.stringify(response.request, undefined, 2)); + policy.logger(">> Response status code: " + response.status); + var responseBody = response.bodyAsText; + policy.logger(">> Body: " + responseBody); + return Promise.resolve(response); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
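
The `logPolicy` above follows the same factory shape as every other policy in this bundle, and `ServiceClient.sendRequest` (earlier in this file) folds those factories from last to first around the HTTP client. The following self-contained sketch uses our own simplified types and names, not the library's API, to show that composition; `RequestPolicyOptions` and the real request/response shapes are omitted for brevity.

```ts
// Minimal sketch of how requestPolicyFactories are composed into a pipeline.
type SketchRequest = { url: string; headers: Record<string, string> };
type SketchResponse = { status: number; bodyAsText?: string };

interface Policy {
  sendRequest(req: SketchRequest): Promise<SketchResponse>;
}

interface PolicyFactory {
  create(next: Policy): Policy;
}

// Terminal "client" standing in for the real HttpClient at the end of the chain.
const terminalClient: Policy = {
  sendRequest: async (req) => ({ status: 200, bodyAsText: `echo ${req.url}` }),
};

// A logging policy in the spirit of logPolicy above.
const loggingFactory: PolicyFactory = {
  create: (next) => ({
    sendRequest: async (req) => {
      const res = await next.sendRequest(req);
      console.log(`>> ${req.url} -> ${res.status}`);
      return res;
    },
  }),
};

// Fold factories right-to-left, mirroring the loop in ServiceClient.sendRequest,
// so the first factory in the array becomes the outermost policy.
function buildPipeline(factories: PolicyFactory[], client: Policy): Policy {
  let pipeline = client;
  for (let i = factories.length - 1; i >= 0; --i) {
    pipeline = factories[i].create(pipeline);
  }
  return pipeline;
}

// Usage:
// buildPipeline([loggingFactory], terminalClient)
//   .sendRequest({ url: "https://example.org", headers: {} });
```
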
+ var HeaderConstants = Constants.HeaderConstants; + var DEFAULT_AUTHORIZATION_SCHEME$1 = "Bearer"; + /** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ + var TokenCredentials = /** @class */ (function () { + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function TokenCredentials(token, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. + */ + TokenCredentials.prototype.signRequest = function (webResource) { + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, this.authorizationScheme + " " + this.token); + return Promise.resolve(webResource); + }; + return TokenCredentials; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var HeaderConstants$1 = Constants.HeaderConstants; + var DEFAULT_AUTHORIZATION_SCHEME$2 = "Basic"; + var BasicAuthenticationCredentials = /** @class */ (function () { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + encodeString(credentials); + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants$1.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + }; + return BasicAuthenticationCredentials; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * Authenticates to a service using an API key. + */ + var ApiKeyCredentials = /** @class */ (function () { + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. 
+ */ + function ApiKeyCredentials(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error("options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. + */ + ApiKeyCredentials.prototype.signRequest = function (webResource) { + if (!webResource) { + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (var headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error("url cannot be null in the request object.")); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (var key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += key + "=" + this.inQuery[key]; + } + } + return Promise.resolve(webResource); + }; + return ApiKeyCredentials; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var TopicCredentials = /** @class */ (function (_super) { + __extends(TopicCredentials, _super); + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + function TopicCredentials(topicKey) { + var _this = this; + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return TopicCredentials; + }(ApiKeyCredentials)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var DomainCredentials = /** @class */ (function (_super) { + __extends(DomainCredentials, _super); + /** + * Creates a new EventGrid DomainCredentials object. 
+ * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + function DomainCredentials(domainKey) { + var _this = this; + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return DomainCredentials; + }(ApiKeyCredentials)); + + exports.ApiKeyCredentials = ApiKeyCredentials; + exports.AzureIdentityCredentialAdapter = AzureIdentityCredentialAdapter; + exports.BaseRequestPolicy = BaseRequestPolicy; + exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; + exports.Constants = Constants; + exports.DefaultHttpClient = XhrHttpClient; + exports.DomainCredentials = DomainCredentials; + exports.HttpHeaders = HttpHeaders; + exports.MapperType = MapperType; + exports.RequestPolicyOptions = RequestPolicyOptions; + exports.RestError = RestError; + exports.Serializer = Serializer; + exports.ServiceClient = ServiceClient; + exports.TokenCredentials = TokenCredentials; + exports.TopicCredentials = TopicCredentials; + exports.URLBuilder = URLBuilder; + exports.URLQuery = URLQuery; + exports.WebResource = WebResource; + exports.agentPolicy = agentPolicy; + exports.applyMixins = applyMixins; + exports.delay = delay; + exports.deserializationPolicy = deserializationPolicy; + exports.deserializeResponseBody = deserializeResponseBody; + exports.encodeUri = encodeUri; + exports.executePromisesSequentially = executePromisesSequentially; + exports.exponentialRetryPolicy = exponentialRetryPolicy; + exports.flattenResponse = flattenResponse; + exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; + exports.generateUuid = generateUuid; + exports.getDefaultProxySettings = getDefaultProxySettings; + exports.getDefaultUserAgentValue = getDefaultUserAgentValue; + exports.isDuration = isDuration; + exports.isNode = isNode; + exports.isValidUuid = isValidUuid; + exports.logPolicy = logPolicy; + exports.promiseToCallback = promiseToCallback; + exports.promiseToServiceCallback = promiseToServiceCallback; + exports.proxyPolicy = proxyPolicy; + exports.redirectPolicy = redirectPolicy; + exports.serializeObject = serializeObject; + exports.signingPolicy = signingPolicy; + exports.stripRequest = stripRequest; + exports.stripResponse = stripResponse; + exports.systemErrorRetryPolicy = systemErrorRetryPolicy; + exports.throttlingRetryPolicy = throttlingRetryPolicy; + exports.userAgentPolicy = userAgentPolicy; + + Object.defineProperty(exports, '__esModule', { value: true }); + +}))); +//# sourceMappingURL=msRest.browser.js.map diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map new file mode 100644 index 00000000..74e52316 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.browser.js","sources":["../lib/httpHeaders.ts","../lib/util/base64.browser.ts","../node_modules/uuid/dist/esm-browser/rng.js","../node_modules/uuid/dist/esm-browser/regex.js","../node_modules/uuid/dist/esm-browser/validate.js","../node_modules/uuid/dist/esm-browser/stringify.js","../node_modules/uuid/dist/esm-browser/v4.js","../lib/util/constants.ts","../lib/util/utils.ts","../lib/serializer.ts","../lib/webResource.ts","../node_modules/tslib/tslib.es6.js","../lib/restError.ts","../lib/xhrHttpClient.ts","../lib/httpPipelineLogLevel.ts","../node_modules/@azure/core-auth/src/tokenCredential.ts","../lib/operationParameter.ts","../lib/operationSpec.ts","../lib/util/xml.browser.ts","../lib/policies/requestPolicy.ts","../lib/policies/deserializationPolicy.ts","../lib/policies/exponentialRetryPolicy.ts","../lib/policies/generateClientRequestIdPolicy.ts","../lib/policies/msRestUserAgentPolicy.browser.ts","../lib/policies/userAgentPolicy.ts","../lib/url.ts","../lib/policies/redirectPolicy.ts","../lib/policies/rpRegistrationPolicy.ts","../lib/policies/signingPolicy.ts","../lib/policies/systemErrorRetryPolicy.ts","../lib/queryCollectionFormat.ts","../lib/policies/agentPolicy.browser.ts","../lib/policies/proxyPolicy.browser.ts","../lib/policies/throttlingRetryPolicy.ts","../lib/credentials/azureIdentityTokenCredentialAdapter.ts","../lib/serviceClient.ts","../lib/policies/logPolicy.ts","../lib/credentials/tokenCredentials.ts","../lib/credentials/basicAuthenticationCredentials.ts","../lib/credentials/apiKeyCredentials.ts","../lib/credentials/topicCredentials.ts","../lib/credentials/domainCredentials.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string) {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: any): object is HttpHeadersLike {\n if (!object || typeof object !== \"object\") {\n return false;\n }\n\n if (\n typeof object.rawHeaders === \"function\" &&\n typeof object.clone === \"function\" &&\n typeof object.get === \"function\" &&\n typeof object.set === \"function\" &&\n typeof object.contains === \"function\" &&\n typeof object.remove === \"function\" &&\n typeof object.headersArray === \"function\" &&\n typeof object.headerValues === \"function\" &&\n typeof object.headerNames === \"function\" &&\n typeof object.toJson === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name.toLowerCase()] = header.value;\n }\n return result;\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(): RawHttpHeaders {\n return this.rawHeaders();\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson());\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n return new HttpHeaders(this.rawHeaders());\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * Encodes a string in base64 format.\n * @param value the string to encode\n */\nexport function encodeString(value: string): string {\n return btoa(value);\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value the Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n let str = \"\";\n for (let i = 0; i < value.length; i++) {\n str += String.fromCharCode(value[i]);\n }\n return btoa(str);\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value the base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n const byteString = atob(value);\n const arr = new Uint8Array(byteString.length);\n for (let i = 0; i < byteString.length; i++) {\n arr[i] = byteString.charCodeAt(i);\n }\n return arr;\n}\n","// Unique ID creation requires a high quality random # generator. In the browser we therefore\n// require the crypto API and do not support built-in fallback to lower quality random number\n// generators (like Math.random()).\nvar getRandomValues;\nvar rnds8 = new Uint8Array(16);\nexport default function rng() {\n // lazy load so that environments that need to polyfill have a chance to do so\n if (!getRandomValues) {\n // getRandomValues needs to be invoked in a context where \"this\" is a Crypto implementation. 
Also,\n // find the complete implementation of crypto (msCrypto) on IE11.\n getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto);\n\n if (!getRandomValues) {\n throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');\n }\n }\n\n return getRandomValues(rnds8);\n}","export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;","import REGEX from './regex.js';\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && REGEX.test(uuid);\n}\n\nexport default validate;","import validate from './validate.js';\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\n\nvar byteToHex = [];\n\nfor (var i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr) {\n var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!validate(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nexport default stringify;","import rng from './rng.js';\nimport stringify from './stringify.js';\n\nfunction v4(options, buf, offset) {\n options = options || {};\n var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (var i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return stringify(rnds);\n}\n\nexport default v4;","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nexport const Constants = {\n /**\n * The ms-rest version\n * @const\n * @type {string}\n */\n msRestVersion: \"2.6.1\",\n\n /**\n * Specifies HTTP.\n *\n * @const\n * @type {string}\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n *\n * @const\n * @type {string}\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n *\n * @const\n * @type {string}\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n *\n * @const\n * @type {string}\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n *\n * @const\n * @enum {string}\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n *\n * @const\n * @type {string}\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n *\n * @const\n * @type {string}\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n *\n * @const\n * @type {string}\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { v4 as uuidv4 } from \"uuid\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"./constants\";\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param {object} urlToCheck The url to check\n * @return {boolean} True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param {string} uri The URI to be encoded.\n * @return {string} The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param {HttpOperationResponse} response The Http Response\n *\n * @return {object} The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization 
header.\n *\n * @param {WebResource} request The Http Request object\n *\n * @return {WebResource} The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param {string} uuid The uuid as a string that needs to be validated\n *\n * @return {boolean} True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n const validUuidRegex = new RegExp(\n \"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$\",\n \"ig\"\n );\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Provides an array of values of an object. For example\n * for a given object { \"a\": \"foo\", \"b\": \"bar\" }, the method returns [\"foo\", \"bar\"].\n *\n * @param {object} obj An object whose properties need to be enumerated so that it\"s values can be provided as an array\n *\n * @return {any[]} An array of values of the given object.\n */\nexport function objectValues(obj: { [key: string]: any }): any[] {\n const result: any[] = [];\n if (obj && obj instanceof Object) {\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n result.push((obj)[key]);\n }\n }\n } else {\n throw new Error(\n `The provided object ${JSON.stringify(\n obj,\n undefined,\n 2\n )} is not a valid object that can be ` + `enumerated to provide its values as an array.`\n );\n }\n return result;\n}\n\n/**\n * Generated UUID\n *\n * @return {string} RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param {Array} promiseFactories An array of promise factories(A function that return a promise)\n *\n * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n *\n * @return A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(promiseFactories: Array, kickstart: any) {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Merges source object into the target object\n * @param {object} source The object that needs to be merged\n *\n * @param {object} target The object to be merged into\n *\n * @returns {object} Returns the merged target object.\n */\nexport function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {\n Object.keys(source).forEach((key) => {\n target[key] = source[key];\n });\n return target;\n}\n\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param {number} t The number of milliseconds to be delayed.\n * @param {T} value The value to be resolved with after a timeout of t milliseconds.\n * @returns {Promise} Resolved promise\n */\nexport function delay(t: number, value?: T): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(value), t));\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null.\n * @param {TResult} [result] The deserialized response body if an error did not occur.\n * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.\n * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param {Promise} promise The Promise to be converted to a callback\n * @returns {Function} A function that takes the callback (cb: Function): void\n * @deprecated generated code should instead depend on responseToBody\n */\nexport function promiseToCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: Function): void => {\n promise.then(\n (data: any) => {\n cb(undefined, data);\n },\n (err: Error) => {\n cb(err);\n }\n );\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise.then(\n (data: HttpOperationResponse) => {\n process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n },\n (err: Error) => {\n process.nextTick(cb, err);\n }\n );\n };\n}\n\nexport function prepareXMLRootList(obj: 
any, elementName: string) {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n return { [elementName]: obj };\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param {object} targetCtor The target object on which the properties need to be applied.\n * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtor: any, sourceCtors: any[]): void {\n sourceCtors.forEach((sourceCtors) => {\n Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {\n targetCtor.prototype[name] = sourceCtors.prototype[name];\n });\n });\n}\n\nconst validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param {string} value The value to be validated for ISO 8601 duration format.\n * @return {boolean} `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param {string | undefined} value The value to search and replace in.\n * @param {string} searchValue The value to search for in the value argument.\n * @param {string} replaceValue The value to replace searchValue with in the value argument.\n * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given enity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value Any entity\n * @return boolean - true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: any): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\n\nexport class Serializer {\n constructor(\n public readonly modelMappers: { [key: string]: any } = {},\n public readonly isXML?: boolean\n ) {}\n\n validateConstraints(mapper: Mapper, value: any, objectName: string): void {\n const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && value > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && value < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n if (MaxItems != undefined && value.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && value.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && value.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && value.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && value % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? 
new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param {Mapper} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized\n *\n * @param {string} objectName Name of the serialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object\n */\n serialize(mapper: Mapper, object: any, objectName?: string): any {\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/gi) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/gi) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = serializeByteArrayType(objectName, object);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName);\n } else if (mapperType.match(/^Composite$/gi) !== null) {\n payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName);\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param {object} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized\n *\n * @param {string} objectName Name of the deserialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object\n */\n deserialize(mapper: Mapper, responseBody: any, objectName: string): any {\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/gi) !== null) {\n payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName);\n } else {\n if (this.isXML) {\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\") and body (\"_\") properties, then just reduce the responseBody value to\n * the body (\"_\") property.\n */\n if (responseBody[\"$\"] != undefined && responseBody[\"_\"] != undefined) {\n responseBody = responseBody[\"_\"];\n }\n }\n\n if (mapperType.match(/^Number$/gi) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/gi) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) {\n payload = new Date(responseBody);\n } else if (mapperType.match(/^UnixTime$/gi) !== null) {\n payload = unixTimeToDate(responseBody);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = base64.decodeString(responseBody);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = base64UrlToByteArray(responseBody);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string) {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n 
}\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/\\-/g, \"+\").replace(/\\_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/gi) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/gi) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/gi) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/gi) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/gi) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !(typeof Blob === \"function\" && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = base64.encodeByteArray(value);\n }\n return value;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = bufferToBase64Url(value);\n }\n return value;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string) {\n if (value != undefined) {\n if (typeName.match(/^Date$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/gi) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. 
Instead was \"${value}\".`\n );\n }\n value = value;\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string\n) {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n tempArray[i] = serializer.serialize(elementType, object[i], objectName);\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string\n) {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + \".\" + key);\n }\n return tempDictionary;\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer the serializer containing the entire set of mappers\n * @param mapper the composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n const modelMapper = serializer.modelMappers[className];\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${className}\".`);\n }\n modelProps = modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(\n modelMapper\n )}\" of type \"${className}\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string\n) {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (childObject == undefined && 
object[key] != undefined) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName\n );\n if (serializedValue !== undefined && propName != undefined) {\n if (propertyMapper.xmlIsAttribute) {\n // $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject.$ = parentObject.$ || {};\n parentObject.$[propName] = serializedValue;\n } else if (propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue };\n } else {\n parentObject[propName] = serializedValue;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]'\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction isSpecialXmlProperty(propertyName: string): boolean {\n return [\"$\", \"_\"].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody.$) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody.$[xmlName!],\n propertyObjectName\n );\n } else {\n const propertyName = 
xmlElementName || xmlName || serializedName;\n let unwrappedProperty = responseBody[propertyName!];\n if (propertyMapper.xmlIsWrapped) {\n unwrappedProperty = responseBody[xmlName!];\n unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!];\n\n const isEmptyWrappedList = unwrappedProperty === undefined;\n if (isEmptyWrappedList) {\n unwrappedProperty = [];\n }\n }\n instance[key] = serializer.deserialize(\n propertyMapper,\n unwrappedProperty,\n propertyObjectName\n );\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [key, value] of Object.entries(instance)) {\n if (!arrayInstance.hasOwnProperty(key)) {\n arrayInstance[key] = value;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string) => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]'\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key)\n ) {\n instance[key] = responseBody[key];\n }\n }\n 
}\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`);\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\nexport interface MapperConstraints {\n InclusiveMaximum?: number;\n ExclusiveMaximum?: number;\n InclusiveMinimum?: number;\n ExclusiveMinimum?: number;\n MaxLength?: number;\n MinLength?: number;\n Pattern?: RegExp;\n MaxItems?: number;\n MinItems?: number;\n UniqueItems?: true;\n MultipleOf?: number;\n}\n\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\nexport interface SimpleMapperType {\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\nexport interface CompositeMapperType {\n name: \"Composite\";\n\n // Only one of the two below properties should be present.\n // Use className to reference another type definition,\n // and use modelProperties/additionalProperties when the reference to the other type has been resolved.\n className?: string;\n\n modelProperties?: { [propertyName: string]: Mapper };\n additionalProperties?: Mapper;\n\n uberParent?: string;\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\nexport interface SequenceMapperType {\n name: \"Sequence\";\n element: Mapper;\n}\n\nexport interface DictionaryMapperType {\n name: \"Dictionary\";\n value: Mapper;\n}\n\nexport interface EnumMapperType {\n name: \"Enum\";\n allowedValues: any[];\n}\n\nexport interface BaseMapper {\n xmlName?: string;\n xmlIsAttribute?: boolean;\n xmlElementName?: string;\n xmlIsWrapped?: boolean;\n readOnly?: boolean;\n isConstant?: boolean;\n required?: boolean;\n nullable?: boolean;\n serializedName?: string;\n type: MapperType;\n defaultValue?: any;\n constraints?: MapperConstraints;\n}\n\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\nexport interface PolymorphicDiscriminator {\n serializedName: string;\n clientName: string;\n [key: string]: string;\n}\n\nexport interface CompositeMapper extends BaseMapper {\n type: CompositeMapperType;\n}\n\nexport interface SequenceMapper extends BaseMapper {\n type: SequenceMapperType;\n}\n\nexport interface DictionaryMapper extends BaseMapper {\n type: DictionaryMapperType;\n headerCollectionPrefix?: string;\n}\n\nexport interface EnumMapper extends BaseMapper {\n type: EnumMapperType;\n}\n\nexport interface UrlParameterValue {\n value: string;\n skipUrlEncoding: boolean;\n}\n\n// TODO: why is this here?\nexport function serializeObject(toSerialize: any): any {\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = 
base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(toSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { generateUuid } from \"./util/utils\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { AgentSettings, ProxySettings } from \"./serviceClient\";\n\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n readonly aborted: boolean;\n dispatchEvent: (event: Event) => boolean;\n onabort: ((this: AbortSignalLike, ev: Event) => any) | null;\n addEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n removeEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n}\n\n/**\n * An abstraction over a REST call.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * HTTP(S) agent configuration.\n */\n agentSettings?: AgentSettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n /**\n * Used to abort the request later.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: any): object is WebResourceLike {\n if (typeof object !== \"object\") {\n return false;\n }\n if (\n typeof object.url === \"string\" &&\n typeof object.method === \"string\" &&\n typeof object.headers === \"object\" &&\n isHttpHeadersLike(object.headers) &&\n typeof object.validateRequestProperties === \"function\" &&\n typeof object.prepare === \"function\" &&\n typeof object.clone === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n *\n * @constructor\n */\nexport class WebResource {\n url: string;\n method: HttpMethods;\n body?: any;\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n query?: { [key: string]: any };\n operationSpec?: OperationSpec;\n withCredentials: boolean;\n timeout: number;\n proxySettings?: ProxySettings;\n keepAlive?: boolean;\n agentSettings?: AgentSettings;\n redirectLimit?: number;\n\n abortSignal?: AbortSignalLike;\n\n /** Callback which fires upon upload progress. */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: any,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n agentSettings?: AgentSettings,\n redirectLimit?: number\n ) {\n this.streamResponseBody = streamResponseBody;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.agentSettings = agentSettings;\n this.redirectLimit = redirectLimit;\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param {RequestPrepareOptions} options Options to provide for preparing the request.\n * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (options.method == undefined || typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url == undefined || typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({\\w*\\s*\\w*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in ${pathParameters} - ${JSON.stringify(\n pathParameters,\n undefined,\n 2\n )}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", generateUuid());\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty\n this.body = options.body;\n if (options.body != undefined) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n this.redirectLimit = options.redirectLimit;\n this.streamResponseBody = options.streamResponseBody;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns {WebResource} The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.agentSettings,\n this.redirectLimit\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: { \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }\n * - query-parameter-value in \"string\" format: { \"query-parameter-name\": \"query-parameter-value\"}.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}\"\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: { \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }\n * - path-parameter-value in \"string\" format: { \"path-parameter-name\": \"path-parameter-value\" }.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: object;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n abortSignal?: AbortSignalLike;\n /**\n * Limit the number of redirects followed for this request. 
If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n streamResponseBody?: boolean;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n value: any;\n skipUrlEncoding: boolean;\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * @property {object} [customHeaders] User defined custom request headers that\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n [key: string]: any;\n}\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport function __createBinding(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}\r\n\r\nexport function __exportStar(m, exports) {\r\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) exports[p] = m[p];\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n};\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\r\n result.default = mod;\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\n\nexport class RestError extends Error {\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n static readonly REQUEST_ABORTED_ERROR: string = \"REQUEST_ABORTED_ERROR\";\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n code?: string;\n statusCode?: number;\n request?: WebResourceLike;\n response?: HttpOperationResponse;\n body?: any;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse,\n body?: any\n ) {\n super(message);\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n this.body = body;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpClient } from \"./httpClient\";\nimport { HttpHeaders } from \"./httpHeaders\";\nimport { WebResourceLike, TransferProgressEvent } from \"./webResource\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\n\n/**\n * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests.\n */\nexport class XhrHttpClient implements HttpClient {\n public sendRequest(request: WebResourceLike): Promise {\n const xhr = new XMLHttpRequest();\n\n if (request.agentSettings) {\n throw new Error(\"HTTP agent settings not supported in browser environment\");\n }\n\n if (request.proxySettings) {\n throw new Error(\"HTTP proxy is not supported in browser environment\");\n }\n\n const abortSignal = request.abortSignal;\n if (abortSignal) {\n const listener = () => {\n xhr.abort();\n };\n abortSignal.addEventListener(\"abort\", listener);\n xhr.addEventListener(\"readystatechange\", () => {\n if (xhr.readyState === XMLHttpRequest.DONE) {\n abortSignal.removeEventListener(\"abort\", listener);\n }\n });\n }\n\n addProgressListener(xhr.upload, request.onUploadProgress);\n addProgressListener(xhr, request.onDownloadProgress);\n\n if (request.formData) {\n const formData = request.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any) => {\n if (value && value.hasOwnProperty(\"value\") && value.hasOwnProperty(\"options\")) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n request.body = requestForm;\n request.formData = undefined;\n const contentType = request.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n // browser will automatically apply a suitable content-type header\n request.headers.remove(\"Content-Type\");\n }\n }\n\n xhr.open(request.method, request.url);\n xhr.timeout = request.timeout;\n xhr.withCredentials = request.withCredentials;\n for (const header of request.headers.headersArray()) {\n xhr.setRequestHeader(header.name, header.value);\n }\n xhr.responseType = request.streamResponseBody ? \"blob\" : \"text\";\n\n // tslint:disable-next-line:no-null-keyword\n xhr.send(request.body === undefined ? 
null : request.body);\n\n if (request.streamResponseBody) {\n return new Promise((resolve, reject) => {\n xhr.addEventListener(\"readystatechange\", () => {\n // Resolve as soon as headers are loaded\n if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) {\n const blobBody = new Promise((resolve, reject) => {\n xhr.addEventListener(\"load\", () => {\n resolve(xhr.response);\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n blobBody,\n });\n }\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n } else {\n return new Promise(function (resolve, reject) {\n xhr.addEventListener(\"load\", () =>\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: xhr.responseText,\n })\n );\n rejectOnTerminalEvent(request, xhr, reject);\n });\n }\n }\n}\n\nfunction addProgressListener(\n xhr: XMLHttpRequestEventTarget,\n listener?: (progress: TransferProgressEvent) => void\n) {\n if (listener) {\n xhr.addEventListener(\"progress\", (rawEvent) =>\n listener({\n loadedBytes: rawEvent.loaded,\n })\n );\n }\n}\n\n// exported locally for testing\nexport function parseHeaders(xhr: XMLHttpRequest) {\n const responseHeaders = new HttpHeaders();\n const headerLines = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n for (const line of headerLines) {\n const index = line.indexOf(\":\");\n const headerName = line.slice(0, index);\n const headerValue = line.slice(index + 2);\n responseHeaders.set(headerName, headerValue);\n }\n return responseHeaders;\n}\n\nfunction rejectOnTerminalEvent(\n request: WebResourceLike,\n xhr: XMLHttpRequest,\n reject: (err: any) => void\n) {\n xhr.addEventListener(\"error\", () =>\n reject(\n new RestError(\n `Failed to send request to ${request.url}`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n xhr.addEventListener(\"abort\", () =>\n reject(\n new RestError(\"The request was aborted\", RestError.REQUEST_ABORTED_ERROR, undefined, request)\n )\n );\n xhr.addEventListener(\"timeout\", () =>\n reject(\n new RestError(\n `timeout of ${xhr.timeout}ms exceeded`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. 
You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { Mapper } from \"./serializer\";\n\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { MapperType, Serializer } from \"./serializer\";\nimport { HttpMethods } from \"./webResource\";\n\n/**\n * A specification that defines an operation.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. 
This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\nexport function isStreamOperation(operationSpec: OperationSpec): boolean {\n let result = false;\n for (const statusCode in operationSpec.responses) {\n const operationResponse: OperationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result = true;\n break;\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nconst parser = new DOMParser();\nexport function parseXML(str: string): Promise {\n try {\n const dom = parser.parseFromString(str, \"application/xml\");\n throwIfError(dom);\n\n const obj = domToObject(dom.childNodes[0]);\n return Promise.resolve(obj);\n } catch (err) {\n return Promise.reject(err);\n }\n}\n\nlet errorNS = \"\";\ntry {\n errorNS = parser.parseFromString(\"INVALID\", \"text/xml\").getElementsByTagName(\"parsererror\")[0]\n .namespaceURI!;\n} catch (ignored) {\n // Most browsers will return a document containing , but IE will throw.\n}\n\nfunction throwIfError(dom: Document) {\n if (errorNS) {\n const parserErrors = dom.getElementsByTagNameNS(errorNS, \"parsererror\");\n if (parserErrors.length) {\n throw new Error(parserErrors.item(0)!.innerHTML);\n }\n }\n}\n\nfunction isElement(node: Node): node is Element {\n return !!(node as Element).attributes;\n}\n\n/**\n * Get the Element-typed version of the provided Node if the provided node is an element with\n * attributes. If it isn't, then undefined is returned.\n */\nfunction asElementWithAttributes(node: Node): Element | undefined {\n return isElement(node) && node.hasAttributes() ? 
node : undefined;\n}\n\nfunction domToObject(node: Node): any {\n let result: any = {};\n\n const childNodeCount: number = node.childNodes.length;\n\n const firstChildNode: Node = node.childNodes[0];\n const onlyChildTextValue: string | undefined =\n (firstChildNode &&\n childNodeCount === 1 &&\n firstChildNode.nodeType === Node.TEXT_NODE &&\n firstChildNode.nodeValue) ||\n undefined;\n\n const elementWithAttributes: Element | undefined = asElementWithAttributes(node);\n if (elementWithAttributes) {\n result[\"$\"] = {};\n\n for (let i = 0; i < elementWithAttributes.attributes.length; i++) {\n const attr = elementWithAttributes.attributes[i];\n result[\"$\"][attr.nodeName] = attr.nodeValue;\n }\n\n if (onlyChildTextValue) {\n result[\"_\"] = onlyChildTextValue;\n }\n } else if (childNodeCount === 0) {\n result = \"\";\n } else if (onlyChildTextValue) {\n result = onlyChildTextValue;\n }\n\n if (!onlyChildTextValue) {\n for (let i = 0; i < childNodeCount; i++) {\n const child = node.childNodes[i];\n // Ignore leading/trailing whitespace nodes\n if (child.nodeType !== Node.TEXT_NODE) {\n const childObject: any = domToObject(child);\n if (!result[child.nodeName]) {\n result[child.nodeName] = childObject;\n } else if (Array.isArray(result[child.nodeName])) {\n result[child.nodeName].push(childObject);\n } else {\n result[child.nodeName] = [result[child.nodeName], childObject];\n }\n }\n }\n }\n\n return result;\n}\n\n// tslint:disable-next-line:no-null-keyword\nconst doc = document.implementation.createDocument(null, null, null);\nconst serializer = new XMLSerializer();\n\nexport function stringifyXML(obj: any, opts?: { rootName?: string }) {\n const rootName = (opts && opts.rootName) || \"root\";\n const dom = buildNode(obj, rootName)[0];\n return (\n '' + serializer.serializeToString(dom)\n );\n}\n\nfunction buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {\n const result = [];\n for (const key of Object.keys(attrs)) {\n const attr = doc.createAttribute(key);\n attr.value = attrs[key].toString();\n result.push(attr);\n }\n return result;\n}\n\nfunction buildNode(obj: any, elementName: string): Node[] {\n if (typeof obj === \"string\" || typeof obj === \"number\" || typeof obj === \"boolean\") {\n const elem = doc.createElement(elementName);\n elem.textContent = obj.toString();\n return [elem];\n } else if (Array.isArray(obj)) {\n const result = [];\n for (const arrayElem of obj) {\n for (const child of buildNode(arrayElem, elementName)) {\n result.push(child);\n }\n }\n return result;\n } else if (typeof obj === \"object\") {\n const elem = doc.createElement(elementName);\n for (const key of Object.keys(obj)) {\n if (key === \"$\") {\n for (const attr of buildAttributes(obj[key])) {\n elem.attributes.setNamedItem(attr);\n }\n } else {\n for (const child of buildNode(obj[key], key)) {\n elem.appendChild(child);\n }\n }\n }\n return [elem];\n } else {\n throw new Error(`Illegal value passed to buildObject: ${obj}`);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\nexport interface RequestPolicy {\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n protected constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions implements RequestPolicyOptionsLike {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec, isStreamOperation } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { Mapper, MapperType } from \"../serializer\";\nimport * as utils from \"../util/utils\";\nimport { parseXML } from \"../util/xml\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options);\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n\n constructor(\n nextPolicy: RequestPolicy,\n deserializationContentTypes: DeserializationContentTypes | undefined,\n options: RequestPolicyOptionsLike\n ) {\n super(nextPolicy, options);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response)\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return 
result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse\n): Promise {\n return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => {\n const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse);\n if (shouldDeserialize) {\n const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec;\n if (operationSpec && operationSpec.responses) {\n const statusCode: number = parsedResponse.status;\n\n const expectedStatusCodes: string[] = Object.keys(operationSpec.responses);\n\n const hasNoExpectedStatusCodes: boolean =\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\");\n\n const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse);\n\n const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes\n ? 200 <= statusCode && statusCode < 300\n : !!responseSpec;\n if (!isExpectedStatusCode) {\n const defaultResponseSpec: OperationResponse = operationSpec.responses.default;\n if (defaultResponseSpec) {\n const initialErrorMessage: string = isStreamOperation(operationSpec)\n ? `Unexpected status code: ${statusCode}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(initialErrorMessage);\n error.statusCode = statusCode;\n error.request = utils.stripRequest(parsedResponse.request);\n error.response = utils.stripResponse(parsedResponse);\n\n let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody;\n try {\n if (parsedErrorResponse) {\n const defaultResponseBodyMapper: Mapper | undefined =\n defaultResponseSpec.bodyMapper;\n if (\n defaultResponseBodyMapper &&\n defaultResponseBodyMapper.serializedName === \"CloudError\"\n ) {\n if (parsedErrorResponse.error) {\n parsedErrorResponse = parsedErrorResponse.error;\n }\n if (parsedErrorResponse.code) {\n error.code = parsedErrorResponse.code;\n }\n if (parsedErrorResponse.message) {\n error.message = parsedErrorResponse.message;\n }\n } else {\n let internalError: any = parsedErrorResponse;\n if (parsedErrorResponse.error) {\n internalError = parsedErrorResponse.error;\n }\n\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n }\n\n if (defaultResponseBodyMapper) {\n let valueToDeserialize: any = parsedErrorResponse;\n if (\n operationSpec.isXML &&\n defaultResponseBodyMapper.type.name === MapperType.Sequence\n ) {\n valueToDeserialize =\n typeof parsedErrorResponse === \"object\"\n ? 
parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!]\n : [];\n }\n error.body = operationSpec.serializer.deserialize(\n defaultResponseBodyMapper,\n valueToDeserialize,\n \"error.body\"\n );\n }\n }\n } catch (defaultError) {\n error.message = `Error \\\"${defaultError.message}\\\" occurred in deserializing the responseBody - \\\"${parsedResponse.bodyAsText}\\\" for the default response.`;\n }\n return Promise.reject(error);\n }\n } else if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\"\n );\n } catch (error) {\n const restError = new RestError(\n `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`\n );\n restError.request = utils.stripRequest(parsedResponse.request);\n restError.response = utils.stripResponse(parsedResponse);\n return Promise.reject(restError);\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n }\n }\n }\n return Promise.resolve(parsedResponse);\n });\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse\n): Promise {\n const errorHandler = (err: Error & { code: string }) => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse,\n operationResponse.bodyAsText\n );\n return Promise.reject(e);\n };\n\n if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { RestError } from \"../restError\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\nconst DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nconst DEFAULT_CLIENT_RETRY_COUNT = 3;\nconst DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nconst DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The minimum retry interval in milliseconds.\n */\n minRetryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @constructor\n * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain.\n * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy.\n * @param {number} [retryCount] The client retry count.\n * @param {number} [retryInterval] The client retry interval, in milliseconds.\n * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds.\n * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n function isNumber(n: any): n is number {\n return typeof n === \"number\";\n }\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(\n policy: ExponentialRetryPolicy,\n statusCode: number | undefined,\n retryData: RetryData\n): boolean {\n if (\n statusCode == undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n\n let currentCount: number;\n if (!retryData) {\n throw new Error(\"retryData for the ExponentialRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {RetryData} retryData The retry data.\n * @param {RetryError} [err] The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: ExponentialRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 +\n Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nfunction retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n retryData = updateRetryData(policy, retryData, requestError);\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy, response && response.status, retryData)) {\n return utils\n .delay(retryData.retryInterval)\n .then(() => policy._nextPolicy.sendRequest(request.clone()))\n .then((res) => retry(policy, request, res, retryData, undefined))\n .catch((err) => retry(policy, request, response, retryData, err));\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n return Promise.reject(err);\n } else {\n return Promise.resolve(response);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, utils.generateUuid());\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json\n * and \"plugins\" section in webpack.testconfig.ts.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\ninterface NavigatorEx extends Navigator {\n // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2\n readonly oscpu: string;\n}\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-command-name\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const navigator = self.navigator as NavigatorEx;\n const osInfo = {\n key: \"OS\",\n value: (navigator.oscpu || navigator.platform).replace(\" \", \"\"),\n };\n\n return [osInfo];\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport type TelemetryInfo = { key?: string; value?: string };\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"ms-rest-js\",\n value: Constants.msRestVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? 
`${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value == undefined\n ? getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\" | \"Invalid\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: any): void {\n if (parameterName) {\n if (parameterValue != undefined) {\n const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port == undefined || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. 
If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: any): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n const tokenPath: string | undefined = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: 
URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state != undefined ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /*\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /*\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly maxRetries = 20\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && [\"GET\", \"HEAD\"].includes(request.method)) ||\n (status === 302 && [\"GET\", \"POST\", \"HEAD\"].includes(request.method)) ||\n (status === 303 && \"POST\" === request.method) ||\n status === 307) &&\n ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) ||\n (request.redirectLimit === undefined && currentRetries < policy.maxRetries))\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 302 and 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch\n if ((status === 302 || status === 303) && request.method === \"POST\") {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1))\n .then((res) => recordRedirect(res, request.url));\n }\n\n return Promise.resolve(response);\n}\n\nfunction recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse {\n // This is called as the recursive calls to handleRedirect() unwind,\n // only record the deepest/last redirect\n if (!response.redirected) {\n response.redirected = true;\n response.url = redirect;\n }\n return response;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param {WebResourceLike} originalRequest The original request\n * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false.\n * @returns {object} A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param {string} body The response body received after making the original request.\n * @returns {string} The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} url The original request url\n * @returns {string} The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} provider The provider name to be registered.\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param {registrationCallback} callback The callback that handles the RP registration\n */\nfunction registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n return policy._nextPolicy.sendRequest(reqOptions).then((response) => {\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} url The request url for polling\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns {Promise} True if RP Registration is successful.\n */\nfunction getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n return policy._nextPolicy.sendRequest(reqOptions).then((res) => {\n const obj = res.parsedBody as any;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n return utils\n .delay(policy._retryTimeout * 1000)\n .then(() => getRegistrationStatus(policy, url, originalRequest));\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n *\n * @constructor\n * @param {number} retryCount The client retry count.\n * @param {number} retryInterval The client retry interval, in milliseconds.\n * @param {number} minRetryInterval The minimum retry interval, in milliseconds.\n * @param {number} maxRetryInterval The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\n DEFAULT_CLIENT_RETRY_COUNT = 3;\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = typeof retryCount === \"number\" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval =\n typeof retryInterval === \"number\" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval =\n typeof minRetryInterval === \"number\"\n ? minRetryInterval\n : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval =\n typeof maxRetryInterval === \"number\"\n ? 
maxRetryInterval\n : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean {\n let currentCount;\n if (!retryData) {\n throw new Error(\"retryData for the SystemErrorRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {RetryData} retryData The retry data.\n * @param {object} err The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: SystemErrorRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n if (\n err &&\n err.code &&\n shouldRetry(policy, retryData) &&\n (err.code === \"ETIMEDOUT\" ||\n err.code === \"ESOCKETTIMEDOUT\" ||\n err.code === \"ECONNREFUSED\" ||\n err.code === \"ECONNRESET\" ||\n err.code === \"ENOENT\")\n ) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await utils.delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (error) {\n return retry(policy, request, operationResponse, error, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n Csv = \",\",\n Ssv = \" \",\n Tsv = \"\\t\",\n Pipes = \"|\",\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { AgentSettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst agentNotSupportedInBrowser = new Error(\"AgentPolicy is not supported in browser environment\");\n\nexport function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw agentNotSupportedInBrowser;\n },\n };\n}\n\nexport class AgentPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw agentNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw agentNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ProxySettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst proxyNotSupportedInBrowser = new Error(\"ProxyPolicy is not supported in browser environment\");\n\nexport function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined {\n return undefined;\n}\n\nexport function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw proxyNotSupportedInBrowser;\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw proxyNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw proxyNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyOptionsLike,\n RequestPolicyFactory,\n} from \"./requestPolicy\";\nimport { WebResourceLike } from \"../webResource\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { delay } from \"../util/utils\";\n\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\nconst DEFAULT_RETRY_COUNT = 3;\n\n/**\n * Options that control how to retry on response status code 429.\n */\nexport interface ThrottlingRetryOptions {\n /**\n * The maximum number of retry attempts. 
Defaults to 3.\n */\n maxRetries?: number;\n}\n\nexport function throttlingRetryPolicy(\n maxRetries: number = DEFAULT_RETRY_COUNT\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries);\n },\n };\n}\n\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private retryLimit: number;\n\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) {\n super(nextPolicy, options);\n this.retryLimit = retryLimit;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => {\n return this.retry(httpRequest, response, 0);\n });\n }\n\n private async retry(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse,\n retryCount: number\n ): Promise {\n if (httpResponse.status !== StatusCodes.TooManyRequests) {\n return httpResponse;\n }\n\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader && retryCount < this.retryLimit) {\n const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader(\n retryAfterHeader\n );\n if (delayInMs) {\n await delay(delayInMs);\n const res = await this._nextPolicy.sendRequest(httpRequest);\n return this.retry(httpRequest, res, retryCount + 1);\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { Constants as MSRestConstants } from \"../util/constants\";\nimport { WebResource } from \"../webResource\";\n\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenResponse } from \"./tokenResponse\";\n\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter.\n */\nexport const azureResourceManagerEndpoints = [\n \"https://management.windows.net\",\n \"https://management.chinacloudapi.cn\",\n \"https://management.usgovcloudapi.net\",\n \"https://management.cloudapi.de\",\n];\n\n/**\n * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to\n * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication.\n */\nexport class AzureIdentityCredentialAdapter implements ServiceClientCredentials {\n private azureTokenCredential: TokenCredential;\n private scopes: string | string[];\n constructor(\n azureTokenCredential: TokenCredential,\n scopes: string | string[] = \"https://management.azure.com/.default\"\n ) {\n this.azureTokenCredential = azureTokenCredential;\n this.scopes = scopes;\n }\n\n public async getToken(): Promise {\n const accessToken = await this.azureTokenCredential.getToken(this.scopes);\n if (accessToken !== null) {\n const result: TokenResponse = {\n accessToken: accessToken.token,\n tokenType: DEFAULT_AUTHORIZATION_SCHEME,\n expiresOn: accessToken.expiresOnTimestamp,\n };\n return result;\n } else {\n throw new Error(\"Could find token for scope\");\n }\n }\n\n public async signRequest(webResource: WebResource) {\n const tokenResponse = await this.getToken();\n webResource.headers.set(\n MSRestConstants.HeaderConstants.AUTHORIZATION,\n `${tokenResponse.tokenType} ${tokenResponse.accessToken}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { OperationArguments } from \"./operationArguments\";\nimport {\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n OperationParameter,\n ParameterPath,\n} from \"./operationParameter\";\nimport { isStreamOperation, OperationSpec } from \"./operationSpec\";\nimport {\n deserializationPolicy,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nimport { exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport {\n userAgentPolicy,\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n} from \"./policies/userAgentPolicy\";\nimport { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport { URLBuilder } from \"./url\";\nimport * as utils from \"./util/utils\";\nimport { stringifyXML } from \"./util/xml\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResourceLike,\n isWebResourceLike,\n WebResource,\n} from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { agentPolicy } from \"./policies/agentPolicy\";\nimport { proxyPolicy, getDefaultProxySettings } from \"./policies/proxyPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { Agent } from \"http\";\nimport {\n AzureIdentityCredentialAdapter,\n azureResourceManagerEndpoints,\n} from \"./credentials/azureIdentityTokenCredentialAdapter\";\n\n/**\n * HTTP proxy settings (Node.js only)\n */\nexport interface ProxySettings {\n host: string;\n port: number;\n username?: string;\n password?: string;\n}\n\n/**\n * HTTP and HTTPS agents (Node.js only)\n */\nexport interface AgentSettings {\n http: Agent;\n https: Agent;\n}\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP 
pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-command-name\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n /**\n * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only).\n */\n agentSettings?: AgentSettings;\n /**\n * If specified:\n * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient.\n * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. 
Otherwise, the scope would default to \"https://management.azure.com/.default\".\n *\n * If it is not specified:\n * - All OperationSpecs must contain a baseUrl property.\n * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be \"https://management.azure.com/.default\".\n */\n baseUri?: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the ServiceClient.\n */\nexport class ServiceClient {\n /**\n * The base URI against which requests will be made when using this ServiceClient instance.\n *\n * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient.\n * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default \"https://management.azure.com/.default\"\n *\n * If it is not specified, all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptionsLike;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @constructor\n * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication.\n * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: ServiceClientCredentials | TokenCredential,\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n if (options.baseUri) {\n this.baseUri = options.baseUri;\n }\n\n let serviceClientCredentials: ServiceClientCredentials | undefined;\n if (isTokenCredential(credentials)) {\n let scope: string | undefined = undefined;\n if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) {\n scope = `${options.baseUri}/.default`;\n }\n serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope);\n } else {\n serviceClientCredentials = credentials;\n }\n\n if (serviceClientCredentials && !serviceClientCredentials.signRequest) {\n throw new Error(\"credentials argument needs to implement signRequest method\");\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || new DefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n requestPolicyFactories = createDefaultRequestPolicyFactories(\n serviceClientCredentials,\n options\n );\n if (options.requestPolicyFactories) {\n const newRequestPolicyFactories:\n | void\n | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided 
httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from.\n * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest.\n * @param {ServiceCallback} callback The callback to call when the response is received.\n */\n sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const httpRequest = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter)\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue != undefined) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter)\n );\n if (queryParameter.collectionFormat != undefined) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if 
(queryParameterValue.length === 0) {\n queryParameterValue = \"\";\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] = item == undefined ? \"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat != undefined &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue != undefined) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter)\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseBody == undefined) {\n httpRequest.streamResponseBody = isStreamOperation(operationSpec);\n }\n\n result = this.sendRequest(httpRequest).then((res) =>\n flattenResponse(res, operationSpec.responses[res.status])\n );\n } catch (error) {\n result = Promise.reject(error);\n }\n\n const 
cb = callback;\n if (cb) {\n result\n // tslint:disable-next-line:no-null-keyword\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName } = bodyMapper;\n const typeName = bodyMapper.type.name;\n try {\n if (httpRequest.body != undefined || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString\n );\n const isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n httpRequest.body,\n xmlElementName || xmlName || serializedName!\n ),\n { rootName: xmlName || serializedName }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(httpRequest.body, {\n rootName: xmlName || serializedName,\n });\n }\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue != undefined) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter)\n );\n }\n }\n }\n}\n\nfunction isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory {\n return typeof instance.create === \"function\";\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n credentials: ServiceClientCredentials | RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (credentials) {\n if (isRequestPolicyFactory(credentials)) {\n 
factories.push(credentials);\n } else {\n factories.push(signingPolicy(credentials));\n }\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...options.redirectOptions,\n };\n if (redirectOptions.handleRedirects) {\n factories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n const proxySettings = options.proxySettings || getDefaultProxySettings();\n if (proxySettings) {\n factories.push(proxyPolicy(proxySettings));\n }\n\n if (options.agentSettings) {\n factories.push(agentPolicy(options.agentSettings));\n }\n\n return factories;\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString);\n if (propertyValue !== undefined) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent != undefined && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = (obj: {}) =>\n Object.defineProperty(obj, \"_response\", {\n value: _response,\n });\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n // We're expecting a sequece(array) make sure that the response body is in the\n // correct format, if not make it an empty array []\n const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : [];\n const arrayResponse = [...parsedBody] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function logPolicy(logger: any = console.log): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new LogPolicy(nextPolicy, options, logger);\n },\n };\n}\n\nexport class LogPolicy extends BaseRequestPolicy {\n logger?: any;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n logger: any = console.log\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response));\n }\n}\n\nfunction logResponse(\n policy: LogPolicy,\n response: HttpOperationResponse\n): Promise {\n policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`);\n policy.logger(`>> Response status code: ${response.status}`);\n const responseBody = response.bodyAsText;\n policy.logger(`>> Body: ${responseBody}`);\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * A credentials object that uses a token string and a authorzation scheme to authenticate.\n */\nexport class TokenCredentials implements ServiceClientCredentials {\n token: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new TokenCredentials object.\n *\n * @constructor\n * @param {string} token The token.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) {\n if (!token) {\n throw new Error(\"token cannot be null or undefined.\");\n }\n this.token = token;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @return {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(\n HeaderConstants.AUTHORIZATION,\n `${this.authorizationScheme} ${this.token}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n userName: string;\n password: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @constructor\n * @param {string} userName User name.\n * @param {string} password Password.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers 
= new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\n/**\n * @interface ApiKeyCredentialOptions\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @constructor\n * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param {WebResource} webResource The WebResource to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @constructor\n * @param {string} topicKey The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class DomainCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid DomainCredentials object.\n *\n * @constructor\n * @param {string} domainKey The EventGrid domain key\n */\n constructor(domainKey: string) {\n if (!domainKey || (domainKey && typeof domainKey !== \"string\")) {\n throw new Error(\"domainKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": domainKey,\n },\n };\n super(options);\n }\n}\n"],"names":["uuidv4","base64.decodeString","base64.encodeByteArray","utils.isValidUuid","utils.isDuration","HttpPipelineLogLevel","utils.stripRequest","utils.stripResponse","utils\n .delay","utils.generateUuid","utils\n .delay","retry","shouldRetry","updateRetryData","utils.delay","QueryCollectionFormat","MSRestConstants","DefaultHttpClient","utils.prepareXMLRootList","utils.isPrimitiveType","DEFAULT_AUTHORIZATION_SCHEME","HeaderConstants","base64.encodeString"],"mappings":";;;;;;;;;;IAAA;IACA;IAEA;;;IAGA,SAAS,YAAY,CAAC,UAAkB;QACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;IAClC,CAAC;aA4Ee,iBAAiB,CAAC,MAAY;QAC5C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YACzC,OAAO,KAAK,CAAC;SACd;QAED,IACE,OAAO,MAAM,CAAC,UAAU,KAAK,UAAU;YACvC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU;YAClC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;YAChC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;YAChC,OAAO,MAAM,CAAC,QAAQ,KAAK,UAAU;YACrC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU;YACnC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;YACzC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;YACzC,OAAO,MAAM,CAAC,WAAW,KAAK,UAAU;YACxC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EACnC;YACA,OAAO,IAAI,CAAC;SACb;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;QAME,qBAAY,UAA2B;YACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;YACtB,IAAI,UAAU,EAAE;gBACd,KAAK,IAAM,UAAU,IAAI,UAAU,EAAE;oBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;iBAC9C;aACF;SACF;;;;;;;QAQM,yBAAG,GAAV,UAAW,UAAkB,EAAE,WAA4B;YACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;gBAC3C,IAAI,EAAE,UAAU;gBAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;aAC9B,CAAC;SACH;;;;;;QAOM,yBAAG,GAAV,UAAW,UAAkB;YAC3B,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;YACtE,OAAO,CAAC,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;SAC3C;;;;QAKM,8BAAQ,GAAf,UAAgB,UAAkB;YAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;SACrD;;;;;;QAOM,4BAAM,GAAb,UAAc,UAAkB;YAC9B,IAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;YAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;YAClD,OAAO,MAAM,CAAC;SACf;;;;QAKM,gCAAU,GAAjB;YACE,IAAM,MAAM,GAAmB,EAAE,CAAC;YAClC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,
CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;aAClD;YACD,OAAO,MAAM,CAAC;SACf;;;;QAKM,kCAAY,GAAnB;YACE,IAAM,OAAO,GAAiB,EAAE,CAAC;YACjC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;aAC3C;YACD,OAAO,OAAO,CAAC;SAChB;;;;QAKM,iCAAW,GAAlB;YACE,IAAM,WAAW,GAAa,EAAE,CAAC;YACjC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;YAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;aACnC;YACD,OAAO,WAAW,CAAC;SACpB;;;;QAKM,kCAAY,GAAnB;YACE,IAAM,YAAY,GAAa,EAAE,CAAC;YAClC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;YAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;aACrC;YACD,OAAO,YAAY,CAAC;SACrB;;;;QAKM,4BAAM,GAAb;YACE,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;SAC1B;;;;QAKM,8BAAQ,GAAf;YACE,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;SACtC;;;;QAKM,2BAAK,GAAZ;YACE,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;SAC3C;QACH,kBAAC;IAAD,CAAC;;ICrOD;IACA;IAEA;;;;AAIA,aAAgB,YAAY,CAAC,KAAa;QACxC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC;IACrB,CAAC;IAED;;;;AAIA,aAAgB,eAAe,CAAC,KAAiB;QAC/C,IAAI,GAAG,GAAG,EAAE,CAAC;QACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACrC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;SACtC;QACD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;IACnB,CAAC;IAED;;;;AAIA,aAAgB,YAAY,CAAC,KAAa;QACxC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;QAC/B,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;SACnC;QACD,OAAO,GAAG,CAAC;IACb,CAAC;;IClCD;IACA;IACA;IACA,IAAI,eAAe,CAAC;IACpB,IAAI,KAAK,GAAG,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;AAC/B,IAAe,SAAS,GAAG,GAAG;IAC9B;IACA,EAAE,IAAI,CAAC,eAAe,EAAE;IACxB;IACA;IACA,IAAI,eAAe,GAAG,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,eAAe,IAAI,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,OAAO,QAAQ,KAAK,WAAW,IAAI,OAAO,QAAQ,CAAC,eAAe,KAAK,UAAU,IAAI,QAAQ,CAAC,eAAe,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AACrP;IACA,IAAI,IAAI,CAAC,eAAe,EAAE;IAC1B,MAAM,MAAM,IAAI,KAAK,CAAC,0GAA0G,CAAC,CAAC;IAClI,KAAK;IACL,GAAG;AACH;IACA,EAAE,OAAO,eAAe,CAAC,KAAK,CAAC,CAAC;IAChC;;AClBA,gBAAe,qHAAqH;;sIAAC,lICErI,SAAS,QAAQ,CAAC,IAAI,EAAE;IACxB,EAAE,OAAO,OAAO,IAAI,KAAK,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACtD,CAAC;;ICHD;IACA;IACA;IACA;AACA;IACA,IAAI,SAAS,GAAG,EAAE,CAAC;AACnB;IACA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,EAAE,CAAC,EAAE;IAC9B,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;IACrD,CAAC;AACD;IACA,SAAS,SAAS,CAAC,GAAG,EAAE;IACxB,EAAE,IAAI,MAAM,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,SAAS,GAAG,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;IACrF;IACA;IACA,EAAE,IAAI,IAAI,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAA
G,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,EAAE,WAAW,EAAE,CAAC;IACzgB;IACA;IACA;IACA;AACA;IACA,EAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;IACvB,IAAI,MAAM,SAAS,CAAC,6BAA6B,CAAC,CAAC;IACnD,GAAG;AACH;IACA,EAAE,OAAO,IAAI,CAAC;IACd,CAAC;;ICxBD,SAAS,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE;IAClC,EAAE,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;IAC1B,EAAE,IAAI,IAAI,GAAG,OAAO,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,IAAI,GAAG,GAAG,CAAC;AACtD;IACA,EAAE,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;IAClC,EAAE,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAClC;IACA,EAAE,IAAI,GAAG,EAAE;IACX,IAAI,MAAM,GAAG,MAAM,IAAI,CAAC,CAAC;AACzB;IACA,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,EAAE,CAAC,EAAE;IACjC,MAAM,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;IAChC,KAAK;AACL;IACA,IAAI,OAAO,GAAG,CAAC;IACf,GAAG;AACH;IACA,EAAE,OAAO,SAAS,CAAC,IAAI,CAAC,CAAC;IACzB,CAAC;;ICrBD;IACA;AAEA,QAAa,SAAS,GAAG;;;;;;QAMvB,aAAa,EAAE,OAAO;;;;;;;QAQtB,IAAI,EAAE,OAAO;;;;;;;QAQb,KAAK,EAAE,QAAQ;;;;;;;QAQf,UAAU,EAAE,YAAY;;;;;;;QAQxB,WAAW,EAAE,aAAa;;;;QAK1B,QAAQ,EAAE,UAAU;;;;QAKpB,SAAS,EAAE,WAAW;QAEtB,aAAa,EAAE;;;;;;;YAOb,SAAS,EAAE;gBACT,GAAG,EAAE,KAAK;gBACV,GAAG,EAAE,KAAK;gBACV,MAAM,EAAE,QAAQ;gBAChB,IAAI,EAAE,MAAM;gBACZ,KAAK,EAAE,OAAO;gBACd,IAAI,EAAE,MAAM;gBACZ,KAAK,EAAE,OAAO;aACf;YAED,WAAW,EAAE;gBACX,eAAe,EAAE,GAAG;aACrB;SACF;;;;QAKD,eAAe,EAAE;;;;;;;YAOf,aAAa,EAAE,eAAe;YAE9B,oBAAoB,EAAE,QAAQ;;;;;;;;;YAU9B,WAAW,EAAE,aAAa;;;;;;;YAQ1B,UAAU,EAAE,YAAY;SACzB;KACF;;IC3GD;AACA,IAQA;;;AAGA,QAAa,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;QAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;QACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;QAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B,IAUA;;;;;;AAMA,aAAgB,SAAS,CAAC,GAAW;QACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;aAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;aACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;aACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;aACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;aACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;;;;AAQA,aAAgB,aAAa,CAAC,QAA+B;QAC3D,IAAM,gBAAgB,GAAQ,EAAE,CAAC;QACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;QAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;QAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;QAC1C,OAAO,gBAAgB,CAAC;IAC1B,CAAC;IAED;;;;;;;;AAQA,aAAgB,YAAY,CAAC,OAAwB;QACnD,IAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;QACxC,IAAI,eAAe,CAAC,OAAO,EAAE;YAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;SACjD;QACD,OAAO,eAAe,CAAC;IACzB,CAAC;IAED;;;;;;;AAOA,aAAgB,WAAW,CAAC,IAAY;QACtC,IAAM,cAAc,GAAG,IAAI,MAAM,CAC/B,+EAA+E,EAC/E,IAAI,CACL,CAAC;QACF,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACnC,CAAC;AAED,IA4BA;;;;;AAKA,aAAgB,YAAY;QAC1B,OAAOA,EAAM,EAAE,CAAC;IAClB,CAAC;IAED;;;;;;;;;;;AAWA,aAAgB,2BAA2B,CAAC,gBAA4B,EAAE,SAAc;QACtF,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QACxC,gBAAgB,CAAC,OAAO,CAAC,UAAC,cAAc;YACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;SACtC,CAAC,CAAC;QACH,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,IAeA;;;;;;AAMA,aAAgB,KAAK,CAAI,CAAS,EAAE,KAAS;QAC3C,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,IAAK,OAAA,UAAU,CAAC,cAAM,OAAA,OAAO,CAAC,KAAK,CAAC,GAAA,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;IACvE,CAAC;IAqBD;;;;;;AAMA,aAAgB,iBAAiB,CAAC,OAAqB;QACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;YACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;SACzD;QACD,OAAO,UAAC,EAAY;YAClB,OAAO,CAAC,IAAI,CACV,UAAC,IAAS;gBACR,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;aACrB,EACD,UAAC,GAAU;gBACT,EAAE,CAAC,GAAG,CAAC,CAAC;aACT,CACF,CAAC;SACH,CAAC;IACJ,CAAC;IAED;;;;;AAKA,aAAgB,wBAAwB,CAAI,OAAuC;QACjF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,
UAAU,EAAE;YACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;SACzD;QACD,OAAO,UAAC,EAAsB;YAC5B,OAAO,CAAC,IAAI,CACV,UAAC,IAA2B;gBAC1B,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;aAC3E,EACD,UAAC,GAAU;gBACT,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;aAC3B,CACF,CAAC;SACH,CAAC;IACJ,CAAC;AAED,aAAgB,kBAAkB,CAAC,GAAQ,EAAE,WAAmB;;QAC9D,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;SACb;QACD,gBAAS,GAAC,WAAW,IAAG,GAAG,KAAG;IAChC,CAAC;IAED;;;;;AAKA,aAAgB,WAAW,CAAC,UAAe,EAAE,WAAkB;QAC7D,WAAW,CAAC,OAAO,CAAC,UAAC,WAAW;YAC9B,MAAM,CAAC,mBAAmB,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAC,IAAI;gBAC7D,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;aAC1D,CAAC,CAAC;SACJ,CAAC,CAAC;IACL,CAAC;IAED,IAAM,mBAAmB,GAAG,qKAAqK,CAAC;IAElM;;;;;AAKA,aAAgB,UAAU,CAAC,KAAa;QACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACzC,CAAC;IAED;;;;;;;AAOA,aAAgB,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;QAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;IAC5F,CAAC;IAED;;;;;;AAMA,aAAgB,eAAe,CAAC,KAAU;QACxC,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,KAAK,KAAK,KAAK,IAAI,CAAC;IACtF,CAAC;;IC/RD;AACA;QAME,oBACkB,YAAyC,EACzC,KAAe;YADf,6BAAA,EAAA,iBAAyC;YAAzC,iBAAY,GAAZ,YAAY,CAA6B;YACzC,UAAK,GAAL,KAAK,CAAU;SAC7B;QAEJ,wCAAmB,GAAnB,UAAoB,MAAc,EAAE,KAAU,EAAE,UAAkB;YAChE,IAAM,cAAc,GAAG,UAAC,cAAuC,EAAE,eAAoB;gBACnF,MAAM,IAAI,KAAK,CACb,OAAI,UAAU,wBAAiB,KAAK,2CAAoC,cAAc,YAAM,eAAe,MAAG,CAC/G,CAAC;aACH,CAAC;YACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;gBACtC,IAAA,KAYF,MAAM,CAAC,WAAW,EAXpB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,UAAU,gBAAA,EACV,OAAO,aAAA,EACP,WAAW,iBACS,CAAC;gBACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;oBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;oBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;oBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;oBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;oBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;iBACtC;gBACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;oBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;iBACxC;gBACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;oBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;iBACtC;gBACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;oBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;iBACxC;gBACD,IAAI,UAAU,IAAI,SAAS,IAAI,KAAK,GAAG,UAAU,KAAK,CAAC,EAAE;oBACvD,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;iBAC1C;gBACD,IAAI,OAAO,EAAE;oBACX,IAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC;oBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;wBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;qBACpC;iBACF;gBACD,IACE,WAAW;oBACX,KAAK,CAAC,IAAI,CAAC,UAAC,IAAS,EAAE,CAAS,EAAE,EAAc,IAAK,OAAA,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,GAAA,CAAC,EAC5E;oBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;iBAC5C;aACF;SACF;;;;;;;;;;;;QAaD,8BAAS,GAAT,UAAU,MAAc,EAAE,MAAW,EAAE,UAAmB;YACxD,IAAI,OAAO,GAAQ,EAAE,CAAC;YACtB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;YAC9C,IAAI,CAAC,UAAU,EAAE;gBACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;aACrC;YACD,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAC7C,OAAO,GAAG,EAAE,CAAC;aACd;YAED,IAAI,MAAM,C
AAC,UAAU,EAAE;gBACrB,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;aAC9B;;;;;;;;;;YAYO,IAAA,QAAQ,GAAe,MAAM,SAArB,EAAE,QAAQ,GAAK,MAAM,SAAX,CAAY;YAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;gBAChD,MAAM,IAAI,KAAK,CAAI,UAAU,0BAAuB,CAAC,CAAC;aACvD;YACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;gBAChD,MAAM,IAAI,KAAK,CAAI,UAAU,kCAA+B,CAAC,CAAC;aAC/D;YACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;gBACtD,MAAM,IAAI,KAAK,CAAI,UAAU,qBAAkB,CAAC,CAAC;aAClD;YAED,IAAI,MAAM,IAAI,SAAS,EAAE;gBACvB,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM;;gBAEL,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;gBACrD,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;oBACxC,OAAO,GAAG,MAAM,CAAC;iBAClB;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gDAAgD,CAAC,KAAK,IAAI,EAAE;oBACtF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;iBAC/D;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;oBAChD,IAAM,UAAU,GAAe,MAAoB,CAAC;oBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;iBAChF;qBAAM,IACL,UAAU,CAAC,KAAK,CAAC,uDAAuD,CAAC,KAAK,IAAI,EAClF;oBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBAC9D;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;iBACtD;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;iBACtD;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;oBACpD,OAAO,GAAG,qBAAqB,CAAC,IAAI,EAAE,MAAwB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBACrF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;oBACtD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAA0B,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBACzF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,sBAAsB,CAAC,IAAI,EAAE,MAAyB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBACvF;aACF;YACD,OAAO,OAAO,CAAC;SAChB;;;;;;;;;;;;QAaD,gCAAW,GAAX,UAAY,MAAc,EAAE,YAAiB,EAAE,UAAkB;YAC/D,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;;;oBAIzE,YAAY,GAAG,EAAE,CAAC;iBACnB;;gBAED,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;oBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;iBACpC;gBACD,OAAO,YAAY,CAAC;aACrB;YAED,IAAI,OAAY,CAAC;YACjB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;YACpC,IAAI,CAAC,UAAU,EAAE;gBACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;aACrC;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBAC9C,OAAO,GAAG,wBAAwB,CAAC,IAAI,EAAE,MAAyB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;aAC/F;iBAAM;gBACL,IAAI,IAAI,CAAC,KAAK,EAAE;;;;;;oBAMd,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;wBACpE,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;qBAClC;iBACF;gBAED,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;oBAC3C,OAAO,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC;oBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;wBAClB,OAAO,GAAG,YAAY,CAAC;qBACxB;iBACF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;oBACnD,IAAI,YAAY,KAAK,MAAM,EAAE;wBAC3B,OAAO,GAAG,IAAI,CAAC;qBAChB;yBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;wBACnC,OAAO,GAAG,KAAK,CAAC;qBACjB;yBAAM;wBACL,OAAO,GAAG,YAAY,CAAC;qBACxB;iBACF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,mDAAmD,CAAC,KAAK,IAAI,EAAE;oBACzF,OAAO,GAAG,YAAY,CAAC;iBACxB;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,qCAAqC,CAAC,KAAK,IAAI,EAAE;oBAC3E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAY,CAAC,CAAC;iBAClC;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;oBACpD,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;iBACxC;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAGC,YAAmB,CAAC,YAAY,CAAC,CAAC;iBAC7C;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,oBAAoB,CAAC,YAAY,CAAC,CAAC;iBAC9C;qBAAM,IAAI,UAAU,CAAC,KAAK,CAA
C,cAAc,CAAC,KAAK,IAAI,EAAE;oBACpD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAAwB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;iBAC7F;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;oBACtD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,CACX,CAAC;iBACH;aACF;YAED,IAAI,MAAM,CAAC,UAAU,EAAE;gBACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC;SAChB;QACH,iBAAC;IAAD,CAAC,IAAA;IAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;QACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;QACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;YAC1C,EAAE,GAAG,CAAC;SACP;QACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;IAC5B,CAAC;IAED,SAAS,iBAAiB,CAAC,MAAW;QACpC,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,SAAS,CAAC;SAClB;QACD,IAAI,EAAE,MAAM,YAAY,UAAU,CAAC,EAAE;YACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;SAC5F;;QAED,IAAM,GAAG,GAAGC,eAAsB,CAAC,MAAM,CAAC,CAAC;;QAE3C,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;IACnE,CAAC;IAED,SAAS,oBAAoB,CAAC,GAAW;QACvC,IAAI,CAAC,GAAG,EAAE;YACR,OAAO,SAAS,CAAC;SAClB;QACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;SACxF;;QAED,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;;QAElD,OAAOD,YAAmB,CAAC,GAAG,CAAC,CAAC;IAClC,CAAC;IAED,SAAS,kBAAkB,CAAC,IAAwB;QAClD,IAAM,OAAO,GAAa,EAAE,CAAC;QAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;QACtB,IAAI,IAAI,EAAE;YACR,IAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YAEjC,KAAmB,UAAQ,EAAR,qBAAQ,EAAR,sBAAQ,EAAR,IAAQ,EAAE;gBAAxB,IAAM,IAAI,iBAAA;gBACb,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;oBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;iBACvD;qBAAM;oBACL,YAAY,IAAI,IAAI,CAAC;oBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;oBAC3B,YAAY,GAAG,EAAE,CAAC;iBACnB;aACF;SACF;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAED,SAAS,cAAc,CAAC,CAAgB;QACtC,IAAI,CAAC,CAAC,EAAE;YACN,OAAO,SAAS,CAAC;SAClB;QAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;SAC3B;QACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;IAClD,CAAC;IAED,SAAS,cAAc,CAAC,CAAS;QAC/B,IAAI,CAAC,CAAC,EAAE;YACN,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC5B,CAAC;IAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;QAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;YACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;oBAC7B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,6BAA0B,CAAC,CAAC;iBAC9E;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;oBACvC,MAAM,IAAI,KAAK,CAAI,UAAU,sBAAgB,KAAK,+BAA2B,CAAC,CAAC;iBAChF;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBAC9C,IAAI,EAAE,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAIE,WAAiB,CAAC,KAAK,CAAC,CAAC,EAAE;oBACtE,MAAM,IAAI,KAAK,CACV,UAAU,sBAAgB,KAAK,gDAA4C,CAC/E,CAAC;iBACH;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACjD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;oBAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,8BAA2B,CAAC,CAAC;iBAC/E;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAM,UAAU,GAAG,OAAO,KAAK,CAAC;gBAChC,IACE,UAAU,KAAK,QAAQ;oBACvB,UAAU,KAAK,UAAU;oBACzB,EAAE,KAAK,YAAY,WAAW,CAAC;oBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;oBAC1B,EAAE,OAAO,IAAI,KAAK,UAAU,IAAI,KAAK,YAAY,IAAI,CAAC,EACtD;oBACA,MAAM,IAAI,KAAK,CACV,UAAU,0GAAuG,CACrH,CAAC;iBACH;aACF;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;QAClF,IAAI,CAAC,aAAa,EAAE
;YAClB,MAAM,IAAI,KAAK,CACb,uDAAqD,UAAU,sBAAmB,CACnF,CAAC;SACH;QACD,IAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,UAAC,IAAI;YACxC,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;aACnD;YACD,OAAO,IAAI,KAAK,KAAK,CAAC;SACvB,CAAC,CAAC;QACH,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CACV,KAAK,kCAA6B,UAAU,gCAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,MAAG,CACL,CAAC;SACH;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;QAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;YACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;gBAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;aAC9D;YACD,KAAK,GAAGD,eAAsB,CAAC,KAAK,CAAC,CAAC;SACvC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;QAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;YACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;gBAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;aAC9D;YACD,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;SAClC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;QAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;YACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBACvC,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;iBAC5F;gBACD,KAAK;oBACH,KAAK,YAAY,IAAI;0BACjB,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;0BACpC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;aACtD;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAClD,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;iBAC5F;gBACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;aACrF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,qBAAqB,CAAC,KAAK,IAAI,EAAE;gBACzD,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CAAI,UAAU,gEAA6D,CAAC,CAAC;iBAC7F;gBACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;aACrF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAClD,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CACV,UAAU,wEAAqE;wBAChF,mDAAmD,CACtD,CAAC;iBACH;gBACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;aAC/B;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAClD,IAAI,CAACE,UAAgB,CAAC,KAAK,CAAC,EAAE;oBAC5B,MAAM,IAAI,KAAK,CACV,UAAU,4DAAsD,KAAK,QAAI,CAC7E,CAAC;iBACH;gBACD,KAAK,GAAG,KAAK,CAAC;aACf;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB;QAElB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;YAC1B,MAAM,IAAI,KAAK,CAAI,UAAU,4BAAyB,CAAC,CAAC;SACzD;QACD,IAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;QACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;YACnD,MAAM,IAAI,KAAK,CACb,yDAAwD;iBACtD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;SACH;QACD,IAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACtC,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;SACzE;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB;QAElB,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,6BAA0B,CAAC,CAAC;SAC1D;QACD,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;QACpC,IAAI,CAAC,SAAS,IAAI,OA
AO,SAAS,KAAK,QAAQ,EAAE;YAC/C,MAAM,IAAI,KAAK,CACb,6DAA2D;iBACzD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;SACH;QACD,IAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAkB,UAAmB,EAAnB,KAAA,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAnB,cAAmB,EAAnB,IAAmB,EAAE;YAAlC,IAAM,GAAG,SAAA;YACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;SAC5F;QACD,OAAO,cAAc,CAAC;IACxB,CAAC;IAED;;;;;IAKA,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;QAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;QAC7C,IAAI,CAAC,UAAU,EAAE;YACf,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE;gBACd,MAAM,IAAI,KAAK,CACb,4BAAyB,UAAU,2CAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,QAAI,CACN,CAAC;aACH;YAED,IAAM,WAAW,GAAG,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YACvD,IAAI,CAAC,WAAW,EAAE;gBAChB,MAAM,IAAI,KAAK,CAAC,sDAAmD,SAAS,QAAI,CAAC,CAAC;aACnF;YACD,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC;YAC9C,IAAI,CAAC,UAAU,EAAE;gBACf,MAAM,IAAI,KAAK,CACb,qDAAqD;qBACnD,cAAW,IAAI,CAAC,SAAS,CACvB,WAAW,CACZ,qBAAc,SAAS,wBAAiB,UAAU,QAAI,CAAA,CAC1D,CAAC;aACH;SACF;QAED,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB;;QAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;YAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;SACzE;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,IAAM,OAAO,GAAQ,EAAE,CAAC;YACxB,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YAC1E,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;gBAAtC,IAAM,GAAG,SAAA;gBACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;gBACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;oBAC3B,SAAS;iBACV;gBAED,IAAI,QAAQ,SAAoB,CAAC;gBACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;gBAChC,IAAI,UAAU,CAAC,KAAK,EAAE;oBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;wBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;qBACnC;yBAAM;wBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;qBACpE;iBACF;qBAAM;oBACL,IAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;oBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;oBAEvB,KAAuB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;wBAAzB,IAAM,QAAQ,cAAA;wBACjB,IAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;wBAC3C,IAAI,WAAW,IAAI,SAAS,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;4BACxD,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;yBAC7B;wBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;qBACvC;iBACF;gBAED,IAAI,YAAY,IAAI,SAAS,EAAE;oBAC7B,IAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;0BAChC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;0BAChD,UAAU,CAAC;oBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;oBAC9B,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;oBAC5F,IACE,wBAAwB;wBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;wBAC3C,WAAW,IAAI,SAAS,EACxB;wBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;qBACrC;oBAED,IAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,CACnB,CAAC;oBACF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;wBAC1D,IAAI,cAAc,CAAC,cAAc,EAAE;;;;4BAIjC,YAAY,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,CAAC;4BACtC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;yBAC5C;6BAAM,IAAI,cAAc,CAAC,YAAY,EAAE;4BACtC,YAAY,CAAC,QAAQ,CAAC,aAAK,GAAC,cAAc,CAAC,cAAe,IAAG,eAAe,KAAE,CAAC;yBAChF;6BAAM;4BACL,YAAY,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;yBAC1C;qBACF;iBACF;aACF;YAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;YACpE,IAAI,0BAA0B,EAAE;gBAC9B,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;wCAC/B,cAAc;oBACvB,IAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,UAAC,EAAE,IAAK,OAAA,EAAE,KAAK,cAAc,GAAA,CAAC,CAAC;oBAC5E,IAAI,oBAAoB,EAAE;wBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,CAC1C,CAAC;qBACH;;gBARH,KAAK,IAAM,cAAc,IAAI,MAAM;4BAAxB,cAAc
;iBASxB;aACF;YAED,OAAO,OAAO,CAAC;SAChB;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,oBAAoB,CAAC,YAAoB;QAChD,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;IAC3C,CAAC;IAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB;QAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;YAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;SACnF;QAED,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;QAC1C,IAAM,oBAAoB,GAAa,EAAE,CAAC;QAE1C,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;YAAtC,IAAM,GAAG,SAAA;YACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;YAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5B,IAAA,cAAc,GAA8B,cAAc,eAA5C,EAAE,OAAO,GAAqB,cAAc,QAAnC,EAAE,cAAc,GAAK,cAAc,eAAnB,CAAoB;YACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;YACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;gBACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;aACxD;YAED,IAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;YAC3F,IAAI,sBAAsB,EAAE;gBAC1B,IAAM,UAAU,GAAQ,EAAE,CAAC;gBAC3B,KAAwB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;oBAA9C,IAAM,SAAS,SAAA;oBAClB,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;wBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,CACnB,CAAC;qBACH;oBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;iBACtC;gBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;aAC5B;iBAAM,IAAI,UAAU,CAAC,KAAK,EAAE;gBAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,CAAC,EAAE;oBACnD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,CAAC,CAAC,OAAQ,CAAC,EACxB,kBAAkB,CACnB,CAAC;iBACH;qBAAM;oBACL,IAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;oBACjE,IAAI,iBAAiB,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;oBACpD,IAAI,cAAc,CAAC,YAAY,EAAE;wBAC/B,iBAAiB,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;wBAC3C,iBAAiB,GAAG,iBAAiB,IAAI,iBAAiB,CAAC,cAAe,CAAC,CAAC;wBAE5E,IAAM,kBAAkB,GAAG,iBAAiB,KAAK,SAAS,CAAC;wBAC3D,IAAI,kBAAkB,EAAE;4BACtB,iBAAiB,GAAG,EAAE,CAAC;yBACxB;qBACF;oBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,iBAAiB,EACjB,kBAAkB,CACnB,CAAC;iBACH;aACF;iBAAM;;gBAEL,IAAI,gBAAgB,SAAA,CAAC;gBACrB,IAAI,GAAG,GAAG,YAAY,CAAC;;gBAEvB,KAAmB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;oBAArB,IAAM,IAAI,cAAA;oBACb,IAAI,CAAC,GAAG;wBAAE,MAAM;oBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;iBACjB;gBACD,gBAAgB,GAAG,GAAG,CAAC;gBACvB,IAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;;;;;;;;;;gBAUtE,IACE,wBAAwB;oBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;oBAC3C,gBAAgB,IAAI,SAAS,EAC7B;oBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;iBAC1C;gBAED,IAAI,eAAe,SAAA,CAAC;;gBAEpB,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;oBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;oBACrC,IAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;;;oBAGF,KAA2B,UAAwB,EAAxB,KAAA,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;wBAA1C,IAAA,WAAY,EAAX,KAAG,QAAA,EAAE,KAAK,QAAA;wBACpB,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,KAAG,CAAC,EAAE;4BACtC,aAAa,CAAC,KAAG,CAAC,GAAG,KAAK,CAAC;yBAC5B;qBACF;oBACD,QAAQ,GAAG,aAAa,CAAC;iBAC1B;qBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;oBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;oBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;iBACjC;aACF;SACF;QAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;QACpE,IAAI,0BAA0B,EAAE;YAC9B,IAAM,oBAAoB,GAAG,UAAC,gBAAwB;gBACpD,KAAK,IAAM,cAAc,IAAI,UAAU,EAAE;oBACvC,IAAM,K
AAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;oBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;wBACjC,OAAO,KAAK,CAAC;qBACd;iBACF;gBACD,OAAO,IAAI,CAAC;aACb,CAAC;YAEF,KAAK,IAAM,gBAAgB,IAAI,YAAY,EAAE;gBAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;oBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,CAC5C,CAAC;iBACH;aACF;SACF;aAAM,IAAI,YAAY,EAAE;YACvB,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAAxC,IAAM,GAAG,SAAA;gBACZ,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;oBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;oBACnC,CAAC,oBAAoB,CAAC,GAAG,CAAC,EAC1B;oBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;iBACnC;aACF;SACF;QAED,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB;;QAGlB,IAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;QAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,6DAA2D;iBACzD,8CAA0C,UAAY,CAAA,CACzD,CAAC;SACH;QACD,IAAI,YAAY,EAAE;YAChB,IAAM,cAAc,GAA2B,EAAE,CAAC;YAClD,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAAxC,IAAM,GAAG,SAAA;gBACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;aACpF;YACD,OAAO,cAAc,CAAC;SACvB;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB;;QAGlB,IAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;QACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC3C,MAAM,IAAI,KAAK,CACb,yDAAwD;iBACtD,8CAA0C,UAAY,CAAA,CACzD,CAAC;SACH;QACD,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;;gBAEhC,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;aAC/B;YAED,IAAM,SAAS,GAAG,EAAE,CAAC;YACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,EAAK,UAAU,SAAI,CAAC,MAAG,CAAC,CAAC;aACxF;YACD,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;QAExD,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;QAC5F,IAAI,wBAAwB,EAAE;YAC5B,IAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;YAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;gBAClC,IAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;gBACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;oBACnC,IAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;oBACjE,IAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;0BAC3B,kBAAkB;0BAClB,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;oBAC1C,IAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;oBACrF,IAAI,iBAAiB,EAAE;wBACrB,MAAM,GAAG,iBAAiB,CAAC;qBAC5B;iBACF;aACF;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;QAEvB,QACE,MAAM,CAAC,IAAI,CAAC,wBAAwB;YACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;YACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,EACpE;IACJ,CAAC;IAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;QAClF,QACE,QAAQ;YACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;YACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,EAC/D;IACJ,CAAC;IAoHD;AACA,aAAgB,eAAe,CAAC,WAAgB;QAC9C,IAAI,WAAW,IAAI,SAAS;YAAE,OAAO,SAAS,CAAC;QAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;YACrC,WAAW,GAAGF,eAAsB,CAAC,WAAW,CAAC,CAAC;YAClD,OAAO,WAAW,CAAC;SACpB;aAAM,IAAI,WAAW,YAAY,IAAI,EAAE;YACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;SAClC;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;YACrC,IAAM,KAAK,GAAG,EAAE,CAAC;YACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;aAC7C;YACD,OAAO,KAAK,CAAC;SACd;aAAM,IAAI,
OAAO,WAAW,KAAK,QAAQ,EAAE;YAC1C,IAAM,UAAU,GAA2B,EAAE,CAAC;YAC9C,KAAK,IAAM,QAAQ,IAAI,WAAW,EAAE;gBAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;aAC/D;YACD,OAAO,UAAU,CAAC;SACnB;QACD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;;IAGA,SAAS,OAAO,CAAmB,CAAW;QAC5C,IAAM,MAAM,GAAQ,EAAE,CAAC;QACvB,KAAkB,UAAC,EAAD,OAAC,EAAD,eAAC,EAAD,IAAC,EAAE;YAAhB,IAAM,GAAG,UAAA;YACZ,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;SACnB;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,QAAa,UAAU,GAAG,OAAO,CAAC;QAChC,WAAW;QACX,SAAS;QACT,WAAW;QACX,WAAW;QACX,MAAM;QACN,UAAU;QACV,iBAAiB;QACjB,YAAY;QACZ,MAAM;QACN,QAAQ;QACR,QAAQ;QACR,UAAU;QACV,QAAQ;QACR,QAAQ;QACR,UAAU;QACV,UAAU;KACX,CAAC;;ICtiCF;AACA,aAgKgB,iBAAiB,CAAC,MAAW;QAC3C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,OAAO,KAAK,CAAC;SACd;QACD,IACE,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ;YAC9B,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ;YACjC,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ;YAClC,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;YACjC,OAAO,MAAM,CAAC,yBAAyB,KAAK,UAAU;YACtD,OAAO,MAAM,CAAC,OAAO,KAAK,UAAU;YACpC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU,EAClC;YACA,OAAO,IAAI,CAAC;SACb;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;;AAQA;QAyCE,qBACE,GAAY,EACZ,MAAoB,EACpB,IAAU,EACV,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,aAA6B,EAC7B,aAAsB;YAEtB,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;YAC7C,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;YACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;YAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;YAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;YACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;YAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;YAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;YAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;YACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;YAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;YACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;YAC3B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;YACnC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;SACpC;;;;;;QAOD,+CAAyB,GAAzB;YACE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;aACjD;SACF;;;;;;QAOD,6BAAO,GAAP,UAAQ,OAA8B;YACpC,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;aAC/C;YAED,IAAI,OAAO,CAAC,MAAM,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBAC/E,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;aACrD;YAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;aACH;YAED,IACE,CAAC,OAAO,CAAC,YAAY,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ;iBACvF,OAAO,CAAC,GAAG,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EACvE;gBACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;aACvF;;YAGD,IAAI,OAAO,CAAC,GAAG,EAAE;gBACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;oBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;iBAC1D;gBACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;aACxB;;YAGD,IAAI,OAAO,CAAC,MAAM,EAAE;gBAClB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;gBAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;wBACrB,OAAO,CAAC,MAAM;wBACd,4CAA4C;wBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;iBACH;aACF;YACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;;YAG1D,IAAI,OAAO,CAAC,YAAY,EAAE;gBAChB,IAAA,cAAY,GAAqB,OAAO,aAA5B,EAAE,gBAAc,GAAK,OAAO,eAAZ,CAAa;gBACjD,IAAI,OAAO,cAAY,KAAK,QAAQ,EAAE;oBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;iBACnE;gBACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;
oBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;iBAClD;gBACD,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;gBAChC,IAAI,KAAG,GACL,OAAO;qBACN,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC;qBACjC,cAAY,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,cAAY,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAY,CAAC,CAAC;gBACxE,IAAM,QAAQ,GAAG,KAAG,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;gBAC9C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;oBAC/B,IAAI,CAAC,gBAAc,EAAE;wBACnB,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,6EAA0E,CACxG,CAAC;qBACH;oBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;wBAC7B,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;wBACxC,IAAM,SAAS,GAAI,gBAAyC,CAAC,aAAa,CAAC,CAAC;wBAC5E,IACE,SAAS,KAAK,IAAI;4BAClB,SAAS,KAAK,SAAS;4BACvB,EAAE,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;4BACA,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,qCAAgC,aAAe;iCAC1E,oCAAkC,gBAAc,WAAM,IAAI,CAAC,SAAS,CAClE,gBAAc,EACd,SAAS,EACT,CAAC,CACF,MAAG,CAAA;iCACJ,8EAA0E,aAAa,kCAA6B,CAAA;iCACpH,6CAAwC,aAAa,mEAA6D,CAAA,CACrH,CAAC;yBACH;wBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;4BAC3C,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;yBACxD;wBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;4BAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;gCACpB,MAAM,IAAI,KAAK,CACb,4BAA0B,aAAa,0EAAmE,CAC3G,CAAC;6BACH;4BACD,IAAI,SAAS,CAAC,eAAe,EAAE;gCAC7B,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;6BAC1C;iCAAM;gCACL,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;6BAC9D;yBACF;qBACF,CAAC,CAAC;iBACJ;gBACD,IAAI,CAAC,GAAG,GAAG,KAAG,CAAC;aAChB;;YAGD,IAAI,OAAO,CAAC,eAAe,EAAE;gBAC3B,IAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;gBAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;oBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;wBAC3E,yFAAqF;wBACrF,mJAA2I,CAC9I,CAAC;iBACH;;gBAED,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;iBACjB;;gBAED,IAAM,WAAW,GAAG,EAAE,CAAC;;gBAEvB,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;gBAChB,KAAK,IAAM,cAAc,IAAI,eAAe,EAAE;oBAC5C,IAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;oBACxD,IAAI,UAAU,EAAE;wBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;4BAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;4BACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;yBAC7D;6BAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;4BACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;gCACrB,MAAM,IAAI,KAAK,CACb,6BAA2B,cAAc,0EAAmE,CAC7G,CAAC;6BACH;4BACD,IAAI,UAAU,CAAC,eAAe,EAAE;gCAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;gCAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;6BAC/C;iCAAM;gCACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;gCAC9E,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;6BACnE;yBACF;qBACF;iBACF;;gBAED,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACnC;;YAGD,IAAI,OAAO,CAAC,OAAO,EAAE;gBACnB,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;gBAChC,KAAyB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;oBAAlD,IAAM,UAAU,SAAA;oBACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;iBACnD;aACF;;YAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;gBACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;aAC9C;;YAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;gBAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,YAAY,EAAE,CAAC,CAAC;aAC5D;;YAGD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;gBACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;aACrE;;YAGD,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;YACzB,IAAI,OAA
O,CAAC,IAAI,IAAI,SAAS,EAAE;;gBAE7B,IAAI,OAAO,CAAC,YAAY,EAAE;oBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;wBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;qBAClD;oBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;wBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;qBAC9D;iBACF;qBAAM;oBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;wBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;qBACH;oBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;wBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;qBAC1C;iBACF;aACF;YAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;YACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;YACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;YACjD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;YAC3C,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;YAErD,OAAO,IAAI,CAAC;SACb;;;;;QAMD,2BAAK,GAAL;YACE,IAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,CACnB,CAAC;YAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;aACjC;YAED,IAAI,IAAI,CAAC,aAAa,EAAE;gBACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;aAC3C;YAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;gBAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;aACnD;YAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;gBAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;aAC/D;YAED,OAAO,MAAM,CAAC;SACf;QACH,kBAAC;IAAD,CAAC;;IChhBD;IACA;AACA;IACA;IACA;AACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;AACA;IACA,IAAI,aAAa,GAAG,SAAS,CAAC,EAAE,CAAC,EAAE;IACnC,IAAI,aAAa,GAAG,MAAM,CAAC,cAAc;IACzC,SAAS,EAAE,SAAS,EAAE,EAAE,EAAE,YAAY,KAAK,IAAI,UAAU,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,CAAC;IACpF,QAAQ,UAAU,CAAC,EAAE,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IACnF,IAAI,OAAO,aAAa,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/B,CAAC,CAAC;AACF;AACA,IAAO,SAAS,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE;IAChC,IAAI,aAAa,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACxB,IAAI,SAAS,EAAE,GAAG,EAAE,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE;IAC3C,IAAI,CAAC,CAAC,SAAS,GAAG,CAAC,KAAK,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,SAAS,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC;IACzF,CAAC;AACD;AACA,IAAO,IAAI,QAAQ,GAAG,WAAW;IACjC,IAAI,QAAQ,GAAG,MAAM,CAAC,MAAM,IAAI,SAAS,QAAQ,CAAC,CAAC,EAAE;IACrD,QAAQ,KAAK,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;IAC7D,YAAY,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;IAC7B,YAAY,KAAK,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACzF,SAAS;IACT,QAAQ,OAAO,CAAC,CAAC;IACjB,MAAK;IACL,IAAI,OAAO,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;IAC3C,EAAC;AACD,AA2BA;AACA,IAAO,SAAS,SAAS,CAAC,OAAO,EAAE,UAAU,EAAE,CAAC,EAAE,SAAS,EAAE;IAC7D,IAAI,SAAS,KAAK,CAAC,KAAK,EAAE,EAAE,OAAO,KAAK,YAAY,CAAC,GAAG,KAAK,GAAG,IAAI,CAAC,CAAC,UAAU,OAAO,EAAE,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE;IAChH,IAAI,OAAO,KAAK,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,EAAE,UAAU,OAAO,EAAE,MAAM,EAAE;IAC/D,QAAQ,SAAS,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,
CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;IACnG,QAAQ,SAAS,QAAQ,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;IACtG,QAAQ,SAAS,IAAI,CAAC,MAAM,EAAE,EAAE,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,EAAE;IACtH,QAAQ,IAAI,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,KAAK,CAAC,OAAO,EAAE,UAAU,IAAI,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC;IAC9E,KAAK,CAAC,CAAC;IACP,CAAC;AACD;AACA,IAAO,SAAS,WAAW,CAAC,OAAO,EAAE,IAAI,EAAE;IAC3C,IAAI,IAAI,CAAC,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;IACrH,IAAI,OAAO,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,WAAW,EAAE,OAAO,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;IAC7J,IAAI,SAAS,IAAI,CAAC,CAAC,EAAE,EAAE,OAAO,UAAU,CAAC,EAAE,EAAE,OAAO,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;IACtE,IAAI,SAAS,IAAI,CAAC,EAAE,EAAE;IACtB,QAAQ,IAAI,CAAC,EAAE,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC;IACtE,QAAQ,OAAO,CAAC,EAAE,IAAI;IACtB,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IACzK,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;IACpD,YAAY,QAAQ,EAAE,CAAC,CAAC,CAAC;IACzB,gBAAgB,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,MAAM;IAC9C,gBAAgB,KAAK,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;IACxE,gBAAgB,KAAK,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS;IACjE,gBAAgB,KAAK,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,SAAS;IACjE,gBAAgB;IAChB,oBAAoB,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,EAAE;IAChI,oBAAoB,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;IAC1G,oBAAoB,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,MAAM,EAAE;IACzF,oBAAoB,IAAI,CAAC,IAAI,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,EAAE;IACvF,oBAAoB,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;IAC1C,oBAAoB,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,SAAS;IAC3C,aAAa;IACb,YAAY,EAAE,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;IACvC,SAAS,CAAC,OAAO,CAAC,EAAE,EAAE,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,
CAAC,EAAE,SAAS,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE;IAClE,QAAQ,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IACzF,KAAK;IACL,CAAC;AACD,AA4CA;AACA,IAAO,SAAS,cAAc,GAAG;IACjC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;IACxF,IAAI,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE;IACpD,QAAQ,KAAK,IAAI,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE;IACzE,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,OAAO,CAAC,CAAC;IACb,CAAC;;IC3JD;AACA;QAK+B,6BAAK;QAUlC,mBACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAChC,IAAU;YANZ,YAQE,kBAAM,OAAO,CAAC,SAQf;YAPC,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YACjB,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;YAC7B,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC;YACvB,KAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YAEjB,MAAM,CAAC,cAAc,CAAC,KAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;;SAClD;QAzBe,4BAAkB,GAAW,oBAAoB,CAAC;QAClD,+BAAqB,GAAW,uBAAuB,CAAC;QACxD,qBAAW,GAAW,aAAa,CAAC;QAwBtD,gBAAC;KAAA,CA3B8B,KAAK;;ICNpC;AACA,IAQA;;;AAGA;QAAA;SAwGC;QAvGQ,mCAAW,GAAlB,UAAmB,OAAwB;YACzC,IAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;YAEjC,IAAI,OAAO,CAAC,aAAa,EAAE;gBACzB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;aAC7E;YAED,IAAI,OAAO,CAAC,aAAa,EAAE;gBACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;aACvE;YAED,IAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;YACxC,IAAI,WAAW,EAAE;gBACf,IAAM,UAAQ,GAAG;oBACf,GAAG,CAAC,KAAK,EAAE,CAAC;iBACb,CAAC;gBACF,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;gBAChD,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;oBACvC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,IAAI,EAAE;wBAC1C,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;qBACpD;iBACF,CAAC,CAAC;aACJ;YAED,mBAAmB,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,gBAAgB,CAAC,CAAC;YAC1D,mBAAmB,CAAC,GAAG,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;YAErD,IAAI,OAAO,CAAC,QAAQ,EAAE;gBACpB,IAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;gBAClC,IAAM,aAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;gBACnC,IAAM,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;oBAC9C,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;wBAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;qBACrD;yBAAM;wBACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;qBAChC;iBACF,CAAC;gBACF,KAAsB,UAAqB,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;oBAAxC,IAAM,OAAO,SAAA;oBAChB,IAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;oBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;wBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;4BACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;yBACxC;qBACF;yBAAM;wBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;qBACrC;iBACF;gBAED,OAAO,CAAC,IAAI,GAAG,aAAW,CAAC;gBAC3B,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;gBAC7B,IAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;;oBAEpE,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iBACxC;aACF;YAED,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;YACtC,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAC9B,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAC9C,KAAqB,UAA8B,EAA9B,KAAA,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;gBAAhD,IAAM,MAAM,SAAA;gBACf,GAAG,CAAC,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;aACjD;YACD,GAAG,CAA
C,YAAY,GAAG,OAAO,CAAC,kBAAkB,GAAG,MAAM,GAAG,MAAM,CAAC;;YAGhE,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,SAAS,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;YAE3D,IAAI,OAAO,CAAC,kBAAkB,EAAE;gBAC9B,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;oBACjC,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;;wBAEvC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,gBAAgB,EAAE;4BACtD,IAAM,QAAQ,GAAG,IAAI,OAAO,CAAO,UAAC,OAAO,EAAE,MAAM;gCACjD,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;oCAC3B,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;iCACvB,CAAC,CAAC;gCACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;6BAC7C,CAAC,CAAC;4BACH,OAAO,CAAC;gCACN,OAAO,SAAA;gCACP,MAAM,EAAE,GAAG,CAAC,MAAM;gCAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;gCAC1B,QAAQ,UAAA;6BACT,CAAC,CAAC;yBACJ;qBACF,CAAC,CAAC;oBACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;iBAC7C,CAAC,CAAC;aACJ;iBAAM;gBACL,OAAO,IAAI,OAAO,CAAC,UAAU,OAAO,EAAE,MAAM;oBAC1C,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;wBAC3B,OAAA,OAAO,CAAC;4BACN,OAAO,SAAA;4BACP,MAAM,EAAE,GAAG,CAAC,MAAM;4BAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;4BAC1B,UAAU,EAAE,GAAG,CAAC,YAAY;yBAC7B,CAAC;qBAAA,CACH,CAAC;oBACF,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;iBAC7C,CAAC,CAAC;aACJ;SACF;QACH,oBAAC;IAAD,CAAC,IAAA;IAED,SAAS,mBAAmB,CAC1B,GAA8B,EAC9B,QAAoD;QAEpD,IAAI,QAAQ,EAAE;YACZ,GAAG,CAAC,gBAAgB,CAAC,UAAU,EAAE,UAAC,QAAQ;gBACxC,OAAA,QAAQ,CAAC;oBACP,WAAW,EAAE,QAAQ,CAAC,MAAM;iBAC7B,CAAC;aAAA,CACH,CAAC;SACH;IACH,CAAC;IAED;AACA,aAAgB,YAAY,CAAC,GAAmB;QAC9C,IAAM,eAAe,GAAG,IAAI,WAAW,EAAE,CAAC;QAC1C,IAAM,WAAW,GAAG,GAAG;aACpB,qBAAqB,EAAE;aACvB,IAAI,EAAE;aACN,KAAK,CAAC,SAAS,CAAC,CAAC;QACpB,KAAmB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;YAA3B,IAAM,IAAI,oBAAA;YACb,IAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YAChC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;YACxC,IAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;YAC1C,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,CAAC;SAC9C;QACD,OAAO,eAAe,CAAC;IACzB,CAAC;IAED,SAAS,qBAAqB,CAC5B,OAAwB,EACxB,GAAmB,EACnB,MAA0B;QAE1B,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;YAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,+BAA6B,OAAO,CAAC,GAAK,EAC1C,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;SAAA,CACF,CAAC;QACF,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;YAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CAAC,yBAAyB,EAAE,SAAS,CAAC,qBAAqB,EAAE,SAAS,EAAE,OAAO,CAAC,CAC9F;SAAA,CACF,CAAC;QACF,GAAG,CAAC,gBAAgB,CAAC,SAAS,EAAE;YAC9B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,gBAAc,GAAG,CAAC,OAAO,gBAAa,EACtC,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;SAAA,CACF,CAAC;IACJ,CAAC;;ICjLD;AACA,IAKA,WAAY,oBAAoB;;;;QAI9B,6DAAG,CAAA;;;;QAKH,iEAAK,CAAA;;;;QAKL,qEAAO,CAAA;;;;QAKP,+DAAI,CAAA;IACN,CAAC,EApBWG,4BAAoB,KAApBA,4BAAoB,QAoB/B;;IC1BD;IACA;IA2EA;;;;;AAKA,aAAgB,iBAAiB,CAAC,UAAmB;;;;;;QAMnD,MAAM,cAAc,GAAG,UAGtB,CAAC;QACF,QACE,cAAc;YACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;aAC5C,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,EAChF;IACJ,CAAC;;IChGD;IACA;IAkDA;;;;;AAKA,aAAgB,0BAA0B,CAAC,SAA6B;QACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;IACnF,CAAC;AAED,aAAgB,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;QAEd,IAAI,MAAc,CAAC;QACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;YACrC,MAAM,GAAG,aAAa,CAAC;SACxB;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;YACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SAClC;aAAM;YACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;SACjC;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;;ICzED;AACA,aAmFgB,iBAAiB,CAAC,aAA4B;QAC5D,IAAI,MAAM,GAAG,KAAK,CAAC;QACnB,KAAK,IAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;YAChD,IAAM,iBAAiB,GAAsB,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;YACjF,IACE,iBAAiB,CAAC,UAAU;gBAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;gBACA,MAAM,GAAG,IAAI,CAAC;gBACd,MAAM;aACP;SACF;QACD,OAAO,MAAM,
CAAC;IAChB,CAAC;;ICjGD;IACA;IAEA,IAAM,MAAM,GAAG,IAAI,SAAS,EAAE,CAAC;AAC/B,aAAgB,QAAQ,CAAC,GAAW;QAClC,IAAI;YACF,IAAM,GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;YAC3D,YAAY,CAAC,GAAG,CAAC,CAAC;YAElB,IAAM,GAAG,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;YAC3C,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;SAC7B;QAAC,OAAO,GAAG,EAAE;YACZ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SAC5B;IACH,CAAC;IAED,IAAI,OAAO,GAAG,EAAE,CAAC;IACjB,IAAI;QACF,OAAO,GAAG,MAAM,CAAC,eAAe,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;aAC3F,YAAa,CAAC;KAClB;IAAC,OAAO,OAAO,EAAE;;KAEjB;IAED,SAAS,YAAY,CAAC,GAAa;QACjC,IAAI,OAAO,EAAE;YACX,IAAM,YAAY,GAAG,GAAG,CAAC,sBAAsB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;YACxE,IAAI,YAAY,CAAC,MAAM,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAE,CAAC,SAAS,CAAC,CAAC;aAClD;SACF;IACH,CAAC;IAED,SAAS,SAAS,CAAC,IAAU;QAC3B,OAAO,CAAC,CAAE,IAAgB,CAAC,UAAU,CAAC;IACxC,CAAC;IAED;;;;IAIA,SAAS,uBAAuB,CAAC,IAAU;QACzC,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,aAAa,EAAE,GAAG,IAAI,GAAG,SAAS,CAAC;IACpE,CAAC;IAED,SAAS,WAAW,CAAC,IAAU;QAC7B,IAAI,MAAM,GAAQ,EAAE,CAAC;QAErB,IAAM,cAAc,GAAW,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QAEtD,IAAM,cAAc,GAAS,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QAChD,IAAM,kBAAkB,GACtB,CAAC,cAAc;YACb,cAAc,KAAK,CAAC;YACpB,cAAc,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS;YAC1C,cAAc,CAAC,SAAS;YAC1B,SAAS,CAAC;QAEZ,IAAM,qBAAqB,GAAwB,uBAAuB,CAAC,IAAI,CAAC,CAAC;QACjF,IAAI,qBAAqB,EAAE;YACzB,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,qBAAqB,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBAChE,IAAM,IAAI,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;gBACjD,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;aAC7C;YAED,IAAI,kBAAkB,EAAE;gBACtB,MAAM,CAAC,GAAG,CAAC,GAAG,kBAAkB,CAAC;aAClC;SACF;aAAM,IAAI,cAAc,KAAK,CAAC,EAAE;YAC/B,MAAM,GAAG,EAAE,CAAC;SACb;aAAM,IAAI,kBAAkB,EAAE;YAC7B,MAAM,GAAG,kBAAkB,CAAC;SAC7B;QAED,IAAI,CAAC,kBAAkB,EAAE;YACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;gBACvC,IAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;;gBAEjC,IAAI,KAAK,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS,EAAE;oBACrC,IAAM,WAAW,GAAQ,WAAW,CAAC,KAAK,CAAC,CAAC;oBAC5C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;wBAC3B,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,WAAW,CAAC;qBACtC;yBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;wBAChD,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;qBAC1C;yBAAM;wBACL,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,CAAC;qBAChE;iBACF;aACF;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;IACA,IAAM,GAAG,GAAG,QAAQ,CAAC,cAAc,CAAC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IACrE,IAAM,UAAU,GAAG,IAAI,aAAa,EAAE,CAAC;AAEvC,aAAgB,YAAY,CAAC,GAAQ,EAAE,IAA4B;QACjE,IAAM,QAAQ,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC,QAAQ,KAAK,MAAM,CAAC;QACnD,IAAM,GAAG,GAAG,SAAS,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;QACxC,QACE,yDAAyD,GAAG,UAAU,CAAC,iBAAiB,CAAC,GAAG,CAAC,EAC7F;IACJ,CAAC;IAED,SAAS,eAAe,CAAC,KAAgD;QACvE,IAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAkB,UAAkB,EAAlB,KAAA,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAlB,cAAkB,EAAlB,IAAkB,EAAE;YAAjC,IAAM,GAAG,SAAA;YACZ,IAAM,IAAI,GAAG,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;YACtC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;YACnC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACnB;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,SAAS,CAAC,GAAQ,EAAE,WAAmB;QAC9C,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,SAAS,EAAE;YAClF,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;YAC5C,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,QAAQ,EAAE,CAAC;YAClC,OAAO,CAAC,IAAI,CAAC,CAAC;SACf;a
AAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YAC7B,IAAM,MAAM,GAAG,EAAE,CAAC;YAClB,KAAwB,UAAG,EAAH,WAAG,EAAH,iBAAG,EAAH,IAAG,EAAE;gBAAxB,IAAM,SAAS,YAAA;gBAClB,KAAoB,UAAiC,EAAjC,KAAA,SAAS,CAAC,SAAS,EAAE,WAAW,CAAC,EAAjC,cAAiC,EAAjC,IAAiC,EAAE;oBAAlD,IAAM,KAAK,SAAA;oBACd,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;iBACpB;aACF;YACD,OAAO,MAAM,CAAC;SACf;aAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;YAClC,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;YAC5C,KAAkB,UAAgB,EAAhB,KAAA,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,EAAhB,cAAgB,EAAhB,IAAgB,EAAE;gBAA/B,IAAM,GAAG,SAAA;gBACZ,IAAI,GAAG,KAAK,GAAG,EAAE;oBACf,KAAmB,UAAyB,EAAzB,KAAA,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;wBAAzC,IAAM,IAAI,SAAA;wBACb,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;qBACpC;iBACF;qBAAM;oBACL,KAAoB,UAAwB,EAAxB,KAAA,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;wBAAzC,IAAM,KAAK,SAAA;wBACd,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;qBACzB;iBACF;aACF;YACD,OAAO,CAAC,IAAI,CAAC,CAAC;SACf;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,0CAAwC,GAAK,CAAC,CAAC;SAChE;IACH,CAAC;;ICpJD;AACA;QAmBE,2BACW,WAA0B,EAC1B,QAAkC;YADlC,gBAAW,GAAX,WAAW,CAAe;YAC1B,aAAQ,GAAR,QAAQ,CAA0B;SACzC;;;;;;QASG,qCAAS,GAAhB,UAAiB,QAA8B;YAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;SAC1C;;;;;;;QAQM,+BAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;YACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACtC;QACH,wBAAC;IAAD,CAAC,IAAA;IAsBD;;;AAGA;QACE,8BAAoB,OAA4B;YAA5B,YAAO,GAAP,OAAO,CAAqB;SAAI;;;;;;QAO7C,wCAAS,GAAhB,UAAiB,QAA8B;YAC7C,QACE,CAAC,CAAC,IAAI,CAAC,OAAO;gBACd,QAAQ,KAAKA,4BAAoB,CAAC,GAAG;gBACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EACxC;SACH;;;;;;;QAQM,kCAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;YACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;gBAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;aACrC;SACF;QACH,2BAAC;IAAD,CAAC;;ICjGD;AACA,IAmCA;;;;AAIA,aAAgB,qBAAqB,CACnC,2BAAyD;QAEzD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,2BAA2B,EAAE,OAAO,CAAC,CAAC;aACpF;SACF,CAAC;IACJ,CAAC;AAED,IAAO,IAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,IAAO,IAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;IAElF;;;;IAIA;QAA2C,yCAAiB;QAI1D,+BACE,UAAyB,EACzB,2BAAoE,EACpE,OAAiC;YAHnC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAM3B;YAJC,KAAI,CAAC,gBAAgB;gBACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,KAAK,uBAAuB,CAAC;YAC/F,KAAI,CAAC,eAAe;gBAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,KAAK,sBAAsB,CAAC;;SAC9F;QAEY,2CAAW,GAAxB,UAAyB,OAAwB;;;;oBAC/C,sBAAO,IAAI,CAAC,WAAW;6BACpB,WAAW,CAAC,OAAO,CAAC;6BACpB,IAAI,CAAC,UAAC,QAA+B;4BACpC,OAAA,uBAAuB,CAAC,KAAI,CAAC,gBAAgB,EAAE,KAAI,CAAC,eAAe,EAAE,QAAQ,CAAC;yBAAA,CAC/E,EAAC;;;SACL;QACH,4BAAC;IAAD,CAxBA,CAA2C,iBAAiB,GAwB3D;IAED,SAAS,oBAAoB,CAC3B,cAAqC;QAErC,IAAI,MAAqC,CAAC;QAC1C,IAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;QACxD,IAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;QACvE,IAAI,aAAa,EAAE;YACjB,IAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;YAC1E,IAAI,CAAC,uBAAuB,EAAE;gBAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;aACzD;iBAAM;gBACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aACjE;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,yBAAyB,CAAC,cAAqC;QACtE,IAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;QAC3C,IAAI,MAAe,CAAC;QACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;YACnC,MAAM,GAAG,IAAI,CAAC;SACf;aAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;YACjD,MAAM,GAAG,iBAAiB,CAAC;SAC5B;aAAM;YACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;SAC5C;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,aAAgB,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B;QAE/B,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,IAAI,CAAC,UAAC,cAAc;YAC5E,IAAM,iBAAiB,GAAY,yBAAyB,CAAC,cAAc,CAAC,CAAC;
YAC7E,IAAI,iBAAiB,EAAE;gBACrB,IAAM,aAAa,GAA8B,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;gBACtF,IAAI,aAAa,IAAI,aAAa,CAAC,SAAS,EAAE;oBAC5C,IAAM,UAAU,GAAW,cAAc,CAAC,MAAM,CAAC;oBAEjD,IAAM,mBAAmB,GAAa,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;oBAE3E,IAAM,wBAAwB,GAC5B,mBAAmB,CAAC,MAAM,KAAK,CAAC;yBAC/B,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC;oBAE7E,IAAM,YAAY,GAAkC,oBAAoB,CAAC,cAAc,CAAC,CAAC;oBAEzF,IAAM,oBAAoB,GAAY,wBAAwB;0BAC1D,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG;0BACrC,CAAC,CAAC,YAAY,CAAC;oBACnB,IAAI,CAAC,oBAAoB,EAAE;wBACzB,IAAM,mBAAmB,GAAsB,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;wBAC/E,IAAI,mBAAmB,EAAE;4BACvB,IAAM,mBAAmB,GAAW,iBAAiB,CAAC,aAAa,CAAC;kCAChE,6BAA2B,UAAY;kCACtC,cAAc,CAAC,UAAqB,CAAC;4BAE1C,IAAM,KAAK,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,CAAC;4BACjD,KAAK,CAAC,UAAU,GAAG,UAAU,CAAC;4BAC9B,KAAK,CAAC,OAAO,GAAGC,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;4BAC3D,KAAK,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;4BAErD,IAAI,mBAAmB,GAA2B,cAAc,CAAC,UAAU,CAAC;4BAC5E,IAAI;gCACF,IAAI,mBAAmB,EAAE;oCACvB,IAAM,yBAAyB,GAC7B,mBAAmB,CAAC,UAAU,CAAC;oCACjC,IACE,yBAAyB;wCACzB,yBAAyB,CAAC,cAAc,KAAK,YAAY,EACzD;wCACA,IAAI,mBAAmB,CAAC,KAAK,EAAE;4CAC7B,mBAAmB,GAAG,mBAAmB,CAAC,KAAK,CAAC;yCACjD;wCACD,IAAI,mBAAmB,CAAC,IAAI,EAAE;4CAC5B,KAAK,CAAC,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;yCACvC;wCACD,IAAI,mBAAmB,CAAC,OAAO,EAAE;4CAC/B,KAAK,CAAC,OAAO,GAAG,mBAAmB,CAAC,OAAO,CAAC;yCAC7C;qCACF;yCAAM;wCACL,IAAI,aAAa,GAAQ,mBAAmB,CAAC;wCAC7C,IAAI,mBAAmB,CAAC,KAAK,EAAE;4CAC7B,aAAa,GAAG,mBAAmB,CAAC,KAAK,CAAC;yCAC3C;wCAED,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;wCAChC,IAAI,aAAa,CAAC,OAAO,EAAE;4CACzB,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;yCACvC;qCACF;oCAED,IAAI,yBAAyB,EAAE;wCAC7B,IAAI,kBAAkB,GAAQ,mBAAmB,CAAC;wCAClD,IACE,aAAa,CAAC,KAAK;4CACnB,yBAAyB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAC3D;4CACA,kBAAkB;gDAChB,OAAO,mBAAmB,KAAK,QAAQ;sDACnC,mBAAmB,CAAC,yBAAyB,CAAC,cAAe,CAAC;sDAC9D,EAAE,CAAC;yCACV;wCACD,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC/C,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,CACb,CAAC;qCACH;iCACF;6BACF;4BAAC,OAAO,YAAY,EAAE;gCACrB,KAAK,CAAC,OAAO,GAAG,aAAW,YAAY,CAAC,OAAO,0DAAqD,cAAc,CAAC,UAAU,iCAA8B,CAAC;6BAC7J;4BACD,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;yBAC9B;qBACF;yBAAM,IAAI,YAAY,EAAE;wBACvB,IAAI,YAAY,CAAC,UAAU,EAAE;4BAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;4BACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;gCACpF,kBAAkB;oCAChB,OAAO,kBAAkB,KAAK,QAAQ;0CAClC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;0CAC3D,EAAE,CAAC;6BACV;4BACD,IAAI;gCACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,CAC1B,CAAC;6BACH;4BAAC,OAAO,KAAK,EAAE;gCACd,IAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,WAAS,KAAK,sDAAiD,cAAc,CAAC,UAAY,CAC3F,CAAC;gCACF,SAAS,CAAC,OAAO,GAAGD,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;gCAC/D,SAAS,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;gCACzD,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;6BAClC;yBACF;6BAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;;4BAE9C,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;yBAC7E;wBAED,IAAI,YAAY,CAAC,aAAa,EAAE;4BAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;yBACH;qBACF;iBACF;aACF;YACD,OAAO,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;SACxC,CAAC,CAAC;IACL,CAAC;IAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC;QAExC,IAAM,YAAY,GAAG,UAAC,GAA6B;YACjD,IAAM,GAAG,GAAG,aAAU,GAAG,sDAAgD,iBAAiB,CAAC,UAAU,MAAG,CAAC;YACzG,IAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;YAClD,IAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EAC
xB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,EACjB,iBAAiB,CAAC,UAAU,CAC7B,CAAC;YACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;SAC1B,CAAC;QAEF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,IAAI,iBAAiB,CAAC,UAAU,EAAE;YACjF,IAAM,MAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;YAC1C,IAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;YAChF,IAAM,iBAAiB,GAAa,CAAC,WAAW;kBAC5C,EAAE;kBACF,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,UAAC,SAAS,IAAK,OAAA,SAAS,CAAC,WAAW,EAAE,GAAA,CAAC,CAAC;YACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;gBAC9B,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EACjF;gBACA,OAAO,IAAI,OAAO,CAAwB,UAAC,OAAO;oBAChD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAI,CAAC,CAAC;oBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;iBAC5B,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;aACxB;iBAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EAAE;gBAC3F,OAAO,QAAQ,CAAC,MAAI,CAAC;qBAClB,IAAI,CAAC,UAAC,IAAI;oBACT,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;oBACpC,OAAO,iBAAiB,CAAC;iBAC1B,CAAC;qBACD,KAAK,CAAC,YAAY,CAAC,CAAC;aACxB;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;IAC5C,CAAC;;ICrSD;AACA,aAyBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAEzB,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;aACH;SACF,CAAC;IACJ,CAAC;IAED,IAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;IAChD,IAAM,0BAA0B,GAAG,CAAC,CAAC;IACrC,IAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;IACpD,IAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;IAEnD;;;;IAIA;QAA4C,0CAAiB;;;;;;;;;;QA2B3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;YAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;YAXC,SAAS,QAAQ,CAAC,CAAM;gBACtB,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;aAC9B;YACD,KAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;YACjF,KAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;YAC7F,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;kBAC9C,gBAAgB;kBAChB,iCAAiC,CAAC;YACtC,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;kBAC9C,gBAAgB;kBAChB,iCAAiC,CAAC;;SACvC;QAEM,4CAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAKC;YAJC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;iBAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC;iBAClD,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;SAC7E;QACH,6BAAC;IAAD,CAvDA,CAA4C,iBAAiB,GAuD5D;IAED;;;;;;;;IAQA,SAAS,WAAW,CAClB,MAA8B,EAC9B,UAA8B,EAC9B,SAAoB;QAEpB,IACE,UAAU,IAAI,SAAS;aACtB,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;YACxC,UAAU,KAAK,GAAG;YAClB,UAAU,KAAK,GAAG,EAClB;YACA,OAAO,KAAK,CAAC;SACd;QAED,IAAI,YAAoB,CAAC;QACzB,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;SACnF;aAAM;YACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;SAClD;QAED,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;;IAOA,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;QAEhB,IAAI,CAAC,SAAS,EAAE;YACd,SAAS,GAAG;gBACV,UAAU,EAAE,CAAC;gBACb,aAAa,EAAE,CAAC;aACjB,CAAC;SACH;QAED,IAAI,GAAG,EAAE;YACP,IAAI,SAAS,CAAC,KAAK,EAAE;gBACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;aAClC;YAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;SACvB;;QAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;QAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;QAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG;YAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;QACxF,cAAc,IAAI,gBAAgB,CAAC;QAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,
gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;QAEF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,SAAS,KAAK,CACZ,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;QAEzB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;QAC7D,IAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;QAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;YAC7E,OAAOC,KACC,CAAC,SAAS,CAAC,aAAa,CAAC;iBAC9B,IAAI,CAAC,cAAM,OAAA,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAA,CAAC;iBAC3D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,GAAA,CAAC;iBAChE,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,GAAA,CAAC,CAAC;SACrE;aAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;;YAEjD,IAAM,GAAG,GACP,SAAS,CAAC,KAAK;gBACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;YACJ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SAC5B;aAAM;YACL,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;SAClC;IACH,CAAC;;IC3ND;AACA,aAYgB,6BAA6B,CAC3C,mBAA8C;QAA9C,oCAAA,EAAA,8CAA8C;QAE9C,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;aACpF;SACF,CAAC;IACJ,CAAC;IAED;QAAmD,iDAAiB;QAClE,uCACE,UAAyB,EACzB,OAAiC,EACzB,oBAA4B;YAHtC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHS,0BAAoB,GAApB,oBAAoB,CAAQ;;SAGrC;QAEM,mDAAW,GAAlB,UAAmB,OAAwB;YACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;gBACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAEC,YAAkB,EAAE,CAAC,CAAC;aACtE;YACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QACH,oCAAC;IAAD,CAfA,CAAmD,iBAAiB,GAenE;;ICtCD;IACA;AAcA,aAAgB,sBAAsB;QACpC,OAAO,mBAAmB,CAAC;IAC7B,CAAC;AAED,aAAgB,uBAAuB;QACrC,IAAM,SAAS,GAAG,IAAI,CAAC,SAAwB,CAAC;QAChD,IAAM,MAAM,GAAG;YACb,GAAG,EAAE,IAAI;YACT,KAAK,EAAE,CAAC,SAAS,CAAC,KAAK,IAAI,SAAS,CAAC,QAAQ,EAAE,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC;SAChE,CAAC;QAEF,OAAO,CAAC,MAAM,CAAC,CAAC;IAClB,CAAC;;IC3BD;AACA,IAgBA,SAAS,cAAc;QACrB,IAAM,aAAa,GAAG;YACpB,GAAG,EAAE,YAAY;YACjB,KAAK,EAAE,SAAS,CAAC,aAAa;SAC/B,CAAC;QAEF,OAAO,CAAC,aAAa,CAAC,CAAC;IACzB,CAAC;IAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAkB,EAClB,cAAoB;QADpB,6BAAA,EAAA,kBAAkB;QAClB,+BAAA,EAAA,oBAAoB;QAEpB,OAAO,aAAa;aACjB,GAAG,CAAC,UAAC,IAAI;YACR,IAAM,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,KAAG,cAAc,GAAG,IAAI,CAAC,KAAO,GAAG,EAAE,CAAC;YACjE,OAAO,KAAG,IAAI,CAAC,GAAG,GAAG,KAAO,CAAC;SAC9B,CAAC;aACD,IAAI,CAAC,YAAY,CAAC,CAAC;IACxB,CAAC;AAED,IAAO,IAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE,aAAgB,wBAAwB;QACtC,IAAM,WAAW,GAAG,cAAc,EAAE,CAAC;QACrC,IAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;QACvD,IAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;QAC/E,OAAO,SAAS,CAAC;IACnB,CAAC;AAED,aAAgB,eAAe,CAAC,aAA6B;QAC3D,IAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,IAAI,SAAS,GAAG,sBAAsB,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC;QAClG,IAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,IAAI,SAAS;cAC9C,wBAAwB,EAAE;cAC1B,aAAa,CAAC,KAAK,CAAC;QAE1B,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;aAC7D;SACF,CAAC;IACJ,CAAC;IAED;QAAqC,mCAAiB;QACpD,yBACW,WAA0B,EAC1B,QAAkC,EACjC,SAAiB,EACjB,WAAmB;YAJ/B,YAME,kBAAM,WAAW,EAAE,QAAQ,CAAC,SAC7B;YANU,iBAAW,GAAX,WAAW,CAAe;YAC1B,cAAQ,GAAR,QAAQ,CAA0B;YACjC,eAAS,GAAT,SAAS,CAAQ;YACjB,iBAAW,GAAX,WAAW,CAAQ;;SAG9B;QAED,qCAAW,GAAX,UAAY,OAAwB;YAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;YACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,4CAAkB,GAAlB,UAAmB,OAAwB;YACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC
,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACrC;YAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;gBAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;aACvD;SACF;QACH,sBAAC;IAAD,CAxBA,CAAqC,iBAAiB,GAwBrD;;ICvFD;AACA,IAMA;;;AAGA;QAAA;YACmB,cAAS,GAAwD,EAAE,CAAC;SAqHtF;;;;QAhHQ,sBAAG,GAAV;YACE,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;SAC/C;;;;;;QAOM,sBAAG,GAAV,UAAW,aAAqB,EAAE,cAAmB;YACnD,IAAI,aAAa,EAAE;gBACjB,IAAI,cAAc,IAAI,SAAS,EAAE;oBAC/B,IAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,cAAc,GAAG,cAAc,CAAC,QAAQ,EAAE,CAAC;oBAC5F,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;iBAC1C;qBAAM;oBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;iBACtC;aACF;SACF;;;;;QAMM,sBAAG,GAAV,UAAW,aAAqB;YAC9B,OAAO,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,CAAC;SAClE;;;;QAKM,2BAAQ,GAAf;YACE,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,KAAK,IAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;gBAC1C,IAAI,MAAM,EAAE;oBACV,MAAM,IAAI,GAAG,CAAC;iBACf;gBACD,IAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;gBACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;oBACjC,IAAM,gBAAgB,GAAG,EAAE,CAAC;oBAC5B,KAAoC,UAAc,EAAd,iCAAc,EAAd,4BAAc,EAAd,IAAc,EAAE;wBAA/C,IAAM,qBAAqB,uBAAA;wBAC9B,gBAAgB,CAAC,IAAI,CAAI,aAAa,SAAI,qBAAuB,CAAC,CAAC;qBACpE;oBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;iBACtC;qBAAM;oBACL,MAAM,IAAO,aAAa,SAAI,cAAgB,CAAC;iBAChD;aACF;YACD,OAAO,MAAM,CAAC;SACf;;;;QAKa,cAAK,GAAnB,UAAoB,IAAY;YAC9B,IAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;YAE9B,IAAI,IAAI,EAAE;gBACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,YAAY,GAAuB,eAAe,CAAC;gBAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;gBACvB,IAAI,cAAc,GAAG,EAAE,CAAC;gBACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;oBACpC,IAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;oBACzC,QAAQ,YAAY;wBAClB,KAAK,eAAe;4BAClB,QAAQ,gBAAgB;gCACtB,KAAK,GAAG;oCACN,YAAY,GAAG,gBAAgB,CAAC;oCAChC,MAAM;gCAER,KAAK,GAAG;oCACN,aAAa,GAAG,EAAE,CAAC;oCACnB,cAAc,GAAG,EAAE,CAAC;oCACpB,MAAM;gCAER;oCACE,aAAa,IAAI,gBAAgB,CAAC;oCAClC,MAAM;6BACT;4BACD,MAAM;wBAER,KAAK,gBAAgB;4BACnB,QAAQ,gBAAgB;gCACtB,KAAK,GAAG;oCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;oCAC1C,aAAa,GAAG,EAAE,CAAC;oCACnB,cAAc,GAAG,EAAE,CAAC;oCACpB,YAAY,GAAG,eAAe,CAAC;oCAC/B,MAAM;gCAER;oCACE,cAAc,IAAI,gBAAgB,CAAC;oCACnC,MAAM;6BACT;4BACD,MAAM;wBAER;4BACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;qBACzE;iBACF;gBACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;oBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;iBAC3C;aACF;YAED,OAAO,MAAM,CAAC;SACf;QACH,eAAC;IAAD,CAAC,IAAA;IAED;;;AAGA;QAAA;SAiPC;;;;;QAtOQ,8BAAS,GAAhB,UAAiB,MAA0B;YACzC,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;aAC1B;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;aAC5B;SACF;;;;QAKM,8BAAS,GAAhB;YACE,OAAO,IAAI,CAAC,OAAO,CAAC;SACrB;;;;;QAMM,4BAAO,GAAd,UAAe,IAAwB;YACrC,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;aACxB;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;aAClC;SACF;;;;QAKM,4BAAO,GAAd;YACE,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;;;;;QAMM,4BAAO,GAAd,UAAe,IAAiC;YAC9C,IAAI,IAAI,IAAI,SAAS,IAAI,IAAI,KAAK,EAAE,EAAE;gBACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;aACxB;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;aACnC;SACF;;;;QAKM,4BAAO,GAAd;YACE,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;;;;;QAMM,4BAAO,GAAd,UAAe,IAAwB;YACrC,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;aACxB;iBAAM;gBACL,IAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;gBACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;oBACtB,IAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EA
AE,WAAW,CAAC,CAAC;;;oBAGvD,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;iBAC9E;qBAAM;oBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;iBACxB;aACF;SACF;;;;;QAMM,+BAAU,GAAjB,UAAkB,IAAwB;YACxC,IAAI,IAAI,EAAE;gBACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;gBACrD,IAAI,WAAW,EAAE;oBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;wBAC9B,WAAW,IAAI,GAAG,CAAC;qBACpB;oBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;wBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;qBAC1B;oBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;iBAC3B;gBACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;;;;QAKM,4BAAO,GAAd;YACE,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;;;;QAKM,6BAAQ,GAAf,UAAgB,KAAyB;YACvC,IAAI,CAAC,KAAK,EAAE;gBACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;aACzB;iBAAM;gBACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;aACrC;SACF;;;;;;QAOM,sCAAiB,GAAxB,UAAyB,kBAA0B,EAAE,mBAAwB;YAC3E,IAAI,kBAAkB,EAAE;gBACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;oBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;iBAC9B;gBACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;aAC1D;SACF;;;;;QAMM,2CAAsB,GAA7B,UAA8B,kBAA0B;YACtD,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,SAAS,CAAC;SACtE;;;;QAKM,6BAAQ,GAAf;YACE,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,GAAG,SAAS,CAAC;SACzD;;;;QAKO,wBAAG,GAAX,UAAY,IAAY,EAAE,UAA6B;YACrD,IAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;YAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;gBACvB,IAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;gBACxD,IAAI,KAAK,EAAE;oBACT,QAAQ,KAAK,CAAC,IAAI;wBAChB,KAAK,QAAQ;4BACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BACvC,MAAM;wBAER,KAAK,MAAM;4BACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BACrC,MAAM;wBAER,KAAK,MAAM;4BACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BACrC,MAAM;wBAER,KAAK,MAAM;4BACT,IAAM,SAAS,GAAuB,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BAC9D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;gCAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;6BACxB;4BACD,MAAM;wBAER,KAAK,OAAO;4BACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;4BACzC,MAAM;wBAER;4BACE,MAAM,IAAI,KAAK,CAAC,gCAA8B,KAAK,CAAC,IAAM,CAAC,CAAC;qBAC/D;iBACF;aACF;SACF;QAEM,6BAAQ,GAAf;YACE,IAAI,MAAM,GAAG,EAAE,CAAC;YAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;gBAChB,MAAM,IAAO,IAAI,CAAC,OAAO,QAAK,CAAC;aAChC;YAED,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;aACtB;YAED,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,IAAI,MAAI,IAAI,CAAC,KAAO,CAAC;aAC5B;YAED,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBAC/B,MAAM,IAAI,GAAG,CAAC;iBACf;gBACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;aACtB;YAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;gBACpC,MAAM,IAAI,MAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAI,CAAC;aACxC;YAED,OAAO,MAAM,CAAC;SACf;;;;;QAMM,+BAAU,GAAjB,UAAkB,WAAmB,EAAE,YAAoB;YACzD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;aACvE;SACF;QAEa,gBAAK,GAAnB,UAAoB,IAAY;YAC9B,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;YAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;YACnC,OAAO,MAAM,CAAC;SACf;QACH,iBAAC;IAAD,CAAC,IAAA;IAMD;QACE,kBAAmC,IAAY,EAAkB,IAA
kB;YAAhD,SAAI,GAAJ,IAAI,CAAQ;YAAkB,SAAI,GAAJ,IAAI,CAAc;SAAI;QAEzE,eAAM,GAApB,UAAqB,IAAY;YAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;SACrC;QAEa,aAAI,GAAlB,UAAmB,IAAY;YAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACnC;QAEa,aAAI,GAAlB,UAAmB,IAAY;YAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACnC;QAEa,aAAI,GAAlB,UAAmB,IAAY;YAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACnC;QAEa,cAAK,GAAnB,UAAoB,IAAY;YAC9B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;SACpC;QACH,eAAC;IAAD,CAAC,IAAA;IAED;;;;AAIA,aAAgB,uBAAuB,CAAC,SAAiB;QACvD,IAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QACtD,QACE,CAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE;aACpD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC;aACrD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,YACvD;IACJ,CAAC;IAED;;;IAGA;QAME,sBAA4B,KAAa,EAAE,KAAyB;YAAxC,UAAK,GAAL,KAAK,CAAQ;YACvC,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;YAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,IAAI,SAAS,GAAG,KAAK,GAAG,gBAAgB,CAAC;YACnE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;SACxB;;;;;QAMM,8BAAO,GAAd;YACE,OAAO,IAAI,CAAC,aAAa,CAAC;SAC3B;;;;QAKM,2BAAI,GAAX;YACE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;gBAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;aAChC;iBAAM;gBACL,QAAQ,IAAI,CAAC,aAAa;oBACxB,KAAK,QAAQ;wBACX,UAAU,CAAC,IAAI,CAAC,CAAC;wBACjB,MAAM;oBAER,KAAK,gBAAgB;wBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;wBACvB,MAAM;oBAER,KAAK,MAAM;wBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;wBACf,MAAM;oBAER,KAAK,MAAM;wBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;wBACf,MAAM;oBAER,KAAK,MAAM;wBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;wBACf,MAAM;oBAER,KAAK,OAAO;wBACV,SAAS,CAAC,IAAI,CAAC,CAAC;wBAChB,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAmC,IAAI,CAAC,aAAe,CAAC,CAAC;iBAC5E;aACF;YACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;SAC7B;QACH,mBAAC;IAAD,CAAC,IAAA;IAED;;;IAGA,SAAS,aAAa,CAAC,SAAuB;QAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;YACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;SACjD;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;IAGA,SAAS,mBAAmB,CAAC,SAAuB;QAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;IACzD,CAAC;IAED;;;IAGA,SAAS,mBAAmB,CAAC,SAAuB;QAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;IAClD,CAAC;IAED;;;;IAIA,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;QAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;YAClC,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,GAAG,CAAC,CAAC;aACV;YACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;SACjC;IACH,CAAC;IAED;;;;IAIA,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;QACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;QAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;YACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;SAClC;QACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;IACtE,CAAC;IAED;;;;IAIA,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;QACnF,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACrC,IAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;YAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;gBAChC,MAAM;aACP;iBAAM;gBACL,MAAM,IAAI,gBAAgB,CAAC;gBAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;aAC1B;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;IAIA,SAAS,sBAAsB,CAAC,SAAuB;QACrD,OAAO,SAAS,CAAC,SAAS,EAAE,UAAC,SAAiB,IAAK,OAAA,uBAAuB,CAAC,SAAS,CAAC,GAAA,CAAC,CAAC;IACzF,CAAC;IAED;;;;IAIA,SAAS,kBAAkB,CAAC,SAAuB;QAAE,+BAAkC;aAAlC,UAAkC,EAAlC,qBAAkC,EAAlC,IAAkC;YAAlC,8CAAkC;;QACrF,OAAO,SAAS,CACd,SAAS,EACT,UAAC,SAAiB,IAAK,OAAA,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CACvE,CAAC;IACJ,CAAC;IAED,SAAS,UAAU,CAAC,SAAuB;QACzC,IAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;QACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,
aAAa,GAAG,MAAM,CAAC;SAClC;IACH,CAAC;IAED,SAAS,gBAAgB,CAAC,SAAuB;QAC/C,IAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;gBAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;gBACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;aAClC;iBAAM;gBACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;aAClC;SACF;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;gBAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;aAClC;iBAAM;gBACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;aACnC;SACF;IACH,CAAC;IAED,SAAS,QAAQ,CAAC,SAAuB;QACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;SAC7B;QAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;IACH,CAAC;IAED,SAAS,QAAQ,CAAC,SAAuB;QACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;QAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;IACH,CAAC;IAED,SAAS,QAAQ,CAAC,SAAuB;QACvC,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;QACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;IACH,CAAC;IAED,SAAS,SAAS,CAAC,SAAuB;QACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;QAED,IAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;QAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;IACnC,CAAC;;IClpBD;AACA,IA4BO,IAAM,sBAAsB,GAAoB;QACrD,eAAe,EAAE,IAAI;QACrB,UAAU,EAAE,EAAE;KACf,CAAC;AAEF,aAAgB,cAAc,CAAC,cAAmB;QAAnB,+BAAA,EAAA,mBAAmB;QAChD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;aAChE;SACF,CAAC;IACJ,CAAC;IAED;QAAoC,kCAAiB;QACnD,wBACE,UAAyB,EACzB,OAAiC,EACxB,UAAe;YAAf,2BAAA,EAAA,eAAe;YAH1B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHU,gBAAU,GAAV,UAAU,CAAK;;SAGzB;QAEM,oCAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC;iBACpB,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,cAAc,CAAC,KAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;SAC1D;QACH,qBAAC;IAAD,CAdA,CAAoC,iBAAiB,GAcpD;IAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;QAEd,IAAA,OAAO,GAAa,QAAQ,QAArB,EAAE,MAAM,GAAK,QAAQ,OAAb,CAAc;QACrC,IAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;QACxD,IACE,cAAc;aACb,MAAM,KAAK,GAAG;iBACZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;iBAC3D,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;iBACnE,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,OAAO,CAAC,MAA
M,CAAC;gBAC7C,MAAM,KAAK,GAAG,CAAC;aAChB,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,OAAO,CAAC,aAAa;iBAC5E,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,EAC9E;YACA,IAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;YAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;;;;YAKjC,IAAI,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,KAAK,OAAO,CAAC,MAAM,KAAK,MAAM,EAAE;gBACnE,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;gBACvB,OAAO,OAAO,CAAC,IAAI,CAAC;aACrB;YAED,OAAO,MAAM,CAAC,WAAW;iBACtB,WAAW,CAAC,OAAO,CAAC;iBACpB,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,GAAA,CAAC;iBAC9D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;SACpD;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,CAAC;IAED,SAAS,cAAc,CAAC,QAA+B,EAAE,QAAgB;;;QAGvE,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE;YACxB,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;YAC3B,QAAQ,CAAC,GAAG,GAAG,QAAQ,CAAC;SACzB;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;;aC5Fe,oBAAoB,CAAC,YAAiB;QAAjB,6BAAA,EAAA,iBAAiB;QACpD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;aACpE;SACF,CAAC;IACJ,CAAC;IAED;QAA0C,wCAAiB;QACzD,8BACE,UAAyB,EACzB,OAAiC,EACxB,aAAkB;YAAlB,8BAAA,EAAA,kBAAkB;YAH7B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHU,mBAAa,GAAb,aAAa,CAAK;;SAG5B;QAEM,0CAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;iBAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,gBAAgB,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;SAClE;QACH,2BAAC;IAAD,CAdA,CAA0C,iBAAiB,GAc1D;IAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;QAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3B,IAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;YACxE,IAAI,MAAM,EAAE;gBACV,IAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBACtD,QACE,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;;;;qBAI3C,KAAK,CAAC,cAAM,OAAA,KAAK,GAAA,CAAC;qBAClB,IAAI,CAAC,UAAC,kBAAkB;oBACvB,IAAI,kBAAkB,EAAE;;;wBAGtB,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;wBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;qBACxD;oBACD,OAAO,QAAQ,CAAC;iBACjB,CAAC,EACJ;aACH;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,CAAC;IAED;;;;;;IAMA,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAmB;QAAnB,4BAAA,EAAA,mBAAmB;QAEnB,IAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;QAC5D,IAAI,WAAW,EAAE;YACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;SACtC;;;QAID,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;;QAGvE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;QAE1E,OAAO,UAAU,CAAC;IACpB,CAAC;IAED;;;;;;IAMA,SAAS,yBAAyB,CAAC,IAAY;QAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;QACzB,IAAI,IAAI,EAAE;YACR,IAAI;gBACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aACjC;YAAC,OAAO,GAAG,EAAE;;aAEb;YACD,IACE,YAAY;gBACZ,YAAY,CAAC,KAAK;gBAClB,YAAY,CAAC,KAAK,CAAC,OAAO;gBAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;gBACvB,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;gBACA,IAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;gBAC/D,IAAI,QAAQ,EAAE;oBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;iBACzB;aACF;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;IAMA,SAAS,sBAAsB,CAAC,GAAW;QACzC,IAAI,MAAM,CAAC;QACX,IAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;QAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;YAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;SACtB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,2DAAyD,GAAG,MAAG,CAAC,CAAC;SAClF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;;;;IASA,SAAS,UAAU,CACjB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;QAEhC,IAAM,OAAO,GAAM,SAAS,kBAAa,QAAQ,qCAAkC,CAAC;QACpF,IAAM,MAAM,GAA
M,SAAS,kBAAa,QAAQ,4BAAyB,CAAC;QAC1E,IAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;QACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;QAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;QAEzB,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;YAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;gBAC3B,MAAM,IAAI,KAAK,CAAC,yBAAuB,QAAQ,8CAA2C,CAAC,CAAC;aAC7F;YACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;SAC/D,CAAC,CAAC;IACL,CAAC;IAED;;;;;;;;;IASA,SAAS,qBAAqB,CAC5B,MAA4B,EAC5B,GAAW,EACX,eAAgC;QAEhC,IAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;QAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;QACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;QAE1B,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;YACzD,IAAM,GAAG,GAAG,GAAG,CAAC,UAAiB,CAAC;YAClC,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;gBACrF,OAAO,IAAI,CAAC;aACb;iBAAM;gBACL,OAAOC,KACC,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;qBAClC,IAAI,CAAC,cAAM,OAAA,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,GAAA,CAAC,CAAC;aACpE;SACF,CAAC,CAAC;IACL,CAAC;;ICpMD;AACA,aAYgB,aAAa,CAC3B,sBAAgD;QAEhD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;aACvE;SACF,CAAC;IACJ,CAAC;IAED;QAAmC,iCAAiB;QAClD,uBACE,UAAyB,EACzB,OAAiC,EAC1B,sBAAgD;YAHzD,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHQ,4BAAsB,GAAtB,sBAAsB,CAA0B;;SAGxD;QAED,mCAAW,GAAX,UAAY,OAAwB;YAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SACzD;QAEM,mCAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,WAAW;gBAChD,OAAA,KAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC;aAAA,CAC1C,CAAC;SACH;QACH,oBAAC;IAAD,CAlBA,CAAmC,iBAAiB,GAkBnD;;ICzCD;AACA,aAwBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAEzB,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;aACH;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;IAUA;QAA4C,0CAAiB;QAU3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;YAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;YAzBD,mCAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;YAC1C,gCAA0B,GAAG,CAAC,CAAC;YAC/B,uCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;YAC9C,uCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;YAW3C,KAAI,CAAC,UAAU,GAAG,OAAO,UAAU,KAAK,QAAQ,GAAG,UAAU,GAAG,KAAI,CAAC,0BAA0B,CAAC;YAChG,KAAI,CAAC,aAAa;gBAChB,OAAO,aAAa,KAAK,QAAQ,GAAG,aAAa,GAAG,KAAI,CAAC,6BAA6B,CAAC;YACzF,KAAI,CAAC,gBAAgB;gBACnB,OAAO,gBAAgB,KAAK,QAAQ;sBAChC,gBAAgB;sBAChB,KAAI,CAAC,iCAAiC,CAAC;YAC7C,KAAI,CAAC,gBAAgB;gBACnB,OAAO,gBAAgB,KAAK,QAAQ;sBAChC,gBAAgB;sBAChB,KAAI,CAAC,iCAAiC,CAAC;;SAC9C;QAEM,4CAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;iBAC5B,KAAK,CAAC,UAAC,KAAK,IAAK,OAAAC,OAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;SAClE;QACH,6BAAC;IAAD,CArCA,CAA4C,iBAAiB,GAqC5D;IAED;;;;;;;IAOA,SAASC,aAAW,CAAC,MAA8B,EAAE,SAAoB;QACvE,IAAI,YAAY,CAAC;QACjB,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;SACnF;aAAM;YACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;SAClD;QACD,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;IAMA,SAASC,iBAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;QAEhB,IAAI,CAAC,SAAS,EAAE;YACd,SAAS,GAAG;gBACV,UAAU,EAAE,CAAC;gBACb,aAAa,EAAE,CAAC;aACjB,CAAC;SACH;QAED,IAAI,GAAG,EAAE;YACP,IAAI,SAAS,CAAC,KAAK,EAAE;gBACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;aAClC;YAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;SACvB;;QAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;QAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;QAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,IAAI,CAAC,KAAK,CAA
C,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;QACxF,cAAc,IAAI,gBAAgB,CAAC;QAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;QAEF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,SAAeF,OAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;;;;;;wBAErB,SAAS,GAAGE,iBAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;8BAElD,GAAG;4BACH,GAAG,CAAC,IAAI;4BACRD,aAAW,CAAC,MAAM,EAAE,SAAS,CAAC;6BAC7B,GAAG,CAAC,IAAI,KAAK,WAAW;gCACvB,GAAG,CAAC,IAAI,KAAK,iBAAiB;gCAC9B,GAAG,CAAC,IAAI,KAAK,cAAc;gCAC3B,GAAG,CAAC,IAAI,KAAK,YAAY;gCACzB,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAA,EAPxB,wBAOwB;;;;wBAItB,qBAAME,KAAW,CAAC,SAAS,CAAC,aAAa,CAAC,EAAA;;wBAA1C,SAA0C,CAAC;wBAC3C,sBAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAC;;;wBAEvD,sBAAOH,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,OAAK,EAAE,SAAS,CAAC,EAAC;;;wBAGrE,IAAI,GAAG,EAAE;;4BAEP,sBAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAC;yBACxC;wBACD,sBAAO,iBAAiB,EAAC;;;;;KAE5B;;IC1LD;AACA,IAKA,WAAY,qBAAqB;QAC/B,kCAAS,CAAA;QACT,kCAAS,CAAA;QACT,mCAAU,CAAA;QACV,oCAAW,CAAA;QACX,wCAAe,CAAA;IACjB,CAAC,EANWI,6BAAqB,KAArBA,6BAAqB,QAMhC;;ICZD;AACA,IAYA,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,aAAgB,WAAW,CAAC,cAA8B;QACxD,OAAO;YACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;gBACrE,MAAM,0BAA0B,CAAC;aAClC;SACF,CAAC;IACJ,CAAC;IAED;QAAiC,+BAAiB;QAChD,qBAAY,UAAyB,EAAE,OAAiC;YAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,MAAM,0BAA0B,CAAC;SAClC;QAEM,iCAAW,GAAlB,UAAmB,QAAyB;YAC1C,MAAM,0BAA0B,CAAC;SAClC;QACH,kBAAC;IAAD,CATA,CAAiC,iBAAiB,GASjD;;IChCD;AACA,IAYA,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,aAAgB,uBAAuB,CAAC,SAAkB;QACxD,OAAO,SAAS,CAAC;IACnB,CAAC;AAED,aAAgB,WAAW,CAAC,cAA8B;QACxD,OAAO;YACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;gBACrE,MAAM,0BAA0B,CAAC;aAClC;SACF,CAAC;IACJ,CAAC;IAED;QAAiC,+BAAiB;QAChD,qBAAY,UAAyB,EAAE,OAAiC;YAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,MAAM,0BAA0B,CAAC;SAClC;QAEM,iCAAW,GAAlB,UAAmB,QAAyB;YAC1C,MAAM,0BAA0B,CAAC;SAClC;QACH,kBAAC;IAAD,CATA,CAAiC,iBAAiB,GASjD;;ICpCD;AACA,IAaA,IAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;IACxD,IAAM,mBAAmB,GAAG,CAAC,CAAC;AAY9B,aAAgB,qBAAqB,CACnC,UAAwC;QAAxC,2BAAA,EAAA,gCAAwC;QAExC,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;aACnE;SACF,CAAC;IACJ,CAAC;IAED;;;;;;IAMA;QAA2C,yCAAiB;QAG1D,+BAAY,UAAyB,EAAE,OAAiC,EAAE,UAAkB;YAA5F,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;;SAC9B;QAEY,2CAAW,GAAxB,UAAyB,WAA4B;;;;oBACnD,sBAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;4BACrE,OAAO,KAAI,CAAC,KAAK,CAAC,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;yBAC7C,CAAC,EAAC;;;SACJ;QAEa,qCAAK,GAAnB,UACE,WAA4B,EAC5B,YAAmC,EACnC,UAAkB;;;;;;4BAElB,IAAI,YAAY,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe,EAAE;gCACvD,sBAAO,YAAY,EAAC;6BACrB;4BAEK,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;kCAEE,gBAAgB,IAAI,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA,EAAhD,wBAAgD;4BAC5C,SAAS,GAAuB,qBAAqB,CAAC,qBAAqB,CAC/E,gBAAgB,CACjB,CAAC;iCACE,SAAS,EAAT,wBAAS;4BACX,qBAAM,KAAK,CAAC,SAAS,CAAC,EAAA;;4BAAtB,SAAsB,CAAC;4BACX,qBAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,EAAA;;4BAArD,GAAG,GAAG,SAA+C;4BAC3D,sBAAO,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC,EAAC;gCAIxD,sBAAO,YAAY,EAAC;;;;SACrB;QAEa,2CAAqB,GAAnC,UAAoC,WAAmB;YACrD,IAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;YAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;gBACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;aACrE;iBAAM;gBACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;aACnC;SACF;QAEa,+CAAyB,GAAvC,UAAwC,WAAmB;YACz
D,IAAI;gBACF,IAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;gBAC/B,IAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;gBAC7C,IAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;gBAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;aAC9C;YAAC,OAAO,KAAK,EAAE;gBACd,OAAO,SAAS,CAAC;aAClB;SACF;QACH,4BAAC;IAAD,CA7DA,CAA2C,iBAAiB,GA6D3D;;ICxGD;AACA,IASA,IAAM,4BAA4B,GAAG,QAAQ,CAAC;IAE9C;;;AAGA,IAAO,IAAM,6BAA6B,GAAG;QAC3C,gCAAgC;QAChC,qCAAqC;QACrC,sCAAsC;QACtC,gCAAgC;KACjC,CAAC;IAEF;;;;AAIA;QAGE,wCACE,oBAAqC,EACrC,MAAmE;YAAnE,uBAAA,EAAA,gDAAmE;YAEnE,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;YACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;SACtB;QAEY,iDAAQ,GAArB;;;;;gCACsB,qBAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAA;;4BAAnE,WAAW,GAAG,SAAqD;4BACzE,IAAI,WAAW,KAAK,IAAI,EAAE;gCAClB,MAAM,GAAkB;oCAC5B,WAAW,EAAE,WAAW,CAAC,KAAK;oCAC9B,SAAS,EAAE,4BAA4B;oCACvC,SAAS,EAAE,WAAW,CAAC,kBAAkB;iCAC1C,CAAC;gCACF,sBAAO,MAAM,EAAC;6BACf;iCAAM;gCACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;6BAC/C;;;;SACF;QAEY,oDAAW,GAAxB,UAAyB,WAAwB;;;;;gCACzB,qBAAM,IAAI,CAAC,QAAQ,EAAE,EAAA;;4BAArC,aAAa,GAAG,SAAqB;4BAC3C,WAAW,CAAC,OAAO,CAAC,GAAG,CACrBC,SAAe,CAAC,eAAe,CAAC,aAAa,EAC1C,aAAa,CAAC,SAAS,SAAI,aAAa,CAAC,WAAa,CAC1D,CAAC;4BACF,sBAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,EAAC;;;;SACrC;QACH,qCAAC;IAAD,CAAC;;IC3DD;AACA,IA+JA;;;;AAIA;;;;;;;QAgCE,uBACE,WAAwD,EACxD,OAA8B;YAE9B,IAAI,CAAC,OAAO,EAAE;gBACZ,OAAO,GAAG,EAAE,CAAC;aACd;YAED,IAAI,OAAO,CAAC,OAAO,EAAE;gBACnB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;aAChC;YAED,IAAI,wBAA8D,CAAC;YACnE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;gBAClC,IAAI,KAAK,GAAuB,SAAS,CAAC;gBAC1C,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,6BAA6B,CAAC,QAAQ,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC,EAAE;oBAChF,KAAK,GAAM,OAAO,CAAC,OAAO,cAAW,CAAC;iBACvC;gBACD,wBAAwB,GAAG,IAAI,8BAA8B,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;aACnF;iBAAM;gBACL,wBAAwB,GAAG,WAAW,CAAC;aACxC;YAED,IAAI,wBAAwB,IAAI,CAAC,wBAAwB,CAAC,WAAW,EAAE;gBACrE,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;aAC/E;YAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;YACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,IAAIC,aAAiB,EAAE,CAAC;YACjE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;YAElF,IAAI,sBAA8C,CAAC;YACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;gBACjD,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;aACzD;iBAAM;gBACL,sBAAsB,GAAG,mCAAmC,CAC1D,wBAAwB,EACxB,OAAO,CACR,CAAC;gBACF,IAAI,OAAO,CAAC,sBAAsB,EAAE;oBAClC,IAAM,yBAAyB,GAEF,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;oBACpF,IAAI,yBAAyB,EAAE;wBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;qBACpD;iBACF;aACF;YACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;SACvD;;;;QAKD,mCAAW,GAAX,UAAY,OAAgD;YAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;gBAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;aACvF;YAED,IAAI,WAA4B,CAAC;YACjC,IAAI;gBACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;oBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;oBACpC,WAAW,GAAG,OAAO,CAAC;iBACvB;qBAAM;oBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;oBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;iBAC5C;aACF;YAAC,OAAO,KAAK,EAAE;gBACd,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aAC9B;YAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;YACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;oBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;iBACH;aACF;YACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;SAC9C;;;;;;;QAQD,4CAAoB,GAApB,UACE,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;YAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;gBACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;gBACtC,kBAAkB,C
AAC,OAAO,GAAG,SAAS,CAAC;aACxC;YAED,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;YAEtC,IAAI,MAA6B,CAAC;YAClC,IAAI;gBACF,IAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;gBAC1E,IAAI,CAAC,OAAO,EAAE;oBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;iBACH;gBAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;gBAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;gBAE1C,IAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gBACzD,IAAI,aAAa,CAAC,IAAI,EAAE;oBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;iBAC3C;gBACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;oBACzE,KAA2B,UAA2B,EAA3B,KAAA,aAAa,CAAC,aAAa,EAA3B,cAA2B,EAA3B,IAA2B,EAAE;wBAAnD,IAAM,YAAY,SAAA;wBACrB,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;wBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,CACzC,CAAC;wBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;4BAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;yBAC3D;wBACD,UAAU,CAAC,UAAU,CACnB,OAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,OAAG,EACrF,iBAAiB,CAClB,CAAC;qBACH;iBACF;gBACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC7E,KAA6B,UAA6B,EAA7B,KAAA,aAAa,CAAC,eAAe,EAA7B,cAA6B,EAA7B,IAA6B,EAAE;wBAAvD,IAAM,cAAc,SAAA;wBACvB,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;wBACF,IAAI,mBAAmB,IAAI,SAAS,EAAE;4BACpC,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,CAC3C,CAAC;4BACF,IAAI,cAAc,CAAC,gBAAgB,IAAI,SAAS,EAAE;gCAChD,IAAI,cAAc,CAAC,gBAAgB,KAAKF,6BAAqB,CAAC,KAAK,EAAE;oCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;wCACpC,mBAAmB,GAAG,EAAE,CAAC;qCAC1B;yCAAM;wCACL,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;4CACvC,IAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;4CACxC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,SAAS,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;yCACvE;qCACF;iCACF;qCAAM,IACL,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;oCAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;oCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;iCACjF;6BACF;4BACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;gCAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;oCACtC,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;4CACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;4CACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;yCAC7E;qCACF;iCACF;qCAAM;oCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;iCAC/D;6BACF;4BACD,IACE,cAAc,CAAC,gBAAgB,IAAI,SAAS;gCAC5C,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,KAAK;gCAC/D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;4BACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;yBACH;qBACF;iBACF;gBACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;gBAExC,IAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;gBACzE,IAAI,WAAW,EAAE;oBACf,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;iBACtD;gBAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;oBAClC,KAA8B,UAA8B,EAA9B,KAAA,aAAa,CAAC,gBAAgB,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;wBAAzD,IAAM,eAAe,SAAA;wBACxB,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;wBACF,IAAI,WAAW,IAAI,SAAS,EAAE;4BAC5B,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,CAC5C,CAAC;4BACF,IAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;iCACxE,sBAAsB,CAAC;4BAC1B,IAAI,sBAAsB,EAAE;gCAC1B,KAAkB,UAAwB,EAAxB,KAAA,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oCAAvC,I
AAM,GAAG,SAAA;oCACZ,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;iCACzE;6BACF;iCAAM;gCACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;oCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;6BACH;yBACF;qBACF;iBACF;gBAED,IAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;gBAC3E,IAAI,OAAO,EAAE;oBACX,IAAI,OAAO,CAAC,aAAa,EAAE;wBACzB,KAAK,IAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;4BACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;yBACpF;qBACF;oBAED,IAAI,OAAO,CAAC,WAAW,EAAE;wBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;qBAC/C;oBAED,IAAI,OAAO,CAAC,OAAO,EAAE;wBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;qBACvC;oBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;wBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;qBACzD;oBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;wBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;qBAC7D;iBACF;gBAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;gBAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;gBAE3E,IAAI,WAAW,CAAC,kBAAkB,IAAI,SAAS,EAAE;oBAC/C,WAAW,CAAC,kBAAkB,GAAG,iBAAiB,CAAC,aAAa,CAAC,CAAC;iBACnE;gBAED,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;oBAC9C,OAAA,eAAe,CAAC,GAAG,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;iBAAA,CAC1D,CAAC;aACH;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aAChC;YAED,IAAM,EAAE,GAAG,QAAQ,CAAC;YACpB,IAAI,EAAE,EAAE;gBACN,MAAM;;qBAEH,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,GAAA,CAAC;qBACvF,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;aAC5B;YAED,OAAO,MAAM,CAAC;SACf;QACH,oBAAC;IAAD,CAAC,IAAA;aAEe,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;QAE5B,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;YACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;YAEF,IAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;YAC5C,IAAA,QAAQ,GAA8C,UAAU,SAAxD,EAAE,OAAO,GAAqC,UAAU,QAA/C,EAAE,cAAc,GAAqB,UAAU,eAA/B,EAAE,cAAc,GAAK,UAAU,eAAf,CAAgB;YACzE,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;YACtC,IAAI;gBACF,IAAI,WAAW,CAAC,IAAI,IAAI,SAAS,IAAI,QAAQ,EAAE;oBAC7C,IAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;oBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,CAC/B,CAAC;oBACF,IAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;oBAChD,IAAI,aAAa,CAAC,KAAK,EAAE;wBACvB,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;4BACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7BG,kBAAwB,CACtB,WAAW,CAAC,IAAI,EAChB,cAAc,IAAI,OAAO,IAAI,cAAe,CAC7C,EACD,EAAE,QAAQ,EAAE,OAAO,IAAI,cAAc,EAAE,CACxC,CAAC;yBACH;6BAAM,IAAI,CAAC,QAAQ,EAAE;4BACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE;gCAChD,QAAQ,EAAE,OAAO,IAAI,cAAc;6BACpC,CAAC,CAAC;yBACJ;qBACF;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;qBACrD;iBACF;aACF;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CACb,aAAU,KAAK,CAAC,OAAO,iDAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,MAAG,CACL,CAAC;aACH;SACF;aAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;YAC1B,KAAgC,UAAgC,EAAhC,KAAA,aAAa,CAAC,kBAAkB,EAAhC,cAAgC,EAAhC,IAAgC,EAAE;gBAA7D,IAAM,iBAAiB,SAAA;gBAC1B,IAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;gBACF,IAAI,sBAAsB,IAAI,SAAS,EAAE;oBACvC,IAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;oBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB
,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,CAC9C,CAAC;iBACH;aACF;SACF;IACH,CAAC;IAED,SAAS,sBAAsB,CAAC,QAAa;QAC3C,OAAO,OAAO,QAAQ,CAAC,MAAM,KAAK,UAAU,CAAC;IAC/C,CAAC;IAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;QAEjC,IAAI,MAAc,CAAC;QACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,MAAM,GAAG,KAAK,CAAC;SAChB;aAAM;YACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;YAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;gBAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;aACxB;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,mCAAmC,CAC1C,WAAwE,EACxE,OAA6B;QAE7B,IAAM,SAAS,GAA2B,EAAE,CAAC;QAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;YACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;SAClF;QAED,IAAI,WAAW,EAAE;YACf,IAAI,sBAAsB,CAAC,WAAW,CAAC,EAAE;gBACvC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;aAC7B;iBAAM;gBACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC;aAC5C;SACF;QAED,IAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;QACF,IAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;QACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;YAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;SAC5F;QAED,IAAM,eAAe,yBAChB,sBAAsB,GACtB,OAAO,CAAC,eAAe,CAC3B,CAAC;QACF,IAAI,eAAe,CAAC,eAAe,EAAE;YACnC,SAAS,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;SAC5D;QAED,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;QAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;YAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;YACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;YACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;SACzC;QAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;QAE3E,IAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,uBAAuB,EAAE,CAAC;QACzE,IAAI,aAAa,EAAE;YACjB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,AAAa,CAAC,CAAC,CAAC;SAC5C;QAED,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;SACpD;QAED,OAAO,SAAS,CAAC;IACnB,CAAC;AAID,IAkBA,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;QAEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;IACJ,CAAC;AAED,aAAgB,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;QAEtB,IAAI,KAAU,CAAC;QACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;YACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;SACjC;QACD,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;YAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;oBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;iBACtC;qBAAM;oBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;oBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;wBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;qBACnF;oBAED,IAAI,eAAe,GAAG,KAAK,CAAC;oBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;wBACvC,eAAe;4BACb,eAAe,CAAC,QAAQ;iCACvB,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;qBAClE;oBACD,KAAK,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,GAAG,oBAAoB,CAAC,aAAa,CAAC;iBAC7F;;gBAGD,IAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;gBACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,CAAC,CAAC;aACnE;SACF;aAAM;YACL,IAAI,eAAe,CAAC,QAAQ,EAAE;gBAC5B,KAAK,GAAG,EAAE,CAAC;aACZ;YAED,KAAK,IAAM,YAAY,IAAI,aAAa,EAAE;gBACxC,IAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;gBACF,IAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;gBAChE,IAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;;gBAEF,IAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;gBACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;gBACxE,IAAI,aAAa,KAAK,SAAS,EAAE;oBAC/B,IAAI,CAAC,KAAK,EAAE;wBACV,KAAK,GAAG,EAAE,CAAC;qBACZ;oBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;iBACrC;aACF;SACF
;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;QAEvB,IAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;QAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACpC,IAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;;YAEnD,IAAI,MAAM,IAAI,SAAS,IAAI,iBAAiB,IAAI,MAAM,EAAE;gBACtD,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;aACpC;iBAAM;gBACL,MAAM;aACP;SACF;QACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;YAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;YAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;SAC7B;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,aAAgB,eAAe,CAC7B,SAAgC,EAChC,YAA2C;QAE3C,IAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;QAC9C,IAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;QAE3D,IAAM,oBAAoB,GAAG,UAAC,GAAO;YACnC,OAAA,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;gBACtC,KAAK,EAAE,SAAS;aACjB,CAAC;SAAA,CAAC;QAEL,IAAI,UAAU,EAAE;YACd,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;YACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;gBACzB,OAAO,oBAAoB,uBACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;aACJ;YAED,IAAM,iBAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,KAAK,EAAE,CAAC;YAC3F,IAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,CAAC,IAAI,CAC1D,UAAC,CAAC,IAAK,OAAA,iBAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,GAAA,CAChD,CAAC;YACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;;;gBAGjD,IAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,SAAS,CAAC,UAAU,GAAG,EAAE,CAAC;gBACnF,IAAM,aAAa,GAAG,eAAI,UAAU,CAAyB,CAAC;gBAE9D,KAAkB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;oBAA3C,IAAM,GAAG,SAAA;oBACZ,IAAI,iBAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;wBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;qBAChD;iBACF;gBAED,IAAI,aAAa,EAAE;oBACjB,KAAkB,UAA0B,EAA1B,KAAA,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAA1B,cAA0B,EAA1B,IAA0B,EAAE;wBAAzC,IAAM,GAAG,SAAA;wBACZ,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;qBACzC;iBACF;gBACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;gBACpC,OAAO,aAAa,CAAC;aACtB;YAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;gBACzD,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;aACJ;SACF;QAED,IACE,UAAU;YACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;YACnCC,eAAqB,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;;YAEA,OAAO,oBAAoB,uBACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;SACJ;QAED,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;IACL,CAAC;;ICn1BD;AACA,aAWgB,SAAS,CAAC,MAAyB;QAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;QACjD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;aACnD;SACF,CAAC;IACJ,CAAC;IAED;QAA+B,6BAAiB;QAG9C,mBACE,UAAyB,EACzB,OAAiC,EACjC,MAAyB;YAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;YAH3B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,KAAI,CAAC,MAAM,GAAG,MAAM,CAAC;;SACtB;QAEM,+BAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAEC;YADC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,WAAW,CAAC,KAAI,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;SAC9F;QACH,gBAAC;IAAD,CAfA,CAA+B,iBAAiB,GAe/C;IAED,SAAS,WAAW,CAClB,MAAiB,EACjB,QAA+B;QAE/B,MAAM,CAAC,MAAM,CAAC,iBAAe,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAG,CAAC,CAAC;QAC/E,MAAM,CAAC,MAAM,CAAC,8BAA4B,QAAQ,CAAC,MAAQ,CAAC,CAAC;QAC7D,IAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC;QACzC,MAAM,CAAC,MAAM,CAAC,cAAY,YAAc,CAAC,CAAC;QAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,CAAC;;IC9CD;AACA,IAOA,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;IAClD,IAAMC,8BAA4B,GAAG,QAAQ,CAAC;IAE9C;;;AAGA;;;;;;;;QAWE,0BAAY,KAAa,EAAE,mBAA0D;YAA1D,oCAAA,EAAA,oDAA0D;YATrF,wBAAmB,GAAWA,8BAA4B,CAAC;YAUzD,IAAI,CAAC,KAAK,EAAE;gBACV,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;YACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;YACnB,I
AAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;SAChD;;;;;;;QAQD,sCAAW,GAAX,UAAY,WAA4B;YACtC,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,aAAa,EAC1B,IAAI,CAAC,mBAAmB,SAAI,IAAI,CAAC,KAAO,CAC5C,CAAC;YACF,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;SACrC;QACH,uBAAC;IAAD,CAAC;;IC/CD;AACA,IAOA,IAAMC,iBAAe,GAAG,SAAS,CAAC,eAAe,CAAC;IAClD,IAAMD,8BAA4B,GAAG,OAAO,CAAC;AAE7C;;;;;;;;;QAaE,wCACE,QAAgB,EAChB,QAAgB,EAChB,mBAA0D;YAA1D,oCAAA,EAAA,oDAA0D;YAb5D,wBAAmB,GAAWA,8BAA4B,CAAC;YAezD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;aACrF;YACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;aACrF;YACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;SAChD;;;;;;;QAQD,oDAAW,GAAX,UAAY,WAA4B;YACtC,IAAM,WAAW,GAAM,IAAI,CAAC,QAAQ,SAAI,IAAI,CAAC,QAAU,CAAC;YACxD,IAAM,kBAAkB,GAAM,IAAI,CAAC,mBAAmB,SAAIE,YAAmB,CAAC,WAAW,CAAG,CAAC;YAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAACD,iBAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;YAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;SACrC;QACH,qCAAC;IAAD,CAAC;;ICrDD;AACA,IAqBA;;;AAGA;;;;;QAcE,2BAAY,OAAgC;YAC1C,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;gBAClE,MAAM,IAAI,KAAK,CACb,8HAA0H,CAC3H,CAAC;aACH;YACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;YACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;SAChC;;;;;;;QAQD,uCAAW,GAAX,UAAY,WAA4B;YACtC,IAAI,CAAC,WAAW,EAAE;gBAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,yEAAuE,CAAC,CACnF,CAAC;aACH;YAED,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;oBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;iBACzC;gBACD,KAAK,IAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;oBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;iBAChE;aACF;YAED,IAAI,IAAI,CAAC,OAAO,EAAE;gBAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;oBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;iBAC/E;gBACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;oBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,KAAK,IAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;oBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;wBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;qBACxB;oBACD,WAAW,CAAC,GAAG,IAAO,GAAG,SAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAG,CAAC;iBAClD;aACF;YAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;SACrC;QACH,wBAAC;IAAD,CAAC;;ICxFD;AACA;QAIsC,oCAAiB;;;;;;;QAOrD,0BAAY,QAAgB;YAA5B,iBAUC;YATC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;gBAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;aACrF;YACD,IAAM,OAAO,GAA4B;gBACvC,QAAQ,EAAE;oBACR,aAAa,EAAE,QAAQ;iBACxB;aACF,CAAC;YACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;SAChB;QACH,uBAAC;IAAD,CAlBA,CAAsC,iBAAiB;;ICLvD;AACA;QAIuC,qCAAiB;;;;;;;QAOtD,2BAAY,SAAiB;YAA7B,iBAUC;YATC,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EAAE;gBAC9D,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;aACtF;YACD,IAAM,OAAO,GAA4B;gBACvC,QAAQ,EAAE;oBACR,aAAa,EAAE,SAAS;iBACzB;aACF,CAAC;YACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;SAChB;QACH,wBAAC;IAAD,CAlBA,CAAuC,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js b/node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js new file mode 100644 index 00000000..5e7c8380 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js @@ 
-0,0 +1,19 @@ +/** @license ms-rest-js + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt and ThirdPartyNotices.txt in the project root for license information. + */ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e=e||self).msRest={})}(this,(function(e){"use strict";function t(e){return e.toLowerCase()}function r(e){return!(!e||"object"!=typeof e)&&("function"==typeof e.rawHeaders&&"function"==typeof e.clone&&"function"==typeof e.get&&"function"==typeof e.set&&"function"==typeof e.contains&&"function"==typeof e.remove&&"function"==typeof e.headersArray&&"function"==typeof e.headerValues&&"function"==typeof e.headerNames&&"function"==typeof e.toJson)}var n,o=function(){function e(e){if(this._headersMap={},e)for(var t in e)this.set(t,e[t])}return e.prototype.set=function(e,r){this._headersMap[t(e)]={name:e,value:r.toString()}},e.prototype.get=function(e){var r=this._headersMap[t(e)];return r?r.value:void 0},e.prototype.contains=function(e){return!!this._headersMap[t(e)]},e.prototype.remove=function(e){var r=this.contains(e);return delete this._headersMap[t(e)],r},e.prototype.rawHeaders=function(){var e={};for(var t in this._headersMap){var r=this._headersMap[t];e[r.name.toLowerCase()]=r.value}return e},e.prototype.headersArray=function(){var e=[];for(var t in this._headersMap)e.push(this._headersMap[t]);return e},e.prototype.headerNames=function(){for(var e=[],t=this.headersArray(),r=0;r1&&void 0!==arguments[1]?arguments[1]:0,r=(f[e[t+0]]+f[e[t+1]]+f[e[t+2]]+f[e[t+3]]+"-"+f[e[t+4]]+f[e[t+5]]+"-"+f[e[t+6]]+f[e[t+7]]+"-"+f[e[t+8]]+f[e[t+9]]+"-"+f[e[t+10]]+f[e[t+11]]+f[e[t+12]]+f[e[t+13]]+f[e[t+14]]+f[e[t+15]]).toLowerCase();if(!c(r))throw TypeError("Stringified UUID is invalid");return r}(n)}var d={msRestVersion:"2.6.1",HTTP:"http:",HTTPS:"https:",HTTP_PROXY:"HTTP_PROXY",HTTPS_PROXY:"HTTPS_PROXY",NO_PROXY:"NO_PROXY",ALL_PROXY:"ALL_PROXY",HttpConstants:{HttpVerbs:{PUT:"PUT",GET:"GET",DELETE:"DELETE",POST:"POST",MERGE:"MERGE",HEAD:"HEAD",PATCH:"PATCH"},StatusCodes:{TooManyRequests:429}},HeaderConstants:{AUTHORIZATION:"authorization",AUTHORIZATION_SCHEME:"Bearer",RETRY_AFTER:"Retry-After",USER_AGENT:"User-Agent"}},y="undefined"!=typeof process&&!!process.version&&!!process.versions&&!!process.versions.node;function m(e){var t={};return t.body=e.bodyAsText,t.headers=e.headers,t.status=e.status,t}function v(e){var t=e.clone();return t.headers&&t.headers.remove("authorization"),t}function g(e){return new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$","ig").test(e)}function b(){return h()}function w(e,t){return new Promise((function(r){return setTimeout((function(){return r(t)}),e)}))}var E=/^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;function R(e){return E.test(e)}function T(e,t,r){return e&&t?e.split(t).join(r||""):e}var _=function(){function e(e,t){void 0===e&&(e={}),this.modelMappers=e,this.isXML=t}return e.prototype.validateConstraints=function(e,t,r){var n=function(e,n){throw new Error('"'+r+'" with value "'+t+'" should satisfy the constraint "'+e+'": '+n+".")};if(e.constraints&&null!=t){var 
o=e.constraints,i=o.ExclusiveMaximum,a=o.ExclusiveMinimum,s=o.InclusiveMaximum,u=o.InclusiveMinimum,l=o.MaxItems,c=o.MaxLength,f=o.MinItems,p=o.MinLength,h=o.MultipleOf,d=o.Pattern,y=o.UniqueItems;if(null!=i&&t>=i&&n("ExclusiveMaximum",i),null!=a&&t<=a&&n("ExclusiveMinimum",a),null!=s&&t>s&&n("InclusiveMaximum",s),null!=u&&tl&&n("MaxItems",l),null!=c&&t.length>c&&n("MaxLength",c),null!=f&&t.length=0&&e[r-1]===t;)--r;return e.substr(0,r)}(i(e),"=").replace(/\+/g,"-").replace(/\//g,"_")}(t)}return t}(r,t):null!==o.match(/^Sequence$/gi)?n=function(e,t,r,n){if(!Array.isArray(r))throw new Error(n+" must be of type Array.");var o=t.type.element;if(!o||"object"!=typeof o)throw new Error('element" metadata for an Array must be defined in the mapper and it must of type "object" in '+n+".");for(var i=[],a=0;a0&&o[o.length-1])||6!==i[0]&&2!==i[0])){a=0;continue}if(3===i[0]&&(!o||i[1]>o[0]&&i[1]'+W.serializeToString(r)}function Z(e){for(var t=[],r=0,n=Object.keys(e);r=200&&r.status<300);a.headersMapper&&(e.parsedHeaders=t.serializer.deserialize(a.headersMapper,e.headers.rawHeaders(),"operationRes.parsedHeaders"))}}else{var u=t.responses.default;if(u){var l=V(t)?"Unexpected status code: "+n:e.bodyAsText,c=new k(l);c.statusCode=n,c.request=v(e.request),c.response=m(e);var f=e.parsedBody;try{if(f){var p=u.bodyMapper;if(p&&"CloudError"===p.serializedName)f.error&&(f=f.error),f.code&&(c.code=f.code),f.message&&(c.message=f.message);else{var h=f;f.error&&(h=f.error),c.code=h.code,h.message&&(c.message=h.message)}if(p){var d=f;t.isXML&&p.type.name===q.Sequence&&(d="object"==typeof f?f[p.xmlElementName]:[]),c.body=t.serializer.deserialize(p,d,"error.body")}}}catch(t){c.message='Error "'+t.message+'" occurred in deserializing the responseBody - "'+e.bodyAsText+'" for the default response.'}return Promise.reject(c)}}}}return Promise.resolve(e)}))}function se(e,t,r,n){return{create:function(o,i){return new ue(o,i,e,t,r,n)}}}var ue=function(e){function t(t,r,n,o,i,a){var s=e.call(this,t,r)||this;function u(e){return"number"==typeof e}return s.retryCount=u(n)?n:3,s.retryInterval=u(o)?o:3e4,s.minRetryInterval=u(i)?i:3e3,s.maxRetryInterval=u(a)?a:9e4,s}return U(t,e),t.prototype.sendRequest=function(e){var t=this;return this._nextPolicy.sendRequest(e.clone()).then((function(r){return le(t,e,r)})).catch((function(r){return le(t,e,r.response,void 0,r)}))},t}(ee);function le(e,t,r,n,o){n=function(e,t,r){t||(t={retryCount:0,retryInterval:0}),r&&(t.error&&(r.innerError=t.error),t.error=r),t.retryCount++;var n=Math.pow(2,t.retryCount)-1;return n*=.8*e.retryInterval+Math.floor(Math.random()*(1.2*e.retryInterval-.8*e.retryInterval)),t.retryInterval=Math.min(e.minRetryInterval+n,e.maxRetryInterval),t}(e,n,o);var i=t.abortSignal&&t.abortSignal.aborted;if(!i&&function(e,t,r){if(null==t||t<500&&408!==t||501===t||505===t)return!1;if(!r)throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null.");return(r&&r.retryCount)0},e.prototype.set=function(e,t){if(e)if(null!=t){var r=Array.isArray(t)?t:t.toString();this._rawQuery[e]=r}else delete this._rawQuery[e]},e.prototype.get=function(e){return e?this._rawQuery[e]:void 0},e.prototype.toString=function(){var e="";for(var t in this._rawQuery){e&&(e+="&");var r=this._rawQuery[t];if(Array.isArray(r)){for(var n=[],o=0,i=r;o0)}(e)){var o=void 0;(null==t?void 0:t.baseUri)&&$e.includes(null==t?void 0:t.baseUri)&&(o=t.baseUri+"/.default"),r=new Ve(e,o)}else r=e;if(r&&!r.signRequest)throw new Error("credentials argument needs to implement signRequest 
method");if(this._withCredentials=t.withCredentials||!1,this._httpClient=t.httpClient||new j,this._requestPolicyOptions=new te(t.httpPipelineLogger),Array.isArray(t.requestPolicyFactories))n=t.requestPolicyFactories;else if(n=function(e,t){var r=[];t.generateClientRequestIdHeader&&r.push(ce(t.clientRequestIdHeaderName));e&&("function"==typeof e.create?r.push(e):r.push(Ce(e)));var n=Xe(t.userAgentHeaderName,he),o=Xe(t.userAgent,de);n&&o&&r.push(ye({key:n,value:o}));var i=I(I({},Oe),t.redirectOptions);i.handleRedirects&&r.push(Ae(i.maxRetries));r.push(function(e){return void 0===e&&(e=30),{create:function(t,r){return new Ne(t,r,e)}}}(t.rpRegistrationRetryTimeout)),t.noRetryPolicy||(r.push(se()),r.push(Ue()),r.push(Fe()));r.push(re(t.deserializationContentTypes)),t.proxySettings?r.push(ze()):void 0;t.agentSettings&&r.push(Le(t.agentSettings));return r}(r,t),t.requestPolicyFactories){var i=t.requestPolicyFactories(n);i&&(n=i)}this._requestPolicyFactories=n}return t.prototype.sendRequest=function(e){if(null==e||"object"!=typeof e)throw new Error("options cannot be null or undefined and it must be of type object.");var t,n;try{"object"==typeof(n=e)&&"string"==typeof n.url&&"string"==typeof n.method&&"object"==typeof n.headers&&r(n.headers)&&"function"==typeof n.validateRequestProperties&&"function"==typeof n.prepare&&"function"==typeof n.clone?(e.validateRequestProperties(),t=e):t=(t=new C).prepare(e)}catch(e){return Promise.reject(e)}var o=this._httpClient;if(this._requestPolicyFactories&&this._requestPolicyFactories.length>0)for(var i=this._requestPolicyFactories.length-1;i>=0;--i)o=this._requestPolicyFactories[i].create(o,this._requestPolicyOptions);return o.sendRequest(t)},t.prototype.sendOperationRequest=function(t,r,n){"function"==typeof t.options&&(n=t.options,t.options=void 0);var o,i=new C;try{var a=r.baseUrl||this.baseUri;if(!a)throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.");i.method=r.httpMethod,i.operationSpec=r;var s=ge.parse(a);if(r.path&&s.appendPath(r.path),r.urlParameters&&r.urlParameters.length>0)for(var u=0,l=r.urlParameters;u0)for(var p=0,h=r.queryParameters;p0){t.formData={};for(var p=0,h=n.formDataParameters;p0){if(o.isConstant)a=o.defaultValue;else{var s=Je(r,n);s.propertyFound||(s=Je(t,n));var u=!1;s.propertyFound||(u=o.required||"options"===n[0]&&2===n.length),a=u?o.defaultValue:s.propertyValue}var l=$(n,o);i.serialize(o,a,l)}}else for(var c in o.required&&(a={}),n){var f=o.type.modelProperties[c],p=n[c],h=e(t,r,p,f,i),d=$(p,f);i.serialize(f,h,d),void 0!==h&&(a||(a={}),a[c]=h)}return a}(e,t,r.parameterPath,r.mapper,n)}function Je(e,t){for(var r={propertyFound:!1},n=0;n> Request: "+JSON.stringify(t.request,void 0,2)),e.logger(">> Response status code: "+t.status);var r=t.bodyAsText;return e.logger(">> Body: "+r),Promise.resolve(t)}(t,e)}))},t}(ee);var Ze=d.HeaderConstants,et=function(){function e(e,t){if(void 0===t&&(t="Bearer"),this.authorizationScheme="Bearer",!e)throw new Error("token cannot be null or undefined.");this.token=e,this.authorizationScheme=t}return e.prototype.signRequest=function(e){return e.headers||(e.headers=new o),e.headers.set(Ze.AUTHORIZATION,this.authorizationScheme+" "+this.token),Promise.resolve(e)},e}(),tt=d.HeaderConstants,rt=function(){function e(e,t,r){if(void 0===r&&(r="Basic"),this.authorizationScheme="Basic",null==e||"string"!=typeof e.valueOf())throw new Error("userName cannot be null or undefined and must be of type 
string.");if(null==t||"string"!=typeof t.valueOf())throw new Error("password cannot be null or undefined and must be of type string.");this.userName=e,this.password=t,this.authorizationScheme=r}return e.prototype.signRequest=function(e){var t=this.userName+":"+this.password,r=this.authorizationScheme+" "+btoa(t);return e.headers||(e.headers=new o),e.headers.set(tt.AUTHORIZATION,r),Promise.resolve(e)},e}(),nt=function(){function e(e){if(!e||e&&!e.inHeader&&!e.inQuery)throw new Error('options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.');this.inHeader=e.inHeader,this.inQuery=e.inQuery}return e.prototype.signRequest=function(e){if(!e)return Promise.reject(new Error('webResource cannot be null or undefined and must be of type "object".'));if(this.inHeader)for(var t in e.headers||(e.headers=new o),this.inHeader)e.headers.set(t,this.inHeader[t]);if(this.inQuery){if(!e.url)return Promise.reject(new Error("url cannot be null in the request object."));for(var r in e.url.indexOf("?")<0&&(e.url+="?"),this.inQuery)e.url.endsWith("?")||(e.url+="&"),e.url+=r+"="+this.inQuery[r]}return Promise.resolve(e)},e}(),ot=function(e){function t(t){if(!t||t&&"string"!=typeof t)throw new Error("topicKey cannot be null or undefined and must be of type string.");var r={inHeader:{"aeg-sas-key":t}};return e.call(this,r)||this}return U(t,e),t}(nt),it=function(e){function t(t){if(!t||t&&"string"!=typeof t)throw new Error("domainKey cannot be null or undefined and must be of type string.");var r={inHeader:{"aeg-sas-key":t}};return e.call(this,r)||this}return U(t,e),t}(nt);e.ApiKeyCredentials=nt,e.AzureIdentityCredentialAdapter=Ve,e.BaseRequestPolicy=ee,e.BasicAuthenticationCredentials=rt,e.Constants=d,e.DefaultHttpClient=j,e.DomainCredentials=it,e.HttpHeaders=o,e.MapperType=q,e.RequestPolicyOptions=te,e.RestError=k,e.Serializer=_,e.ServiceClient=Ye,e.TokenCredentials=et,e.TopicCredentials=ot,e.URLBuilder=ge,e.URLQuery=ve,e.WebResource=C,e.agentPolicy=Le,e.applyMixins=function(e,t){t.forEach((function(t){Object.getOwnPropertyNames(t.prototype).forEach((function(r){e.prototype[r]=t.prototype[r]}))}))},e.delay=w,e.deserializationPolicy=re,e.deserializeResponseBody=ae,e.encodeUri=function(e){return encodeURIComponent(e).replace(/!/g,"%21").replace(/"/g,"%27").replace(/\(/g,"%28").replace(/\)/g,"%29").replace(/\*/g,"%2A")},e.executePromisesSequentially=function(e,t){var r=Promise.resolve(t);return e.forEach((function(e){r=r.then(e)})),r},e.exponentialRetryPolicy=se,e.flattenResponse=We,e.generateClientRequestIdPolicy=ce,e.generateUuid=b,e.getDefaultProxySettings=je,e.getDefaultUserAgentValue=de,e.isDuration=R,e.isNode=y,e.isValidUuid=g,e.logPolicy=function(e){return void 0===e&&(e=console.log),{create:function(t,r){return new Ke(t,r,e)}}},e.promiseToCallback=function(e){if("function"!=typeof e.then)throw new Error("The provided input is not a Promise.");return function(t){e.then((function(e){t(void 0,e)}),(function(e){t(e)}))}},e.promiseToServiceCallback=function(e){if("function"!=typeof e.then)throw new Error("The provided input is not a Promise.");return function(t){e.then((function(e){process.nextTick(t,void 0,e.parsedBody,e.request,e)}),(function(e){process.nextTick(t,e)}))}},e.proxyPolicy=ze,e.redirectPolicy=Ae,e.serializeObject=function e(t){if(null!=t){if(t instanceof Uint8Array)return t=i(t);if(t instanceof Date)return t.toISOString();if(Array.isArray(t)){for(var r=[],n=0;n 1 && arguments[1] !== undefined ? 
arguments[1] : 0;\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!validate(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nexport default stringify;","import rng from './rng.js';\nimport stringify from './stringify.js';\n\nfunction v4(options, buf, offset) {\n options = options || {};\n var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (var i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return stringify(rnds);\n}\n\nexport default v4;","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nexport const Constants = {\n /**\n * The ms-rest version\n * @const\n * @type {string}\n */\n msRestVersion: \"2.6.1\",\n\n /**\n * Specifies HTTP.\n *\n * @const\n * @type {string}\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n *\n * @const\n * @type {string}\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n *\n * @const\n * @type {string}\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n *\n * @const\n * @type {string}\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n *\n * @const\n * @enum {string}\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n *\n * @const\n * @type {string}\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n *\n * @const\n * @type {string}\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n *\n * @const\n * @type {string}\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { v4 as uuidv4 } from \"uuid\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"./constants\";\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param {object} urlToCheck The url to check\n * @return {boolean} True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param {string} uri The URI to be encoded.\n * @return {string} The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param {HttpOperationResponse} response The Http Response\n *\n * @return {object} The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization header.\n *\n * @param {WebResource} request The Http Request object\n *\n * @return {WebResource} The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param {string} uuid The uuid as a string that needs to be validated\n *\n * @return {boolean} True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n const validUuidRegex = new RegExp(\n \"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$\",\n \"ig\"\n );\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Provides an array of values of an object. 
For example\n * for a given object { \"a\": \"foo\", \"b\": \"bar\" }, the method returns [\"foo\", \"bar\"].\n *\n * @param {object} obj An object whose properties need to be enumerated so that it\"s values can be provided as an array\n *\n * @return {any[]} An array of values of the given object.\n */\nexport function objectValues(obj: { [key: string]: any }): any[] {\n const result: any[] = [];\n if (obj && obj instanceof Object) {\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n result.push((obj)[key]);\n }\n }\n } else {\n throw new Error(\n `The provided object ${JSON.stringify(\n obj,\n undefined,\n 2\n )} is not a valid object that can be ` + `enumerated to provide its values as an array.`\n );\n }\n return result;\n}\n\n/**\n * Generated UUID\n *\n * @return {string} RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises!\n *\n * @param {Array} promiseFactories An array of promise factories(A function that return a promise)\n *\n * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n *\n * @return A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(promiseFactories: Array, kickstart: any) {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Merges source object into the target object\n * @param {object} source The object that needs to be merged\n *\n * @param {object} target The object to be merged into\n *\n * @returns {object} Returns the merged target object.\n */\nexport function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {\n Object.keys(source).forEach((key) => {\n target[key] = source[key];\n });\n return target;\n}\n\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param {number} t The number of milliseconds to be delayed.\n * @param {T} value The value to be resolved with after a timeout of t milliseconds.\n * @returns {Promise} Resolved promise\n */\nexport function delay(t: number, value?: T): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(value), t));\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null.\n * @param {TResult} [result] The deserialized response body if an error did not occur.\n * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.\n * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param {Promise} promise The Promise to be converted to a callback\n * @returns {Function} A function that takes the callback (cb: Function): void\n * @deprecated generated code should instead depend on responseToBody\n 
*/\nexport function promiseToCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: Function): void => {\n promise.then(\n (data: any) => {\n cb(undefined, data);\n },\n (err: Error) => {\n cb(err);\n }\n );\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise.then(\n (data: HttpOperationResponse) => {\n process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n },\n (err: Error) => {\n process.nextTick(cb, err);\n }\n );\n };\n}\n\nexport function prepareXMLRootList(obj: any, elementName: string) {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n return { [elementName]: obj };\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param {object} targetCtor The target object on which the properties need to be applied.\n * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtor: any, sourceCtors: any[]): void {\n sourceCtors.forEach((sourceCtors) => {\n Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {\n targetCtor.prototype[name] = sourceCtors.prototype[name];\n });\n });\n}\n\nconst validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param {string} value The value to be validated for ISO 8601 duration format.\n * @return {boolean} `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param {string | undefined} value The value to search and replace in.\n * @param {string} searchValue The value to search for in the value argument.\n * @param {string} replaceValue The value to replace searchValue with in the value argument.\n * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given enity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value Any entity\n * @return boolean - true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: any): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\n\nexport class Serializer {\n constructor(\n public readonly modelMappers: { [key: string]: any } = {},\n public readonly isXML?: boolean\n ) {}\n\n validateConstraints(mapper: Mapper, value: any, objectName: string): void {\n const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && value > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && value < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n if (MaxItems != undefined && value.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && value.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && value.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && value.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && value % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? 
new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param {Mapper} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized\n *\n * @param {string} objectName Name of the serialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object\n */\n serialize(mapper: Mapper, object: any, objectName?: string): any {\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/gi) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/gi) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = serializeByteArrayType(objectName, object);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName);\n } else if (mapperType.match(/^Composite$/gi) !== null) {\n payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName);\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param {object} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized\n *\n * @param {string} objectName Name of the deserialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object\n */\n deserialize(mapper: Mapper, responseBody: any, objectName: string): any {\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/gi) !== null) {\n payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName);\n } else {\n if (this.isXML) {\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\") and body (\"_\") properties, then just reduce the responseBody value to\n * the body (\"_\") property.\n */\n if (responseBody[\"$\"] != undefined && responseBody[\"_\"] != undefined) {\n responseBody = responseBody[\"_\"];\n }\n }\n\n if (mapperType.match(/^Number$/gi) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/gi) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) {\n payload = new Date(responseBody);\n } else if (mapperType.match(/^UnixTime$/gi) !== null) {\n payload = unixTimeToDate(responseBody);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = base64.decodeString(responseBody);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = base64UrlToByteArray(responseBody);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string) {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n 
}\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/\\-/g, \"+\").replace(/\\_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/gi) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/gi) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/gi) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/gi) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/gi) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !(typeof Blob === \"function\" && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = base64.encodeByteArray(value);\n }\n return value;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = bufferToBase64Url(value);\n }\n return value;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string) {\n if (value != undefined) {\n if (typeName.match(/^Date$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/gi) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. 
Instead was \"${value}\".`\n );\n }\n value = value;\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string\n) {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n tempArray[i] = serializer.serialize(elementType, object[i], objectName);\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string\n) {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + \".\" + key);\n }\n return tempDictionary;\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer the serializer containing the entire set of mappers\n * @param mapper the composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n const modelMapper = serializer.modelMappers[className];\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${className}\".`);\n }\n modelProps = modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(\n modelMapper\n )}\" of type \"${className}\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string\n) {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (childObject == undefined && 
object[key] != undefined) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName\n );\n if (serializedValue !== undefined && propName != undefined) {\n if (propertyMapper.xmlIsAttribute) {\n // $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject.$ = parentObject.$ || {};\n parentObject.$[propName] = serializedValue;\n } else if (propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue };\n } else {\n parentObject[propName] = serializedValue;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]'\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction isSpecialXmlProperty(propertyName: string): boolean {\n return [\"$\", \"_\"].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody.$) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody.$[xmlName!],\n propertyObjectName\n );\n } else {\n const propertyName = 
xmlElementName || xmlName || serializedName;\n let unwrappedProperty = responseBody[propertyName!];\n if (propertyMapper.xmlIsWrapped) {\n unwrappedProperty = responseBody[xmlName!];\n unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!];\n\n const isEmptyWrappedList = unwrappedProperty === undefined;\n if (isEmptyWrappedList) {\n unwrappedProperty = [];\n }\n }\n instance[key] = serializer.deserialize(\n propertyMapper,\n unwrappedProperty,\n propertyObjectName\n );\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [key, value] of Object.entries(instance)) {\n if (!arrayInstance.hasOwnProperty(key)) {\n arrayInstance[key] = value;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string) => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]'\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key)\n ) {\n instance[key] = responseBody[key];\n }\n }\n 
}\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`);\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\nexport interface MapperConstraints {\n InclusiveMaximum?: number;\n ExclusiveMaximum?: number;\n InclusiveMinimum?: number;\n ExclusiveMinimum?: number;\n MaxLength?: number;\n MinLength?: number;\n Pattern?: RegExp;\n MaxItems?: number;\n MinItems?: number;\n UniqueItems?: true;\n MultipleOf?: number;\n}\n\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\nexport interface SimpleMapperType {\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\nexport interface CompositeMapperType {\n name: \"Composite\";\n\n // Only one of the two below properties should be present.\n // Use className to reference another type definition,\n // and use modelProperties/additionalProperties when the reference to the other type has been resolved.\n className?: string;\n\n modelProperties?: { [propertyName: string]: Mapper };\n additionalProperties?: Mapper;\n\n uberParent?: string;\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\nexport interface SequenceMapperType {\n name: \"Sequence\";\n element: Mapper;\n}\n\nexport interface DictionaryMapperType {\n name: \"Dictionary\";\n value: Mapper;\n}\n\nexport interface EnumMapperType {\n name: \"Enum\";\n allowedValues: any[];\n}\n\nexport interface BaseMapper {\n xmlName?: string;\n xmlIsAttribute?: boolean;\n xmlElementName?: string;\n xmlIsWrapped?: boolean;\n readOnly?: boolean;\n isConstant?: boolean;\n required?: boolean;\n nullable?: boolean;\n serializedName?: string;\n type: MapperType;\n defaultValue?: any;\n constraints?: MapperConstraints;\n}\n\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\nexport interface PolymorphicDiscriminator {\n serializedName: string;\n clientName: string;\n [key: string]: string;\n}\n\nexport interface CompositeMapper extends BaseMapper {\n type: CompositeMapperType;\n}\n\nexport interface SequenceMapper extends BaseMapper {\n type: SequenceMapperType;\n}\n\nexport interface DictionaryMapper extends BaseMapper {\n type: DictionaryMapperType;\n headerCollectionPrefix?: string;\n}\n\nexport interface EnumMapper extends BaseMapper {\n type: EnumMapperType;\n}\n\nexport interface UrlParameterValue {\n value: string;\n skipUrlEncoding: boolean;\n}\n\n// TODO: why is this here?\nexport function serializeObject(toSerialize: any): any {\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = 
base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(toSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { generateUuid } from \"./util/utils\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { AgentSettings, ProxySettings } from \"./serviceClient\";\n\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n readonly aborted: boolean;\n dispatchEvent: (event: Event) => boolean;\n onabort: ((this: AbortSignalLike, ev: Event) => any) | null;\n addEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n removeEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n}\n\n/**\n * An abstraction over a REST call.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * HTTP(S) agent configuration.\n */\n agentSettings?: AgentSettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n /**\n * Used to abort the request later.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: any): object is WebResourceLike {\n if (typeof object !== \"object\") {\n return false;\n }\n if (\n typeof object.url === \"string\" &&\n typeof object.method === \"string\" &&\n typeof object.headers === \"object\" &&\n isHttpHeadersLike(object.headers) &&\n typeof object.validateRequestProperties === \"function\" &&\n typeof object.prepare === \"function\" &&\n typeof object.clone === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n *\n * @constructor\n */\nexport class WebResource {\n url: string;\n method: HttpMethods;\n body?: any;\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n query?: { [key: string]: any };\n operationSpec?: OperationSpec;\n withCredentials: boolean;\n timeout: number;\n proxySettings?: ProxySettings;\n keepAlive?: boolean;\n agentSettings?: AgentSettings;\n redirectLimit?: number;\n\n abortSignal?: AbortSignalLike;\n\n /** Callback which fires upon upload progress. */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: any,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n agentSettings?: AgentSettings,\n redirectLimit?: number\n ) {\n this.streamResponseBody = streamResponseBody;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.agentSettings = agentSettings;\n this.redirectLimit = redirectLimit;\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param {RequestPrepareOptions} options Options to provide for preparing the request.\n * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (options.method == undefined || typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url == undefined || typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({\\w*\\s*\\w*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in ${pathParameters} - ${JSON.stringify(\n pathParameters,\n undefined,\n 2\n )}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", generateUuid());\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty\n this.body = options.body;\n if (options.body != undefined) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n this.redirectLimit = options.redirectLimit;\n this.streamResponseBody = options.streamResponseBody;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns {WebResource} The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.agentSettings,\n this.redirectLimit\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: { \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }\n * - query-parameter-value in \"string\" format: { \"query-parameter-name\": \"query-parameter-value\"}.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}\"\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: { \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }\n * - path-parameter-value in \"string\" format: { \"path-parameter-name\": \"path-parameter-value\" }.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: object;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n abortSignal?: AbortSignalLike;\n /**\n * Limit the number of redirects followed for this request. 
If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n streamResponseBody?: boolean;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n value: any;\n skipUrlEncoding: boolean;\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * @property {object} [customHeaders] User defined custom request headers that\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n [key: string]: any;\n}\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport function __createBinding(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}\r\n\r\nexport function __exportStar(m, exports) {\r\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) exports[p] = m[p];\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n};\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\r\n result.default = mod;\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\n\nexport class RestError extends Error {\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n static readonly REQUEST_ABORTED_ERROR: string = \"REQUEST_ABORTED_ERROR\";\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n code?: string;\n statusCode?: number;\n request?: WebResourceLike;\n response?: HttpOperationResponse;\n body?: any;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse,\n body?: any\n ) {\n super(message);\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n this.body = body;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpClient } from \"./httpClient\";\nimport { HttpHeaders } from \"./httpHeaders\";\nimport { WebResourceLike, TransferProgressEvent } from \"./webResource\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\n\n/**\n * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests.\n */\nexport class XhrHttpClient implements HttpClient {\n public sendRequest(request: WebResourceLike): Promise {\n const xhr = new XMLHttpRequest();\n\n if (request.agentSettings) {\n throw new Error(\"HTTP agent settings not supported in browser environment\");\n }\n\n if (request.proxySettings) {\n throw new Error(\"HTTP proxy is not supported in browser environment\");\n }\n\n const abortSignal = request.abortSignal;\n if (abortSignal) {\n const listener = () => {\n xhr.abort();\n };\n abortSignal.addEventListener(\"abort\", listener);\n xhr.addEventListener(\"readystatechange\", () => {\n if (xhr.readyState === XMLHttpRequest.DONE) {\n abortSignal.removeEventListener(\"abort\", listener);\n }\n });\n }\n\n addProgressListener(xhr.upload, request.onUploadProgress);\n addProgressListener(xhr, request.onDownloadProgress);\n\n if (request.formData) {\n const formData = request.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any) => {\n if (value && value.hasOwnProperty(\"value\") && value.hasOwnProperty(\"options\")) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n request.body = requestForm;\n request.formData = undefined;\n const contentType = request.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n // browser will automatically apply a suitable content-type header\n request.headers.remove(\"Content-Type\");\n }\n }\n\n xhr.open(request.method, request.url);\n xhr.timeout = request.timeout;\n xhr.withCredentials = request.withCredentials;\n for (const header of request.headers.headersArray()) {\n xhr.setRequestHeader(header.name, header.value);\n }\n xhr.responseType = request.streamResponseBody ? \"blob\" : \"text\";\n\n // tslint:disable-next-line:no-null-keyword\n xhr.send(request.body === undefined ? 
null : request.body);\n\n if (request.streamResponseBody) {\n return new Promise((resolve, reject) => {\n xhr.addEventListener(\"readystatechange\", () => {\n // Resolve as soon as headers are loaded\n if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) {\n const blobBody = new Promise((resolve, reject) => {\n xhr.addEventListener(\"load\", () => {\n resolve(xhr.response);\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n blobBody,\n });\n }\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n } else {\n return new Promise(function (resolve, reject) {\n xhr.addEventListener(\"load\", () =>\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: xhr.responseText,\n })\n );\n rejectOnTerminalEvent(request, xhr, reject);\n });\n }\n }\n}\n\nfunction addProgressListener(\n xhr: XMLHttpRequestEventTarget,\n listener?: (progress: TransferProgressEvent) => void\n) {\n if (listener) {\n xhr.addEventListener(\"progress\", (rawEvent) =>\n listener({\n loadedBytes: rawEvent.loaded,\n })\n );\n }\n}\n\n// exported locally for testing\nexport function parseHeaders(xhr: XMLHttpRequest) {\n const responseHeaders = new HttpHeaders();\n const headerLines = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n for (const line of headerLines) {\n const index = line.indexOf(\":\");\n const headerName = line.slice(0, index);\n const headerValue = line.slice(index + 2);\n responseHeaders.set(headerName, headerValue);\n }\n return responseHeaders;\n}\n\nfunction rejectOnTerminalEvent(\n request: WebResourceLike,\n xhr: XMLHttpRequest,\n reject: (err: any) => void\n) {\n xhr.addEventListener(\"error\", () =>\n reject(\n new RestError(\n `Failed to send request to ${request.url}`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n xhr.addEventListener(\"abort\", () =>\n reject(\n new RestError(\"The request was aborted\", RestError.REQUEST_ABORTED_ERROR, undefined, request)\n )\n );\n xhr.addEventListener(\"timeout\", () =>\n reject(\n new RestError(\n `timeout of ${xhr.timeout}ms exceeded`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { Mapper } from \"./serializer\";\n\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { MapperType, Serializer } from \"./serializer\";\nimport { HttpMethods } from \"./webResource\";\n\n/**\n * A specification that defines an operation.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. 
This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\nexport function isStreamOperation(operationSpec: OperationSpec): boolean {\n let result = false;\n for (const statusCode in operationSpec.responses) {\n const operationResponse: OperationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result = true;\n break;\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nconst parser = new DOMParser();\nexport function parseXML(str: string): Promise {\n try {\n const dom = parser.parseFromString(str, \"application/xml\");\n throwIfError(dom);\n\n const obj = domToObject(dom.childNodes[0]);\n return Promise.resolve(obj);\n } catch (err) {\n return Promise.reject(err);\n }\n}\n\nlet errorNS = \"\";\ntry {\n errorNS = parser.parseFromString(\"INVALID\", \"text/xml\").getElementsByTagName(\"parsererror\")[0]\n .namespaceURI!;\n} catch (ignored) {\n // Most browsers will return a document containing , but IE will throw.\n}\n\nfunction throwIfError(dom: Document) {\n if (errorNS) {\n const parserErrors = dom.getElementsByTagNameNS(errorNS, \"parsererror\");\n if (parserErrors.length) {\n throw new Error(parserErrors.item(0)!.innerHTML);\n }\n }\n}\n\nfunction isElement(node: Node): node is Element {\n return !!(node as Element).attributes;\n}\n\n/**\n * Get the Element-typed version of the provided Node if the provided node is an element with\n * attributes. If it isn't, then undefined is returned.\n */\nfunction asElementWithAttributes(node: Node): Element | undefined {\n return isElement(node) && node.hasAttributes() ? 
node : undefined;\n}\n\nfunction domToObject(node: Node): any {\n let result: any = {};\n\n const childNodeCount: number = node.childNodes.length;\n\n const firstChildNode: Node = node.childNodes[0];\n const onlyChildTextValue: string | undefined =\n (firstChildNode &&\n childNodeCount === 1 &&\n firstChildNode.nodeType === Node.TEXT_NODE &&\n firstChildNode.nodeValue) ||\n undefined;\n\n const elementWithAttributes: Element | undefined = asElementWithAttributes(node);\n if (elementWithAttributes) {\n result[\"$\"] = {};\n\n for (let i = 0; i < elementWithAttributes.attributes.length; i++) {\n const attr = elementWithAttributes.attributes[i];\n result[\"$\"][attr.nodeName] = attr.nodeValue;\n }\n\n if (onlyChildTextValue) {\n result[\"_\"] = onlyChildTextValue;\n }\n } else if (childNodeCount === 0) {\n result = \"\";\n } else if (onlyChildTextValue) {\n result = onlyChildTextValue;\n }\n\n if (!onlyChildTextValue) {\n for (let i = 0; i < childNodeCount; i++) {\n const child = node.childNodes[i];\n // Ignore leading/trailing whitespace nodes\n if (child.nodeType !== Node.TEXT_NODE) {\n const childObject: any = domToObject(child);\n if (!result[child.nodeName]) {\n result[child.nodeName] = childObject;\n } else if (Array.isArray(result[child.nodeName])) {\n result[child.nodeName].push(childObject);\n } else {\n result[child.nodeName] = [result[child.nodeName], childObject];\n }\n }\n }\n }\n\n return result;\n}\n\n// tslint:disable-next-line:no-null-keyword\nconst doc = document.implementation.createDocument(null, null, null);\nconst serializer = new XMLSerializer();\n\nexport function stringifyXML(obj: any, opts?: { rootName?: string }) {\n const rootName = (opts && opts.rootName) || \"root\";\n const dom = buildNode(obj, rootName)[0];\n return (\n '' + serializer.serializeToString(dom)\n );\n}\n\nfunction buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {\n const result = [];\n for (const key of Object.keys(attrs)) {\n const attr = doc.createAttribute(key);\n attr.value = attrs[key].toString();\n result.push(attr);\n }\n return result;\n}\n\nfunction buildNode(obj: any, elementName: string): Node[] {\n if (typeof obj === \"string\" || typeof obj === \"number\" || typeof obj === \"boolean\") {\n const elem = doc.createElement(elementName);\n elem.textContent = obj.toString();\n return [elem];\n } else if (Array.isArray(obj)) {\n const result = [];\n for (const arrayElem of obj) {\n for (const child of buildNode(arrayElem, elementName)) {\n result.push(child);\n }\n }\n return result;\n } else if (typeof obj === \"object\") {\n const elem = doc.createElement(elementName);\n for (const key of Object.keys(obj)) {\n if (key === \"$\") {\n for (const attr of buildAttributes(obj[key])) {\n elem.attributes.setNamedItem(attr);\n }\n } else {\n for (const child of buildNode(obj[key], key)) {\n elem.appendChild(child);\n }\n }\n }\n return [elem];\n } else {\n throw new Error(`Illegal value passed to buildObject: ${obj}`);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\nexport interface RequestPolicy {\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n protected constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions implements RequestPolicyOptionsLike {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec, isStreamOperation } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { Mapper, MapperType } from \"../serializer\";\nimport * as utils from \"../util/utils\";\nimport { parseXML } from \"../util/xml\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options);\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n\n constructor(\n nextPolicy: RequestPolicy,\n deserializationContentTypes: DeserializationContentTypes | undefined,\n options: RequestPolicyOptionsLike\n ) {\n super(nextPolicy, options);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response)\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return 
result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse\n): Promise {\n return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => {\n const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse);\n if (shouldDeserialize) {\n const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec;\n if (operationSpec && operationSpec.responses) {\n const statusCode: number = parsedResponse.status;\n\n const expectedStatusCodes: string[] = Object.keys(operationSpec.responses);\n\n const hasNoExpectedStatusCodes: boolean =\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\");\n\n const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse);\n\n const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes\n ? 200 <= statusCode && statusCode < 300\n : !!responseSpec;\n if (!isExpectedStatusCode) {\n const defaultResponseSpec: OperationResponse = operationSpec.responses.default;\n if (defaultResponseSpec) {\n const initialErrorMessage: string = isStreamOperation(operationSpec)\n ? `Unexpected status code: ${statusCode}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(initialErrorMessage);\n error.statusCode = statusCode;\n error.request = utils.stripRequest(parsedResponse.request);\n error.response = utils.stripResponse(parsedResponse);\n\n let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody;\n try {\n if (parsedErrorResponse) {\n const defaultResponseBodyMapper: Mapper | undefined =\n defaultResponseSpec.bodyMapper;\n if (\n defaultResponseBodyMapper &&\n defaultResponseBodyMapper.serializedName === \"CloudError\"\n ) {\n if (parsedErrorResponse.error) {\n parsedErrorResponse = parsedErrorResponse.error;\n }\n if (parsedErrorResponse.code) {\n error.code = parsedErrorResponse.code;\n }\n if (parsedErrorResponse.message) {\n error.message = parsedErrorResponse.message;\n }\n } else {\n let internalError: any = parsedErrorResponse;\n if (parsedErrorResponse.error) {\n internalError = parsedErrorResponse.error;\n }\n\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n }\n\n if (defaultResponseBodyMapper) {\n let valueToDeserialize: any = parsedErrorResponse;\n if (\n operationSpec.isXML &&\n defaultResponseBodyMapper.type.name === MapperType.Sequence\n ) {\n valueToDeserialize =\n typeof parsedErrorResponse === \"object\"\n ? 
parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!]\n : [];\n }\n error.body = operationSpec.serializer.deserialize(\n defaultResponseBodyMapper,\n valueToDeserialize,\n \"error.body\"\n );\n }\n }\n } catch (defaultError) {\n error.message = `Error \\\"${defaultError.message}\\\" occurred in deserializing the responseBody - \\\"${parsedResponse.bodyAsText}\\\" for the default response.`;\n }\n return Promise.reject(error);\n }\n } else if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\"\n );\n } catch (error) {\n const restError = new RestError(\n `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`\n );\n restError.request = utils.stripRequest(parsedResponse.request);\n restError.response = utils.stripResponse(parsedResponse);\n return Promise.reject(restError);\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n }\n }\n }\n return Promise.resolve(parsedResponse);\n });\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse\n): Promise {\n const errorHandler = (err: Error & { code: string }) => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse,\n operationResponse.bodyAsText\n );\n return Promise.reject(e);\n };\n\n if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { RestError } from \"../restError\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\nconst DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nconst DEFAULT_CLIENT_RETRY_COUNT = 3;\nconst DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nconst DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The minimum retry interval in milliseconds.\n */\n minRetryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @constructor\n * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain.\n * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy.\n * @param {number} [retryCount] The client retry count.\n * @param {number} [retryInterval] The client retry interval, in milliseconds.\n * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds.\n * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n function isNumber(n: any): n is number {\n return typeof n === \"number\";\n }\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(\n policy: ExponentialRetryPolicy,\n statusCode: number | undefined,\n retryData: RetryData\n): boolean {\n if (\n statusCode == undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n\n let currentCount: number;\n if (!retryData) {\n throw new Error(\"retryData for the ExponentialRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {RetryData} retryData The retry data.\n * @param {RetryError} [err] The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: ExponentialRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 +\n Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nfunction retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n retryData = updateRetryData(policy, retryData, requestError);\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy, response && response.status, retryData)) {\n return utils\n .delay(retryData.retryInterval)\n .then(() => policy._nextPolicy.sendRequest(request.clone()))\n .then((res) => retry(policy, request, res, retryData, undefined))\n .catch((err) => retry(policy, request, response, retryData, err));\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n return Promise.reject(err);\n } else {\n return Promise.resolve(response);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, utils.generateUuid());\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json\n * and \"plugins\" section in webpack.testconfig.ts.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\ninterface NavigatorEx extends Navigator {\n // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2\n readonly oscpu: string;\n}\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-command-name\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const navigator = self.navigator as NavigatorEx;\n const osInfo = {\n key: \"OS\",\n value: (navigator.oscpu || navigator.platform).replace(\" \", \"\"),\n };\n\n return [osInfo];\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport type TelemetryInfo = { key?: string; value?: string };\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"ms-rest-js\",\n value: Constants.msRestVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? 
`${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value == undefined\n ? getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\" | \"Invalid\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: any): void {\n if (parameterName) {\n if (parameterValue != undefined) {\n const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port == undefined || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. 
If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: any): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n const tokenPath: string | undefined = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: 
URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state != undefined ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /*\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /*\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly maxRetries = 20\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && [\"GET\", \"HEAD\"].includes(request.method)) ||\n (status === 302 && [\"GET\", \"POST\", \"HEAD\"].includes(request.method)) ||\n (status === 303 && \"POST\" === request.method) ||\n status === 307) &&\n ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) ||\n (request.redirectLimit === undefined && currentRetries < policy.maxRetries))\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 302 and 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch\n if ((status === 302 || status === 303) && request.method === \"POST\") {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1))\n .then((res) => recordRedirect(res, request.url));\n }\n\n return Promise.resolve(response);\n}\n\nfunction recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse {\n // This is called as the recursive calls to handleRedirect() unwind,\n // only record the deepest/last redirect\n if (!response.redirected) {\n response.redirected = true;\n response.url = redirect;\n }\n return response;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param {WebResourceLike} originalRequest The original request\n * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false.\n * @returns {object} A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param {string} body The response body received after making the original request.\n * @returns {string} The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} url The original request url\n * @returns {string} The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} provider The provider name to be registered.\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param {registrationCallback} callback The callback that handles the RP registration\n */\nfunction registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n return policy._nextPolicy.sendRequest(reqOptions).then((response) => {\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} url The request url for polling\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns {Promise} True if RP Registration is successful.\n */\nfunction getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n return policy._nextPolicy.sendRequest(reqOptions).then((res) => {\n const obj = res.parsedBody as any;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n return utils\n .delay(policy._retryTimeout * 1000)\n .then(() => getRegistrationStatus(policy, url, originalRequest));\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n *\n * @constructor\n * @param {number} retryCount The client retry count.\n * @param {number} retryInterval The client retry interval, in milliseconds.\n * @param {number} minRetryInterval The minimum retry interval, in milliseconds.\n * @param {number} maxRetryInterval The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\n DEFAULT_CLIENT_RETRY_COUNT = 3;\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = typeof retryCount === \"number\" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval =\n typeof retryInterval === \"number\" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval =\n typeof minRetryInterval === \"number\"\n ? minRetryInterval\n : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval =\n typeof maxRetryInterval === \"number\"\n ? 
maxRetryInterval\n : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean {\n let currentCount;\n if (!retryData) {\n throw new Error(\"retryData for the SystemErrorRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {RetryData} retryData The retry data.\n * @param {object} err The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: SystemErrorRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n if (\n err &&\n err.code &&\n shouldRetry(policy, retryData) &&\n (err.code === \"ETIMEDOUT\" ||\n err.code === \"ESOCKETTIMEDOUT\" ||\n err.code === \"ECONNREFUSED\" ||\n err.code === \"ECONNRESET\" ||\n err.code === \"ENOENT\")\n ) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await utils.delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (error) {\n return retry(policy, request, operationResponse, error, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n Csv = \",\",\n Ssv = \" \",\n Tsv = \"\\t\",\n Pipes = \"|\",\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { AgentSettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst agentNotSupportedInBrowser = new Error(\"AgentPolicy is not supported in browser environment\");\n\nexport function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw agentNotSupportedInBrowser;\n },\n };\n}\n\nexport class AgentPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw agentNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw agentNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ProxySettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst proxyNotSupportedInBrowser = new Error(\"ProxyPolicy is not supported in browser environment\");\n\nexport function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined {\n return undefined;\n}\n\nexport function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw proxyNotSupportedInBrowser;\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw proxyNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw proxyNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyOptionsLike,\n RequestPolicyFactory,\n} from \"./requestPolicy\";\nimport { WebResourceLike } from \"../webResource\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { delay } from \"../util/utils\";\n\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\nconst DEFAULT_RETRY_COUNT = 3;\n\n/**\n * Options that control how to retry on response status code 429.\n */\nexport interface ThrottlingRetryOptions {\n /**\n * The maximum number of retry attempts. 
Defaults to 3.\n */\n maxRetries?: number;\n}\n\nexport function throttlingRetryPolicy(\n maxRetries: number = DEFAULT_RETRY_COUNT\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries);\n },\n };\n}\n\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private retryLimit: number;\n\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) {\n super(nextPolicy, options);\n this.retryLimit = retryLimit;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => {\n return this.retry(httpRequest, response, 0);\n });\n }\n\n private async retry(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse,\n retryCount: number\n ): Promise {\n if (httpResponse.status !== StatusCodes.TooManyRequests) {\n return httpResponse;\n }\n\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader && retryCount < this.retryLimit) {\n const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader(\n retryAfterHeader\n );\n if (delayInMs) {\n await delay(delayInMs);\n const res = await this._nextPolicy.sendRequest(httpRequest);\n return this.retry(httpRequest, res, retryCount + 1);\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { Constants as MSRestConstants } from \"../util/constants\";\nimport { WebResource } from \"../webResource\";\n\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenResponse } from \"./tokenResponse\";\n\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter.\n */\nexport const azureResourceManagerEndpoints = [\n \"https://management.windows.net\",\n \"https://management.chinacloudapi.cn\",\n \"https://management.usgovcloudapi.net\",\n \"https://management.cloudapi.de\",\n];\n\n/**\n * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to\n * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication.\n */\nexport class AzureIdentityCredentialAdapter implements ServiceClientCredentials {\n private azureTokenCredential: TokenCredential;\n private scopes: string | string[];\n constructor(\n azureTokenCredential: TokenCredential,\n scopes: string | string[] = \"https://management.azure.com/.default\"\n ) {\n this.azureTokenCredential = azureTokenCredential;\n this.scopes = scopes;\n }\n\n public async getToken(): Promise {\n const accessToken = await this.azureTokenCredential.getToken(this.scopes);\n if (accessToken !== null) {\n const result: TokenResponse = {\n accessToken: accessToken.token,\n tokenType: DEFAULT_AUTHORIZATION_SCHEME,\n expiresOn: accessToken.expiresOnTimestamp,\n };\n return result;\n } else {\n throw new Error(\"Could find token for scope\");\n }\n }\n\n public async signRequest(webResource: WebResource) {\n const tokenResponse = await this.getToken();\n webResource.headers.set(\n MSRestConstants.HeaderConstants.AUTHORIZATION,\n `${tokenResponse.tokenType} ${tokenResponse.accessToken}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { OperationArguments } from \"./operationArguments\";\nimport {\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n OperationParameter,\n ParameterPath,\n} from \"./operationParameter\";\nimport { isStreamOperation, OperationSpec } from \"./operationSpec\";\nimport {\n deserializationPolicy,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nimport { exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport {\n userAgentPolicy,\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n} from \"./policies/userAgentPolicy\";\nimport { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport { URLBuilder } from \"./url\";\nimport * as utils from \"./util/utils\";\nimport { stringifyXML } from \"./util/xml\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResourceLike,\n isWebResourceLike,\n WebResource,\n} from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { agentPolicy } from \"./policies/agentPolicy\";\nimport { proxyPolicy, getDefaultProxySettings } from \"./policies/proxyPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { Agent } from \"http\";\nimport {\n AzureIdentityCredentialAdapter,\n azureResourceManagerEndpoints,\n} from \"./credentials/azureIdentityTokenCredentialAdapter\";\n\n/**\n * HTTP proxy settings (Node.js only)\n */\nexport interface ProxySettings {\n host: string;\n port: number;\n username?: string;\n password?: string;\n}\n\n/**\n * HTTP and HTTPS agents (Node.js only)\n */\nexport interface AgentSettings {\n http: Agent;\n https: Agent;\n}\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP 
pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-command-name\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n /**\n * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only).\n */\n agentSettings?: AgentSettings;\n /**\n * If specified:\n * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient.\n * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. 
Otherwise, the scope would default to \"https://management.azure.com/.default\".\n *\n * If it is not specified:\n * - All OperationSpecs must contain a baseUrl property.\n * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be \"https://management.azure.com/.default\".\n */\n baseUri?: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the ServiceClient.\n */\nexport class ServiceClient {\n /**\n * The base URI against which requests will be made when using this ServiceClient instance.\n *\n * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient.\n * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default \"https://management.azure.com/.default\"\n *\n * If it is not specified, all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptionsLike;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @constructor\n * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication.\n * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: ServiceClientCredentials | TokenCredential,\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n if (options.baseUri) {\n this.baseUri = options.baseUri;\n }\n\n let serviceClientCredentials: ServiceClientCredentials | undefined;\n if (isTokenCredential(credentials)) {\n let scope: string | undefined = undefined;\n if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) {\n scope = `${options.baseUri}/.default`;\n }\n serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope);\n } else {\n serviceClientCredentials = credentials;\n }\n\n if (serviceClientCredentials && !serviceClientCredentials.signRequest) {\n throw new Error(\"credentials argument needs to implement signRequest method\");\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || new DefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n requestPolicyFactories = createDefaultRequestPolicyFactories(\n serviceClientCredentials,\n options\n );\n if (options.requestPolicyFactories) {\n const newRequestPolicyFactories:\n | void\n | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided 
httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from.\n * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest.\n * @param {ServiceCallback} callback The callback to call when the response is received.\n */\n sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const httpRequest = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter)\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue != undefined) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter)\n );\n if (queryParameter.collectionFormat != undefined) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if 
(queryParameterValue.length === 0) {\n queryParameterValue = \"\";\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] = item == undefined ? \"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat != undefined &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue != undefined) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter)\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseBody == undefined) {\n httpRequest.streamResponseBody = isStreamOperation(operationSpec);\n }\n\n result = this.sendRequest(httpRequest).then((res) =>\n flattenResponse(res, operationSpec.responses[res.status])\n );\n } catch (error) {\n result = Promise.reject(error);\n }\n\n const 
cb = callback;\n if (cb) {\n result\n // tslint:disable-next-line:no-null-keyword\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName } = bodyMapper;\n const typeName = bodyMapper.type.name;\n try {\n if (httpRequest.body != undefined || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString\n );\n const isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n httpRequest.body,\n xmlElementName || xmlName || serializedName!\n ),\n { rootName: xmlName || serializedName }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(httpRequest.body, {\n rootName: xmlName || serializedName,\n });\n }\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue != undefined) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter)\n );\n }\n }\n }\n}\n\nfunction isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory {\n return typeof instance.create === \"function\";\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n credentials: ServiceClientCredentials | RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (credentials) {\n if (isRequestPolicyFactory(credentials)) {\n 
factories.push(credentials);\n } else {\n factories.push(signingPolicy(credentials));\n }\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...options.redirectOptions,\n };\n if (redirectOptions.handleRedirects) {\n factories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n const proxySettings = options.proxySettings || getDefaultProxySettings();\n if (proxySettings) {\n factories.push(proxyPolicy(proxySettings));\n }\n\n if (options.agentSettings) {\n factories.push(agentPolicy(options.agentSettings));\n }\n\n return factories;\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString);\n if (propertyValue !== undefined) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent != undefined && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = (obj: {}) =>\n Object.defineProperty(obj, \"_response\", {\n value: _response,\n });\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n // We're expecting a sequece(array) make sure that the response body is in the\n // correct format, if not make it an empty array []\n const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : [];\n const arrayResponse = [...parsedBody] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function logPolicy(logger: any = console.log): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new LogPolicy(nextPolicy, options, logger);\n },\n };\n}\n\nexport class LogPolicy extends BaseRequestPolicy {\n logger?: any;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n logger: any = console.log\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response));\n }\n}\n\nfunction logResponse(\n policy: LogPolicy,\n response: HttpOperationResponse\n): Promise {\n policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`);\n policy.logger(`>> Response status code: ${response.status}`);\n const responseBody = response.bodyAsText;\n policy.logger(`>> Body: ${responseBody}`);\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * A credentials object that uses a token string and a authorzation scheme to authenticate.\n */\nexport class TokenCredentials implements ServiceClientCredentials {\n token: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new TokenCredentials object.\n *\n * @constructor\n * @param {string} token The token.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) {\n if (!token) {\n throw new Error(\"token cannot be null or undefined.\");\n }\n this.token = token;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @return {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(\n HeaderConstants.AUTHORIZATION,\n `${this.authorizationScheme} ${this.token}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n userName: string;\n password: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @constructor\n * @param {string} userName User name.\n * @param {string} password Password.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers 
= new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\n/**\n * @interface ApiKeyCredentialOptions\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @constructor\n * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param {WebResource} webResource The WebResource to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @constructor\n * @param {string} topicKey The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class DomainCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid DomainCredentials object.\n *\n * @constructor\n * @param {string} domainKey The EventGrid domain key\n */\n constructor(domainKey: string) {\n if (!domainKey || (domainKey && typeof domainKey !== \"string\")) {\n throw new Error(\"domainKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": domainKey,\n },\n };\n super(options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.node.js b/node_modules/@azure/ms-rest-js/dist/msRest.node.js new file mode 100644 index 00000000..6a07d2ce --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.node.js @@ -0,0 +1,5430 @@ +/** @license ms-rest-js + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt and ThirdPartyNotices.txt in the project root for license information. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var uuid = require('uuid'); +var tslib = require('tslib'); +var tough = require('tough-cookie'); +var http = require('http'); +var https = require('https'); +var node_fetch = _interopDefault(require('node-fetch')); +var FormData = _interopDefault(require('form-data')); +var stream = require('stream'); +var tunnel = require('tunnel'); +var xml2js = require('xml2js'); +var os = require('os'); + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +function isHttpHeadersLike(object) { + if (!object || typeof object !== "object") { + return false; + } + if (typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function") { + return true; + } + return false; +} +/** + * A collection of HTTP header key/value pairs. 
+ */ +var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (var headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + HttpHeaders.prototype.set = function (headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + }; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + }; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + HttpHeaders.prototype.contains = function (headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + }; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + }; + /** + * Get the headers that are contained this collection as an object. + */ + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + }; + /** + * Get the headers that are contained in this collection as an array. + */ + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + }; + /** + * Get the JSON object representation of this HTTP header collection. + */ + HttpHeaders.prototype.toJson = function () { + return this.rawHeaders(); + }; + /** + * Get the string representation of this HTTP header collection. + */ + HttpHeaders.prototype.toString = function () { + return JSON.stringify(this.toJson()); + }; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + HttpHeaders.prototype.clone = function () { + return new HttpHeaders(this.rawHeaders()); + }; + return HttpHeaders; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Encodes a string in base64 format. 
+ * @param value the string to encode + */ +function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + var bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +function decodeString(value) { + return Buffer.from(value, "base64"); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.6.1", + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, +}; + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +var isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +function stripResponse(response) { + var strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. 
+ * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +function stripRequest(request) { + var strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +function isValidUuid(uuid) { + var validUuidRegex = new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", "ig"); + return validUuidRegex.test(uuid); +} +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +function generateUuid() { + return uuid.v4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +function executePromisesSequentially(promiseFactories, kickstart) { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; +} +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); +} +/** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + cb(undefined, data); + }, function (err) { + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }, function (err) { + process.nextTick(cb, err); + }); + }; +} +function prepareXMLRootList(obj, elementName) { + var _a; + if (!Array.isArray(obj)) { + obj = [obj]; + } + return _a = {}, _a[elementName] = obj, _a; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. 
+ * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ +function applyMixins(targetCtor, sourceCtors) { + sourceCtors.forEach(function (sourceCtors) { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach(function (name) { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); +} +var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ +function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } + this.modelMappers = modelMappers; + this.isXML = isXML; + } + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); + }; + if (mapper.constraints && value != undefined) { + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + var pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { + failValidation("UniqueItems", UniqueItems); + } + } + }; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + Serializer.prototype.serialize = function (mapper, object, objectName) { + var payload = {}; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + var required = mapper.required, nullable = mapper.nullable; + if (required && nullable && object === undefined) { + throw new Error(objectName + " cannot be undefined."); + } + if (required && !nullable && object == undefined) { + throw new Error(objectName + " cannot be null or undefined."); + } + if (!required && nullable === false && object === null) { + throw new Error(objectName + " cannot be null."); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/gi) !== null) { + var enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName); + } + } + return payload; + }; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + Serializer.prototype.deserialize = function (mapper, responseBody, objectName) { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + var payload; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName); + } + else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + }; + return Serializer; +}()); +function trimEnd(str, ch) { + var len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); + } + // Uint8Array to Base64. + var str = encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. 
+ return decodeString(str); +} +function splitSerializeName(prop) { + var classes = []; + var partialclass = ""; + if (prop) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(objectName + " with value " + value + " must be of type number."); + } + } + else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); + } + } + else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); + } + } + else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(objectName + " with value " + value + " must be of type boolean."); + } + } + else if (typeName.match(/^Stream$/gi) !== null) { + var objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob)) { + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); + } + var isPresent = allowedValues.some(function (item) { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(value + " is not a valid value for " + objectName + ". The valid values are: " + JSON.stringify(allowedValues) + "."); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!isDuration(value)) { + throw new Error(objectName + " must be a string in ISO 8601 format. Instead was \"" + value + "\"."); + } + value = value; + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName) { + if (!Array.isArray(object)) { + throw new Error(objectName + " must be of type Array."); + } + var elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempArray = []; + for (var i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName) { + if (typeof object !== "object") { + throw new Error(objectName + " must be of type object."); + } + var valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; +} +/** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + var modelProps = mapper.type.modelProperties; + if (!modelProps) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + var modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + var propName = void 0; + var parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + var paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { + var pathName = paths_1[_c]; + var childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + var propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } + else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + } + else { + parentObject[propName] = serializedValue; + } + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]'); + } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); + } + } + return payload; + } + return object; +} +function isSpecialXmlProperty(propertyName) { + return ["$", "_"].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { + var key = _a[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + var dictionary = {}; + for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { + var headerKey = _c[_b]; + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize(propertyMapper, responseBody.$[xmlName], propertyObjectName); + } + else { + var propertyName = xmlElementName || xmlName || serializedName; + var unwrappedProperty = responseBody[propertyName]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; + var isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + var propertyInstance = void 0; + var res = responseBody; + // traversing the object step by step. 
+ for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { + var item = paths_2[_d]; + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + var serializedValue = void 0; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (var _e = 0, _f = Object.entries(instance); _e < _f.length; _e++) { + var _g = _f[_e], key_1 = _g[0], value = _g[1]; + if (!arrayInstance.hasOwnProperty(key_1)) { + arrayInstance[key_1] = value; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + instance[key] = serializedValue; + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (var responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]'); + } + } + } + else if (responseBody) { + for (var _h = 0, _j = Object.keys(responseBody); _h < _j.length; _h++) { + var key = _j[_h]; + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.deserialize(value, responseBody[key], 
objectName); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]"); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + var discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +// TODO: why is this here? +function serializeObject(toSerialize) { + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + var dictionary = {}; + for (var property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; + result[key] = key; + } + return result; +} +var MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+function isWebResourceLike(object) { + if (typeof object !== "object") { + return false; + } + if (typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function") { + return true; + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ +var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, agentSettings, redirectLimit) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + WebResource.prototype.validateRequestProperties = function () { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + }; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + WebResource.prototype.prepare = function (options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. 
Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + var baseUrl = options.baseUrl; + var url_1 = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate_1.startsWith("/") ? pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); + } + segments.forEach(function (item) { + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in " + pathParameters_1 + " - " + JSON.stringify(pathParameters_1, undefined, 2) + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); + } + if (typeof pathParam.valueOf() === "string") { + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (pathParam.skipUrlEncoding) { + url_1 = url_1.replace(item, pathParam.value); + } + else { + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url_1; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + var queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + var queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + return this; + }; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.agentSettings, this.redirectLimit); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + }; + return WebResource; +}()); + +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. 
+ */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. 
+ * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. 
+ * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. 
+ * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. 
+ * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? 
node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. 
+ */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. +Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +var RestError = /** @class */ (function (_super) { + tslib.__extends(RestError, _super); + function RestError(message, code, statusCode, request, response, body) { + var _this = _super.call(this, message) || this; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + _this.body = body; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; + } + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.REQUEST_ABORTED_ERROR = "REQUEST_ABORTED_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; +}(Error)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var FetchHttpClient = /** @class */ (function () { + function FetchHttpClient() { + } + FetchHttpClient.prototype.sendRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var abortController, abortListener, formData, requestForm_1, appendFormValue, _i, _a, formKey, formValue, j, contentType, body, loadedBytes_1, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, _b, _c, onDownloadProgress_1, responseBody, loadedBytes_2, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResource) cannot be null or undefined and must be of type object."); + } + abortController = new AbortController(); + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + abortListener = function (event) { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(function () { + abortController.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + formData = httpRequest.formData; + requestForm_1 = new FormData(); + appendFormValue = function (key, value) { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (_i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + formKey = _a[_i]; + formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + 
httpRequest.body = requestForm_1; + httpRequest.formData = undefined; + contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm_1.getBoundary === "function") { + httpRequest.headers.set("Content-Type", "multipart/form-data; boundary=" + requestForm_1.getBoundary()); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + loadedBytes_1 = 0; + uploadReportStream = new stream.Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_1 += chunk.length; + httpRequest.onUploadProgress({ loadedBytes: loadedBytes_1 }); + callback(undefined, chunk); + }, + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + return [4 /*yield*/, this.prepareRequest(httpRequest)]; + case 1: + platformSpecificRequestInit = _d.sent(); + requestInit = tslib.__assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); + _d.label = 2; + case 2: + _d.trys.push([2, 8, 9, 10]); + return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)]; + case 3: + response = _d.sent(); + headers = parseHeaders(response.headers); + _b = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: httpRequest.streamResponseBody + ? response.body + : undefined + }; + if (!!httpRequest.streamResponseBody) return [3 /*break*/, 5]; + return [4 /*yield*/, response.text()]; + case 4: + _c = _d.sent(); + return [3 /*break*/, 6]; + case 5: + _c = undefined; + _d.label = 6; + case 6: + operationResponse = (_b.bodyAsText = _c, + _b.redirected = response.redirected, + _b.url = response.url, + _b); + onDownloadProgress_1 = httpRequest.onDownloadProgress; + if (onDownloadProgress_1) { + responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + loadedBytes_2 = 0; + downloadReportStream = new stream.Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_2 += chunk.length; + onDownloadProgress_1({ loadedBytes: loadedBytes_2 }); + callback(undefined, chunk); + }, + }); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + length_1 = parseInt(headers.get("Content-Length")) || undefined; + if (length_1) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress_1({ loadedBytes: length_1 }); + } + } + } + return [4 /*yield*/, this.processRequest(operationResponse)]; + case 7: + _d.sent(); + return [2 /*return*/, operationResponse]; + case 8: + error_1 = _d.sent(); + fetchError = error_1; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + throw fetchError; + case 9: + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + 
uploadStreamDone = isStreamComplete(body); + } + downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(function () { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch(function (_e) { }); + } + return [7 /*endfinally*/]; + case 10: return [2 /*return*/]; + } + }); + }); + }; + return FetchHttpClient; +}()); +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise(function (resolve) { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +function parseHeaders(headers) { + var httpHeaders = new HttpHeaders(); + headers.forEach(function (value, key) { + httpHeaders.set(key, value); + }); + return httpHeaders; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * A class that handles the query portion of a URLBuilder. + */ +var URLQuery = /** @class */ (function () { + function URLQuery() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + URLQuery.prototype.any = function () { + return Object.keys(this._rawQuery).length > 0; + }; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + URLQuery.prototype.set = function (parameterName, parameterValue) { + if (parameterName) { + if (parameterValue != undefined) { + var newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + }; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + URLQuery.prototype.get = function (parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + }; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + var parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); + } + result += parameterStrings.join("&"); + } + else { + result += parameterName + "=" + parameterValue; + } + } + return result; + }; + /** + * Parse a URLQuery from the provided text. 
+ */ + URLQuery.parse = function (text) { + var result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + }; + return URLQuery; +}()); +/** + * A class that handles creating, modifying, and parsing URLs. + */ +var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setScheme = function (scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + }; + /** + * Get the scheme that has been set in this URL. + */ + URLBuilder.prototype.getScheme = function () { + return this._scheme; + }; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setHost = function (host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + }; + /** + * Get the host that has been set in this URL. + */ + URLBuilder.prototype.getHost = function () { + return this._host; + }; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setPort = function (port) { + if (port == undefined || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + }; + /** + * Get the port that has been set in this URL. + */ + URLBuilder.prototype.getPort = function () { + return this._port; + }; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + URLBuilder.prototype.setPath = function (path) { + if (!path) { + this._path = undefined; + } + else { + var schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + var schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + }; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. 
+ */ + URLBuilder.prototype.appendPath = function (path) { + if (path) { + var currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + }; + /** + * Get the path that has been set in this URL. + */ + URLBuilder.prototype.getPath = function () { + return this._path; + }; + /** + * Set the query in this URL. + */ + URLBuilder.prototype.setQuery = function (query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + }; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + }; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + }; + /** + * Get the query in this URL. + */ + URLBuilder.prototype.getQuery = function () { + return this._query ? this._query.toString() : undefined; + }; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + var token = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + var tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error("Unrecognized URLTokenType: " + token.type); + } + } + } + }; + URLBuilder.prototype.toString = function () { + var result = ""; + if (this._scheme) { + result += this._scheme + "://"; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += ":" + this._port; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += "?" + this._query.toString(); + } + return result; + }; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. 
+ */ + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + }; + return URLBuilder; +}()); +var URLToken = /** @class */ (function () { + function URLToken(text, type) { + this.text = text; + this.type = type; + } + URLToken.scheme = function (text) { + return new URLToken(text, "SCHEME"); + }; + URLToken.host = function (text) { + return new URLToken(text, "HOST"); + }; + URLToken.port = function (text) { + return new URLToken(text, "PORT"); + }; + URLToken.path = function (text) { + return new URLToken(text, "PATH"); + }; + URLToken.query = function (text) { + return new URLToken(text, "QUERY"); + }; + return URLToken; +}()); +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +function isAlphaNumericCharacter(character) { + var characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + URLTokenizer.prototype.current = function () { + return this._currentToken; + }; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + URLTokenizer.prototype.next = function () { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); + } + } + return !!this._currentToken; + }; + return URLTokenizer; +}()); +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + var result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. 
+ */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ +function peekCharacters(tokenizer, charactersToPeek) { + var endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + var result = ""; + while (hasCurrentCharacter(tokenizer)) { + var currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. + */ +function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); +} +function nextScheme(tokenizer) { + var scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + var host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } 
+ else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + var port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + var path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + var query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function createProxyAgent(requestUrl, proxySettings, headers) { + var tunnelOptions = { + proxy: { + host: URLBuilder.parse(proxySettings.host).getHost(), + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = proxySettings.username + ":" + proxySettings.password; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = "" + proxySettings.username; + } + var requestScheme = URLBuilder.parse(requestUrl).getScheme() || ""; + var isRequestHttps = requestScheme.toLowerCase() === "https"; + var proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || ""; + var isProxyHttps = proxyScheme.toLowerCase() === "https"; + var proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } + else { + return tunnel.httpOverHttp(tunnelOptions); + } +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+var NodeFetchHttpClient = /** @class */ (function (_super) { + tslib.__extends(NodeFetchHttpClient, _super); + function NodeFetchHttpClient() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + return _this; + } + NodeFetchHttpClient.prototype.fetch = function (input, init) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, node_fetch(input, init)]; + }); + }); + }; + NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var requestInit, cookieString, _a, httpAgent, httpsAgent, tunnel, options, agent; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + requestInit = {}; + if (!(this.cookieJar && !httpRequest.headers.get("Cookie"))) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + })]; + case 1: + cookieString = _b.sent(); + httpRequest.headers.set("Cookie", cookieString); + _b.label = 2; + case 2: + if (httpRequest.agentSettings) { + _a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https; + if (httpsAgent && httpRequest.url.startsWith("https")) { + requestInit.agent = httpsAgent; + } + else if (httpAgent) { + requestInit.agent = httpAgent; + } + } + else if (httpRequest.proxySettings) { + tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + requestInit.agent = tunnel.agent; + } + if (httpRequest.keepAlive === true) { + if (requestInit.agent) { + requestInit.agent.keepAlive = true; + } + else { + options = { keepAlive: true }; + agent = httpRequest.url.startsWith("https") + ? new https.Agent(options) + : new http.Agent(options); + requestInit.agent = agent; + } + } + return [2 /*return*/, requestInit]; + } + }); + }); + }; + NodeFetchHttpClient.prototype.processRequest = function (operationResponse) { + return tslib.__awaiter(this, void 0, void 0, function () { + var setCookieHeader_1; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!this.cookieJar) return [3 /*break*/, 2]; + setCookieHeader_1 = operationResponse.headers.get("Set-Cookie"); + if (!(setCookieHeader_1 != undefined)) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + })]; + case 1: + _a.sent(); + _a.label = 2; + case 2: return [2 /*return*/]; + } + }); + }); + }; + return NodeFetchHttpClient; +}(FetchHttpClient)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. 
+ */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. + const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + var result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function isStreamOperation(operationSpec) { + var result = false; + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result = true; + break; + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function stringifyXML(obj, opts) { + var builder = new xml2js.Builder({ + rootName: (opts || {}).rootName, + renderOpts: { + pretty: false, + }, + }); + return builder.buildObject(obj); +} +function parseXML(str) { + var xmlParser = new xml2js.Parser({ + explicitArray: false, + explicitCharkey: false, + explicitRoot: false, + }); + return new Promise(function (resolve, reject) { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, function (err, res) { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { + return this._options.shouldLog(logLevel); + }; + /** + * Attempt to log the provided message to the provided logger. 
If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + BaseRequestPolicy.prototype.log = function (logLevel, message) { + this._options.log(logLevel, message); + }; + return BaseRequestPolicy; +}()); +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { + return (!!this._logger && + logLevel !== exports.HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + RequestPolicyOptions.prototype.log = function (logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + }; + return RequestPolicyOptions; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +function deserializationPolicy(deserializationContentTypes) { + return { + create: function (nextPolicy, options) { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; +} +var defaultJsonContentTypes = ["application/json", "text/json"]; +var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. 
+ */ +var DeserializationPolicy = /** @class */ (function (_super) { + tslib.__extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, deserializationContentTypes, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + _this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + return _this; + } + DeserializationPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy + .sendRequest(request) + .then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response); + })]; + }); + }); + }; + return DeserializationPolicy; +}(BaseRequestPolicy)); +function getOperationResponse(parsedResponse) { + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; + if (operationSpec) { + var operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response) { + return parse(jsonContentTypes, xmlContentTypes, response).then(function (parsedResponse) { + var shouldDeserialize = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + var operationSpec = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + var statusCode = parsedResponse.status; + var expectedStatusCodes = Object.keys(operationSpec.responses); + var hasNoExpectedStatusCodes = expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + var responseSpec = getOperationResponse(parsedResponse); + var isExpectedStatusCode = hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + var defaultResponseSpec = operationSpec.responses.default; + if (defaultResponseSpec) { + var initialErrorMessage = isStreamOperation(operationSpec) + ? 
"Unexpected status code: " + statusCode + : parsedResponse.bodyAsText; + var error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = stripRequest(parsedResponse.request); + error.response = stripResponse(parsedResponse); + var parsedErrorResponse = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + var defaultResponseBodyMapper = defaultResponseSpec.bodyMapper; + if (defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError") { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } + else { + var internalError = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + if (defaultResponseBodyMapper) { + var valueToDeserialize = parsedErrorResponse; + if (operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName] + : []; + } + error.body = operationSpec.serializer.deserialize(defaultResponseBodyMapper, valueToDeserialize, "error.body"); + } + } + } + catch (defaultError) { + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; + } + return Promise.reject(error); + } + } + else if (responseSpec) { + if (responseSpec.bodyMapper) { + var valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody"); + } + catch (error) { + var restError = new RestError("Error " + error + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText); + restError.request = stripRequest(parsedResponse.request); + restError.response = stripResponse(parsedResponse); + return Promise.reject(restError); + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + } + } + return Promise.resolve(parsedResponse); + }); +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse) { + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse, operationResponse.bodyAsText); + return Promise.reject(e); + }; + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType + ? [] + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); + if (contentComponents.length === 0 || + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1) + .then(function (body) { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function exponentialRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +var DEFAULT_CLIENT_RETRY_COUNT = 3; +var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +var ExponentialRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(ExponentialRetryPolicy, _super); + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. 
+ * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + function isNumber(n) { + return typeof n === "number"; + } + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; +}(BaseRequestPolicy)); +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy, statusCode, retryData) { + if (statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + var currentCount; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. 
+ */ +function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry(policy, request, response, retryData, requestError) { + retryData = updateRetryData(policy, retryData, requestError); + var isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return delay(retryData.retryInterval) + .then(function () { return policy._nextPolicy.sendRequest(request.clone()); }) + .then(function (res) { return retry(policy, request, res, retryData, undefined); }) + .catch(function (err) { return retry(policy, request, response, retryData, err); }); + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + var err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + return Promise.reject(err); + } + else { + return Promise.resolve(response); + } +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } + return { + create: function (nextPolicy, options) { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + tslib.__extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; + } + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, generateUuid()); + } + return this._nextPolicy.sendRequest(request); + }; + return GenerateClientRequestIdPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + var runtimeInfo = { + key: "Node", + value: process.version, + }; + var osInfo = { + key: "OS", + value: "(" + os.arch() + "-" + os.type() + "-" + os.release() + ")", + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+function getRuntimeInfo() { + var msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } + return telemetryInfo + .map(function (info) { + var value = info.value ? "" + valueSeparator + info.value : ""; + return "" + info.key + value; + }) + .join(keySeparator); +} +var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +function getDefaultUserAgentValue() { + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +function userAgentPolicy(userAgentData) { + var key = !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + var value = !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: function (nextPolicy, options) { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +var UserAgentPolicy = /** @class */ (function (_super) { + tslib.__extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; + } + UserAgentPolicy.prototype.sendRequest = function (request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + }; + return UserAgentPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+var DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } + return { + create: function (nextPolicy, options) { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +var RedirectPolicy = /** @class */ (function (_super) { + tslib.__extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; + } + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request) + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; +}(BaseRequestPolicy)); +function handleRedirect(policy, response, currentRetries) { + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries))) { + var builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }) + .then(function (res) { return recordRedirect(res, request.url); }); + } + return Promise.resolve(response); +} +function recordRedirect(response, redirect) { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; +} + +function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } + return { + create: function (nextPolicy, options) { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +var RPRegistrationPolicy = /** @class */ (function (_super) { + tslib.__extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; + } + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; +}(BaseRequestPolicy)); +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + var rpName = 
checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + var urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(function () { return false; }) + .then(function (registrationStatus) { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. + */ +function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + var result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); + } + return result; +} +/** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. 
+ * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ +function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. + */ +function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function signingPolicy(authenticationProvider) { + return { + create: function (nextPolicy, options) { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +var SigningPolicy = /** @class */ (function (_super) { + tslib.__extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; + } + SigningPolicy.prototype.signRequest = function (request) { + return this.authenticationProvider.signRequest(request); + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. 
+ * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +var SystemErrorRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + _this.DEFAULT_CLIENT_RETRY_COUNT = 3; + _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + _this.retryCount = typeof retryCount === "number" ? retryCount : _this.DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = + typeof retryInterval === "number" ? retryInterval : _this.DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .catch(function (error) { return retry$1(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; +}(BaseRequestPolicy)); +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry$1(policy, retryData) { + var currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ +function updateRetryData$1(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry$1(policy, request, operationResponse, err, retryData) { + return tslib.__awaiter(this, void 0, void 0, function () { + var error_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData$1(policy, retryData, err); + if (!(err && + err.code && + shouldRetry$1(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT"))) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + error_1 = _a.sent(); + return [2 /*return*/, retry$1(policy, request, operationResponse, error_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +(function (QueryCollectionFormat) { + QueryCollectionFormat["Csv"] = ","; + QueryCollectionFormat["Ssv"] = " "; + QueryCollectionFormat["Tsv"] = "\t"; + QueryCollectionFormat["Pipes"] = "|"; + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. All rights reserved. +function agentPolicy(agentSettings) { + return { + create: function (nextPolicy, options) { + return new AgentPolicy(nextPolicy, options, agentSettings); + }, + }; +} +var AgentPolicy = /** @class */ (function (_super) { + tslib.__extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options, agentSettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.agentSettings = agentSettings; + return _this; + } + AgentPolicy.prototype.sendRequest = function (request) { + if (!request.agentSettings) { + request.agentSettings = this.agentSettings; + } + return this._nextPolicy.sendRequest(request); + }; + return AgentPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+/** + * @internal + */ +var noProxyList = loadNoProxy(); +var byPassedList = new Map(); +/** + * @internal + */ +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + var allProxy = getEnvironmentValue(Constants.ALL_PROXY); + var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. +// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri) { + if (noProxyList.length === 0) { + return false; + } + var host = URLBuilder.parse(uri).getHost(); + if (byPassedList.has(host)) { + return byPassedList.get(host); + } + var isBypassedFlag = false; + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var pattern = noProxyList_1[_i]; + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + byPassedList.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + var noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + return noProxy + .split(",") + .map(function (item) { return item.trim(); }) + .filter(function (item) { return item.length; }); + } + return []; +} +/** + * @internal + */ +function extractAuthFromUrl(url) { + var atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + var schemeIndex = url.indexOf("://"); + var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + var auth = url.substring(authStart, atIndex); + var colonIndex = auth.indexOf(":"); + var hasPassword = colonIndex !== -1; + var username = hasPassword ? auth.substring(0, colonIndex) : auth; + var password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username: username, + password: password, + urlWithoutAuth: urlWithoutAuth, + }; +} +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth; + var parsedUrl = URLBuilder.parse(urlWithoutAuth); + var schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username: username, + password: password, + }; +} +function proxyPolicy(proxySettings) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + return { + create: function (nextPolicy, options) { + return new ProxyPolicy(nextPolicy, options, proxySettings); + }, + }; +} +var ProxyPolicy = /** @class */ (function (_super) { + tslib.__extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options, proxySettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.proxySettings = proxySettings; + return _this; + } + ProxyPolicy.prototype.sendRequest = function (request) { + if (!request.proxySettings && !isBypassed(request.url)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + }; + return ProxyPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var StatusCodes = Constants.HttpConstants.StatusCodes; +var DEFAULT_RETRY_COUNT = 3; +function throttlingRetryPolicy(maxRetries) { + if (maxRetries === void 0) { maxRetries = DEFAULT_RETRY_COUNT; } + return { + create: function (nextPolicy, options) { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; +} +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +var ThrottlingRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, retryLimit) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryLimit = retryLimit; + return _this; + } + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + return _this.retry(httpRequest, response, 0); + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype.retry = function (httpRequest, httpResponse, retryCount) { + return tslib.__awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs, res; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return [2 /*return*/, httpResponse]; + } + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (!(retryAfterHeader && retryCount < this.retryLimit)) return [3 /*break*/, 3]; + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (!delayInMs) return [3 /*break*/, 3]; + return [4 /*yield*/, delay(delayInMs)]; + case 1: + _a.sent(); + return [4 /*yield*/, this._nextPolicy.sendRequest(httpRequest)]; + case 2: + res = _a.sent(); + return [2 /*return*/, this.retry(httpRequest, res, retryCount + 1)]; + case 3: return [2 /*return*/, httpResponse]; + } + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return 
ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { + try { + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; + return Number.isNaN(diff) ? undefined : diff; + } + catch (error) { + return undefined; + } + }; + return ThrottlingRetryPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ +var azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", +]; +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. + */ +var AzureIdentityCredentialAdapter = /** @class */ (function () { + function AzureIdentityCredentialAdapter(azureTokenCredential, scopes) { + if (scopes === void 0) { scopes = "https://management.azure.com/.default"; } + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + AzureIdentityCredentialAdapter.prototype.getToken = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var accessToken, result; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.azureTokenCredential.getToken(this.scopes)]; + case 1: + accessToken = _a.sent(); + if (accessToken !== null) { + result = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return [2 /*return*/, result]; + } + else { + throw new Error("Could find token for scope"); + } + } + }); + }); + }; + AzureIdentityCredentialAdapter.prototype.signRequest = function (webResource) { + return tslib.__awaiter(this, void 0, void 0, function () { + var tokenResponse; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.getToken()]; + case 1: + tokenResponse = _a.sent(); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, tokenResponse.tokenType + " " + tokenResponse.accessToken); + return [2 /*return*/, Promise.resolve(webResource)]; + } + }); + }); + }; + return AzureIdentityCredentialAdapter; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * @class + * Initializes a new instance of the ServiceClient. + */ +var ServiceClient = /** @class */ (function () { + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + function ServiceClient(credentials, options) { + if (!options) { + options = {}; + } + if (options.baseUri) { + this.baseUri = options.baseUri; + } + var serviceClientCredentials; + if (isTokenCredential(credentials)) { + var scope = undefined; + if ((options === null || options === void 0 ? void 0 : options.baseUri) && azureResourceManagerEndpoints.includes(options === null || options === void 0 ? 
void 0 : options.baseUri)) { + scope = options.baseUri + "/.default"; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } + else { + serviceClientCredentials = credentials; + } + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new NodeFetchHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + var requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } + else { + requestPolicyFactories = createDefaultRequestPolicyFactories(serviceClientCredentials, options); + if (options.requestPolicyFactories) { + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + ServiceClient.prototype.sendRequest = function (options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + var httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + var httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + }; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. 
+ */ + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + var httpRequest = new WebResource(); + var result; + try { + var baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + var requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (var _i = 0, _a = operationSpec.urlParameters; _i < _a.length; _i++) { + var urlParameter = _a[_i]; + var urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter)); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (var _b = 0, _c = operationSpec.queryParameters; _b < _c.length; _b++) { + var queryParameter = _c[_b]; + var queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } + else { + for (var index in queryParameterValue) { + var item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (var index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + var contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (var _d = 0, _e = operationSpec.headerParameters; _d < _e.length; _d++) { + var headerParameter = _e[_d]; + var headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + var headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (var _f = 0, _g = Object.keys(headerValue); _f < _g.length; _f++) { + var key = _g[_f]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + var options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (var customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + result = this.sendRequest(httpRequest).then(function (res) { + return flattenResponse(res, operationSpec.responses[res.status]); + }); + } + catch (error) { + result = Promise.reject(error); + } + var cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) 
{ return cb(err); }); + } + return result; + }; + return ServiceClient; +}()); +function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); + var isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (var _i = 0, _a = operationSpec.formDataParameters; _i < _a.length; _i++) { + var formDataParameter = _a[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue != undefined) { + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter)); + } + } + } +} +function isRequestPolicyFactory(instance) { + return typeof instance.create === "function"; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + var result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(credentials, options) { + var factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } + else { + factories.push(signingPolicy(credentials)); + } + } + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + var redirectOptions = tslib.__assign(tslib.__assign({}, 
DefaultRedirectOptions), options.redirectOptions); + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + var proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy(proxySettings)); + } + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + return factories; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + var useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + var result = { propertyFound: false }; + var i = 0; + for (; i < parameterPath.length; ++i) { + var parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +function flattenResponse(_response, responseSpec) { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + var typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + var parsedBody = Array.isArray(_response.parsedBody) ? _response.parsedBody : []; + var arrayResponse = tslib.__spreadArrays(parsedBody); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function logPolicy(logger) { + if (logger === void 0) { logger = console.log; } + return { + create: function (nextPolicy, options) { + return new LogPolicy(nextPolicy, options, logger); + }, + }; +} +var LogPolicy = /** @class */ (function (_super) { + tslib.__extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, logger) { + if (logger === void 0) { logger = console.log; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger; + return _this; + } + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy.sendRequest(request).then(function (response) { return logResponse(_this, response); }); + }; + return LogPolicy; +}(BaseRequestPolicy)); +function logResponse(policy, response) { + policy.logger(">> Request: " + JSON.stringify(response.request, undefined, 2)); + policy.logger(">> Response status code: " + response.status); + var responseBody = response.bodyAsText; + policy.logger(">> Body: " + responseBody); + return Promise.resolve(response); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME$1 = "Bearer"; +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +var TokenCredentials = /** @class */ (function () { + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function TokenCredentials(token, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. + */ + TokenCredentials.prototype.signRequest = function (webResource) { + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, this.authorizationScheme + " " + this.token); + return Promise.resolve(webResource); + }; + return TokenCredentials; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var HeaderConstants$1 = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME$2 = "Basic"; +var BasicAuthenticationCredentials = /** @class */ (function () { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + encodeString(credentials); + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants$1.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + }; + return BasicAuthenticationCredentials; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * Authenticates to a service using an API key. + */ +var ApiKeyCredentials = /** @class */ (function () { + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. 
+ */ + function ApiKeyCredentials(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error("options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. + */ + ApiKeyCredentials.prototype.signRequest = function (webResource) { + if (!webResource) { + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (var headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error("url cannot be null in the request object.")); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (var key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += key + "=" + this.inQuery[key]; + } + } + return Promise.resolve(webResource); + }; + return ApiKeyCredentials; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var TopicCredentials = /** @class */ (function (_super) { + tslib.__extends(TopicCredentials, _super); + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + function TopicCredentials(topicKey) { + var _this = this; + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return TopicCredentials; +}(ApiKeyCredentials)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var DomainCredentials = /** @class */ (function (_super) { + tslib.__extends(DomainCredentials, _super); + /** + * Creates a new EventGrid DomainCredentials object. 
+ * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + function DomainCredentials(domainKey) { + var _this = this; + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return DomainCredentials; +}(ApiKeyCredentials)); + +exports.ApiKeyCredentials = ApiKeyCredentials; +exports.AzureIdentityCredentialAdapter = AzureIdentityCredentialAdapter; +exports.BaseRequestPolicy = BaseRequestPolicy; +exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; +exports.Constants = Constants; +exports.DefaultHttpClient = NodeFetchHttpClient; +exports.DomainCredentials = DomainCredentials; +exports.HttpHeaders = HttpHeaders; +exports.MapperType = MapperType; +exports.RequestPolicyOptions = RequestPolicyOptions; +exports.RestError = RestError; +exports.Serializer = Serializer; +exports.ServiceClient = ServiceClient; +exports.TokenCredentials = TokenCredentials; +exports.TopicCredentials = TopicCredentials; +exports.URLBuilder = URLBuilder; +exports.URLQuery = URLQuery; +exports.WebResource = WebResource; +exports.agentPolicy = agentPolicy; +exports.applyMixins = applyMixins; +exports.delay = delay; +exports.deserializationPolicy = deserializationPolicy; +exports.deserializeResponseBody = deserializeResponseBody; +exports.encodeUri = encodeUri; +exports.executePromisesSequentially = executePromisesSequentially; +exports.exponentialRetryPolicy = exponentialRetryPolicy; +exports.flattenResponse = flattenResponse; +exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; +exports.generateUuid = generateUuid; +exports.getDefaultProxySettings = getDefaultProxySettings; +exports.getDefaultUserAgentValue = getDefaultUserAgentValue; +exports.isDuration = isDuration; +exports.isNode = isNode; +exports.isValidUuid = isValidUuid; +exports.logPolicy = logPolicy; +exports.promiseToCallback = promiseToCallback; +exports.promiseToServiceCallback = promiseToServiceCallback; +exports.proxyPolicy = proxyPolicy; +exports.redirectPolicy = redirectPolicy; +exports.serializeObject = serializeObject; +exports.signingPolicy = signingPolicy; +exports.stripRequest = stripRequest; +exports.stripResponse = stripResponse; +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +exports.throttlingRetryPolicy = throttlingRetryPolicy; +exports.userAgentPolicy = userAgentPolicy; +//# sourceMappingURL=msRest.node.js.map diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.node.js.map b/node_modules/@azure/ms-rest-js/dist/msRest.node.js.map new file mode 100644 index 00000000..e1e24cc8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.node.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.node.js","sources":["../lib/httpHeaders.ts","../lib/util/base64.ts","../lib/util/constants.ts","../lib/util/utils.ts","../lib/serializer.ts","../lib/webResource.ts","../node_modules/event-target-shim/src/event.mjs","../node_modules/event-target-shim/src/event-target.mjs","../node_modules/abort-controller/src/abort-signal.ts","../node_modules/abort-controller/src/abort-controller.ts","../lib/restError.ts","../lib/fetchHttpClient.ts","../lib/url.ts","../lib/proxyAgent.ts","../lib/nodeFetchHttpClient.ts","../lib/httpPipelineLogLevel.ts","../node_modules/@azure/core-auth/src/tokenCredential.ts","../lib/operationParameter.ts","../lib/operationSpec.ts","../lib/util/xml.ts","../lib/policies/requestPolicy.ts","../lib/policies/deserializationPolicy.ts","../lib/policies/exponentialRetryPolicy.ts","../lib/policies/generateClientRequestIdPolicy.ts","../lib/policies/msRestUserAgentPolicy.ts","../lib/policies/userAgentPolicy.ts","../lib/policies/redirectPolicy.ts","../lib/policies/rpRegistrationPolicy.ts","../lib/policies/signingPolicy.ts","../lib/policies/systemErrorRetryPolicy.ts","../lib/queryCollectionFormat.ts","../lib/policies/agentPolicy.ts","../lib/policies/proxyPolicy.ts","../lib/policies/throttlingRetryPolicy.ts","../lib/credentials/azureIdentityTokenCredentialAdapter.ts","../lib/serviceClient.ts","../lib/policies/logPolicy.ts","../lib/credentials/tokenCredentials.ts","../lib/credentials/basicAuthenticationCredentials.ts","../lib/credentials/apiKeyCredentials.ts","../lib/credentials/topicCredentials.ts","../lib/credentials/domainCredentials.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string) {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: any): object is HttpHeadersLike {\n if (!object || typeof object !== \"object\") {\n return false;\n }\n\n if (\n typeof object.rawHeaders === \"function\" &&\n typeof object.clone === \"function\" &&\n typeof object.get === \"function\" &&\n typeof object.set === \"function\" &&\n typeof object.contains === \"function\" &&\n typeof object.remove === \"function\" &&\n typeof object.headersArray === \"function\" &&\n typeof object.headerValues === \"function\" &&\n typeof object.headerNames === \"function\" &&\n typeof object.toJson === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name.toLowerCase()] = header.value;\n }\n return result;\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(): RawHttpHeaders {\n return this.rawHeaders();\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson());\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n return new HttpHeaders(this.rawHeaders());\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * Encodes a string in base64 format.\n * @param value the string to encode\n */\nexport function encodeString(value: string): string {\n return Buffer.from(value).toString(\"base64\");\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value the Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer as ArrayBuffer);\n return bufferValue.toString(\"base64\");\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value the base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n return Buffer.from(value, \"base64\");\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nexport const Constants = {\n /**\n * The ms-rest version\n * @const\n * @type {string}\n */\n msRestVersion: \"2.6.1\",\n\n /**\n * Specifies HTTP.\n *\n * @const\n * @type {string}\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n *\n * @const\n * @type {string}\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n *\n * @const\n * @type {string}\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n *\n * @const\n * @type {string}\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n *\n * @const\n * @enum {string}\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n *\n * @const\n * @type {string}\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n *\n * @const\n * @type {string}\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n *\n * @const\n * @type {string}\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { v4 as uuidv4 } from \"uuid\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"./constants\";\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param {object} urlToCheck The url to check\n * @return {boolean} True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param {string} uri The URI to be encoded.\n * @return {string} The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param {HttpOperationResponse} response The Http Response\n *\n * @return {object} The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization 
header.\n *\n * @param {WebResource} request The Http Request object\n *\n * @return {WebResource} The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param {string} uuid The uuid as a string that needs to be validated\n *\n * @return {boolean} True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n const validUuidRegex = new RegExp(\n \"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$\",\n \"ig\"\n );\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Provides an array of values of an object. For example\n * for a given object { \"a\": \"foo\", \"b\": \"bar\" }, the method returns [\"foo\", \"bar\"].\n *\n * @param {object} obj An object whose properties need to be enumerated so that it\"s values can be provided as an array\n *\n * @return {any[]} An array of values of the given object.\n */\nexport function objectValues(obj: { [key: string]: any }): any[] {\n const result: any[] = [];\n if (obj && obj instanceof Object) {\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n result.push((obj)[key]);\n }\n }\n } else {\n throw new Error(\n `The provided object ${JSON.stringify(\n obj,\n undefined,\n 2\n )} is not a valid object that can be ` + `enumerated to provide its values as an array.`\n );\n }\n return result;\n}\n\n/**\n * Generated UUID\n *\n * @return {string} RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param {Array} promiseFactories An array of promise factories(A function that return a promise)\n *\n * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n *\n * @return A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(promiseFactories: Array, kickstart: any) {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Merges source object into the target object\n * @param {object} source The object that needs to be merged\n *\n * @param {object} target The object to be merged into\n *\n * @returns {object} Returns the merged target object.\n */\nexport function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {\n Object.keys(source).forEach((key) => {\n target[key] = source[key];\n });\n return target;\n}\n\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param {number} t The number of milliseconds to be delayed.\n * @param {T} value The value to be resolved with after a timeout of t milliseconds.\n * @returns {Promise} Resolved promise\n */\nexport function delay(t: number, value?: T): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(value), t));\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null.\n * @param {TResult} [result] The deserialized response body if an error did not occur.\n * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.\n * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param {Promise} promise The Promise to be converted to a callback\n * @returns {Function} A function that takes the callback (cb: Function): void\n * @deprecated generated code should instead depend on responseToBody\n */\nexport function promiseToCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: Function): void => {\n promise.then(\n (data: any) => {\n cb(undefined, data);\n },\n (err: Error) => {\n cb(err);\n }\n );\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise.then(\n (data: HttpOperationResponse) => {\n process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n },\n (err: Error) => {\n process.nextTick(cb, err);\n }\n );\n };\n}\n\nexport function prepareXMLRootList(obj: 
any, elementName: string) {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n return { [elementName]: obj };\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param {object} targetCtor The target object on which the properties need to be applied.\n * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtor: any, sourceCtors: any[]): void {\n sourceCtors.forEach((sourceCtors) => {\n Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {\n targetCtor.prototype[name] = sourceCtors.prototype[name];\n });\n });\n}\n\nconst validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param {string} value The value to be validated for ISO 8601 duration format.\n * @return {boolean} `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param {string | undefined} value The value to search and replace in.\n * @param {string} searchValue The value to search for in the value argument.\n * @param {string} replaceValue The value to replace searchValue with in the value argument.\n * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given enity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value Any entity\n * @return boolean - true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: any): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\n\nexport class Serializer {\n constructor(\n public readonly modelMappers: { [key: string]: any } = {},\n public readonly isXML?: boolean\n ) {}\n\n validateConstraints(mapper: Mapper, value: any, objectName: string): void {\n const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && value > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && value < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n if (MaxItems != undefined && value.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && value.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && value.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && value.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && value % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? 
new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param {Mapper} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized\n *\n * @param {string} objectName Name of the serialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object\n */\n serialize(mapper: Mapper, object: any, objectName?: string): any {\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/gi) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/gi) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = serializeByteArrayType(objectName, object);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName);\n } else if (mapperType.match(/^Composite$/gi) !== null) {\n payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName);\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param {object} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized\n *\n * @param {string} objectName Name of the deserialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object\n */\n deserialize(mapper: Mapper, responseBody: any, objectName: string): any {\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/gi) !== null) {\n payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName);\n } else {\n if (this.isXML) {\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\") and body (\"_\") properties, then just reduce the responseBody value to\n * the body (\"_\") property.\n */\n if (responseBody[\"$\"] != undefined && responseBody[\"_\"] != undefined) {\n responseBody = responseBody[\"_\"];\n }\n }\n\n if (mapperType.match(/^Number$/gi) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/gi) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) {\n payload = new Date(responseBody);\n } else if (mapperType.match(/^UnixTime$/gi) !== null) {\n payload = unixTimeToDate(responseBody);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = base64.decodeString(responseBody);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = base64UrlToByteArray(responseBody);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string) {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n 
}\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/\\-/g, \"+\").replace(/\\_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/gi) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/gi) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/gi) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/gi) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/gi) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !(typeof Blob === \"function\" && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = base64.encodeByteArray(value);\n }\n return value;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = bufferToBase64Url(value);\n }\n return value;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string) {\n if (value != undefined) {\n if (typeName.match(/^Date$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/gi) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. 
Instead was \"${value}\".`\n );\n }\n value = value;\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string\n) {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n tempArray[i] = serializer.serialize(elementType, object[i], objectName);\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string\n) {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + \".\" + key);\n }\n return tempDictionary;\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer the serializer containing the entire set of mappers\n * @param mapper the composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n const modelMapper = serializer.modelMappers[className];\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${className}\".`);\n }\n modelProps = modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(\n modelMapper\n )}\" of type \"${className}\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string\n) {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (childObject == undefined && 
object[key] != undefined) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName\n );\n if (serializedValue !== undefined && propName != undefined) {\n if (propertyMapper.xmlIsAttribute) {\n // $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject.$ = parentObject.$ || {};\n parentObject.$[propName] = serializedValue;\n } else if (propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue };\n } else {\n parentObject[propName] = serializedValue;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]'\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction isSpecialXmlProperty(propertyName: string): boolean {\n return [\"$\", \"_\"].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody.$) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody.$[xmlName!],\n propertyObjectName\n );\n } else {\n const propertyName = 
xmlElementName || xmlName || serializedName;\n let unwrappedProperty = responseBody[propertyName!];\n if (propertyMapper.xmlIsWrapped) {\n unwrappedProperty = responseBody[xmlName!];\n unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!];\n\n const isEmptyWrappedList = unwrappedProperty === undefined;\n if (isEmptyWrappedList) {\n unwrappedProperty = [];\n }\n }\n instance[key] = serializer.deserialize(\n propertyMapper,\n unwrappedProperty,\n propertyObjectName\n );\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [key, value] of Object.entries(instance)) {\n if (!arrayInstance.hasOwnProperty(key)) {\n arrayInstance[key] = value;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string) => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]'\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key)\n ) {\n instance[key] = responseBody[key];\n }\n }\n 
}\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`);\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\nexport interface MapperConstraints {\n InclusiveMaximum?: number;\n ExclusiveMaximum?: number;\n InclusiveMinimum?: number;\n ExclusiveMinimum?: number;\n MaxLength?: number;\n MinLength?: number;\n Pattern?: RegExp;\n MaxItems?: number;\n MinItems?: number;\n UniqueItems?: true;\n MultipleOf?: number;\n}\n\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\nexport interface SimpleMapperType {\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\nexport interface CompositeMapperType {\n name: \"Composite\";\n\n // Only one of the two below properties should be present.\n // Use className to reference another type definition,\n // and use modelProperties/additionalProperties when the reference to the other type has been resolved.\n className?: string;\n\n modelProperties?: { [propertyName: string]: Mapper };\n additionalProperties?: Mapper;\n\n uberParent?: string;\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\nexport interface SequenceMapperType {\n name: \"Sequence\";\n element: Mapper;\n}\n\nexport interface DictionaryMapperType {\n name: \"Dictionary\";\n value: Mapper;\n}\n\nexport interface EnumMapperType {\n name: \"Enum\";\n allowedValues: any[];\n}\n\nexport interface BaseMapper {\n xmlName?: string;\n xmlIsAttribute?: boolean;\n xmlElementName?: string;\n xmlIsWrapped?: boolean;\n readOnly?: boolean;\n isConstant?: boolean;\n required?: boolean;\n nullable?: boolean;\n serializedName?: string;\n type: MapperType;\n defaultValue?: any;\n constraints?: MapperConstraints;\n}\n\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\nexport interface PolymorphicDiscriminator {\n serializedName: string;\n clientName: string;\n [key: string]: string;\n}\n\nexport interface CompositeMapper extends BaseMapper {\n type: CompositeMapperType;\n}\n\nexport interface SequenceMapper extends BaseMapper {\n type: SequenceMapperType;\n}\n\nexport interface DictionaryMapper extends BaseMapper {\n type: DictionaryMapperType;\n headerCollectionPrefix?: string;\n}\n\nexport interface EnumMapper extends BaseMapper {\n type: EnumMapperType;\n}\n\nexport interface UrlParameterValue {\n value: string;\n skipUrlEncoding: boolean;\n}\n\n// TODO: why is this here?\nexport function serializeObject(toSerialize: any): any {\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = 
base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(toSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { generateUuid } from \"./util/utils\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { AgentSettings, ProxySettings } from \"./serviceClient\";\n\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n readonly aborted: boolean;\n dispatchEvent: (event: Event) => boolean;\n onabort: ((this: AbortSignalLike, ev: Event) => any) | null;\n addEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n removeEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n}\n\n/**\n * An abstraction over a REST call.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * HTTP(S) agent configuration.\n */\n agentSettings?: AgentSettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n /**\n * Used to abort the request later.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: any): object is WebResourceLike {\n if (typeof object !== \"object\") {\n return false;\n }\n if (\n typeof object.url === \"string\" &&\n typeof object.method === \"string\" &&\n typeof object.headers === \"object\" &&\n isHttpHeadersLike(object.headers) &&\n typeof object.validateRequestProperties === \"function\" &&\n typeof object.prepare === \"function\" &&\n typeof object.clone === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n *\n * @constructor\n */\nexport class WebResource {\n url: string;\n method: HttpMethods;\n body?: any;\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n query?: { [key: string]: any };\n operationSpec?: OperationSpec;\n withCredentials: boolean;\n timeout: number;\n proxySettings?: ProxySettings;\n keepAlive?: boolean;\n agentSettings?: AgentSettings;\n redirectLimit?: number;\n\n abortSignal?: AbortSignalLike;\n\n /** Callback which fires upon upload progress. */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: any,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n agentSettings?: AgentSettings,\n redirectLimit?: number\n ) {\n this.streamResponseBody = streamResponseBody;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.agentSettings = agentSettings;\n this.redirectLimit = redirectLimit;\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param {RequestPrepareOptions} options Options to provide for preparing the request.\n * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (options.method == undefined || typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url == undefined || typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({\\w*\\s*\\w*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in ${pathParameters} - ${JSON.stringify(\n pathParameters,\n undefined,\n 2\n )}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", generateUuid());\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty\n this.body = options.body;\n if (options.body != undefined) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n this.redirectLimit = options.redirectLimit;\n this.streamResponseBody = options.streamResponseBody;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns {WebResource} The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.agentSettings,\n this.redirectLimit\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: { \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }\n * - query-parameter-value in \"string\" format: { \"query-parameter-name\": \"query-parameter-value\"}.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}\"\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: { \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }\n * - path-parameter-value in \"string\" format: { \"path-parameter-name\": \"path-parameter-value\" }.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: object;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n abortSignal?: AbortSignalLike;\n /**\n * Limit the number of redirects followed for this request. 
If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n streamResponseBody?: boolean;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n value: any;\n skipUrlEncoding: boolean;\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * @property {object} [customHeaders] User defined custom request headers that\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n [key: string]: any;\n}\n","/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event 
bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns 
{Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * 
@returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. 
`eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? 
CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n","import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? \"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\n\nexport class RestError extends Error {\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n static readonly REQUEST_ABORTED_ERROR: string = \"REQUEST_ABORTED_ERROR\";\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n code?: string;\n statusCode?: number;\n request?: WebResourceLike;\n response?: HttpOperationResponse;\n body?: any;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse,\n body?: any\n ) {\n super(message);\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n this.body = body;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport AbortController from \"abort-controller\";\nimport FormData from \"form-data\";\n\nimport { HttpClient } from \"./httpClient\";\nimport { WebResourceLike } from \"./webResource\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { RestError } from \"./restError\";\nimport { Readable, Transform } from \"stream\";\n\ninterface FetchError extends Error {\n code?: string;\n errno?: string;\n type?: string;\n}\n\nexport type CommonRequestInfo = string; // we only call fetch() on string urls.\n\nexport type CommonRequestInit = Omit & {\n body?: any;\n headers?: any;\n signal?: any;\n};\n\nexport type CommonResponse = Omit & {\n body: any;\n trailer: any;\n formData: any;\n};\n\nexport abstract class FetchHttpClient implements HttpClient {\n async sendRequest(httpRequest: WebResourceLike): Promise {\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\n \"'httpRequest' (WebResource) cannot be null or undefined and must be of type object.\"\n );\n }\n\n const abortController = new AbortController();\n let abortListener: ((event: any) => void) | undefined;\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new RestError(\n \"The request was aborted\",\n RestError.REQUEST_ABORTED_ERROR,\n undefined,\n httpRequest\n );\n }\n\n abortListener = (event: Event) => {\n if (event.type === \"abort\") {\n abortController.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n\n if (httpRequest.timeout) {\n setTimeout(() => {\n abortController.abort();\n }, httpRequest.timeout);\n }\n\n if (httpRequest.formData) {\n const formData: any = httpRequest.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any) => {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (value && value.hasOwnProperty(\"value\") && value.hasOwnProperty(\"options\")) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n 
}\n\n httpRequest.body = requestForm;\n httpRequest.formData = undefined;\n const contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm.getBoundary === \"function\") {\n httpRequest.headers.set(\n \"Content-Type\",\n `multipart/form-data; boundary=${requestForm.getBoundary()}`\n );\n } else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n\n let body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n let loadedBytes = 0;\n const uploadReportStream = new Transform({\n transform: (chunk: string | Buffer, _encoding, callback) => {\n loadedBytes += chunk.length;\n httpRequest.onUploadProgress!({ loadedBytes });\n callback(undefined, chunk);\n },\n });\n\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n } else {\n uploadReportStream.end(body);\n }\n\n body = uploadReportStream;\n }\n\n const platformSpecificRequestInit: Partial = await this.prepareRequest(\n httpRequest\n );\n\n const requestInit: RequestInit = {\n body: body,\n headers: httpRequest.headers.rawHeaders(),\n method: httpRequest.method,\n signal: abortController.signal,\n redirect: \"manual\",\n ...platformSpecificRequestInit,\n };\n\n let operationResponse: HttpOperationResponse | undefined;\n try {\n const response: CommonResponse = await this.fetch(httpRequest.url, requestInit);\n\n const headers = parseHeaders(response.headers);\n operationResponse = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: httpRequest.streamResponseBody\n ? ((response.body as unknown) as NodeJS.ReadableStream)\n : undefined,\n bodyAsText: !httpRequest.streamResponseBody ? await response.text() : undefined,\n redirected: response.redirected,\n url: response.url,\n };\n\n const onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n const responseBody: ReadableStream | undefined = response.body || undefined;\n\n if (isReadableStream(responseBody)) {\n let loadedBytes = 0;\n const downloadReportStream = new Transform({\n transform: (chunk: string | Buffer, _encoding, callback) => {\n loadedBytes += chunk.length;\n onDownloadProgress({ loadedBytes });\n callback(undefined, chunk);\n },\n });\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n } else {\n const length = parseInt(headers.get(\"Content-Length\")!) 
|| undefined;\n if (length) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length });\n }\n }\n }\n\n await this.processRequest(operationResponse);\n\n return operationResponse;\n } catch (error) {\n const fetchError: FetchError = error;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(\n fetchError.message,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n httpRequest\n );\n } else if (fetchError.type === \"aborted\") {\n throw new RestError(\n \"The request was aborted\",\n RestError.REQUEST_ABORTED_ERROR,\n undefined,\n httpRequest\n );\n }\n\n throw fetchError;\n } finally {\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n let uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n let downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse?.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(operationResponse!.readableStreamBody);\n }\n\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(() => {\n httpRequest.abortSignal?.removeEventListener(\"abort\", abortListener!);\n return;\n })\n .catch((_e) => {});\n }\n }\n }\n\n abstract async prepareRequest(httpRequest: WebResourceLike): Promise>;\n abstract async processRequest(operationResponse: HttpOperationResponse): Promise;\n abstract async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise;\n}\n\nfunction isReadableStream(body: any): body is Readable {\n return body && typeof body.pipe === \"function\";\n}\n\nfunction isStreamComplete(stream: Readable): Promise {\n return new Promise((resolve) => {\n stream.on(\"close\", resolve);\n stream.on(\"end\", resolve);\n stream.on(\"error\", resolve);\n });\n}\n\nexport function parseHeaders(headers: Headers): HttpHeadersLike {\n const httpHeaders = new HttpHeaders();\n\n headers.forEach((value, key) => {\n httpHeaders.set(key, value);\n });\n\n return httpHeaders;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\" | \"Invalid\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: any): void {\n if (parameterName) {\n if (parameterValue != undefined) {\n const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? 
this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. 
If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port == undefined || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: any): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n const tokenPath: string | undefined = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: 
URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state != undefined ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tunnel from \"tunnel\";\n\nimport { ProxySettings } from \"./serviceClient\";\nimport { URLBuilder } from \"./url\";\nimport { HttpHeadersLike } from \"./httpHeaders\";\n\nexport type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };\nexport function createProxyAgent(\n requestUrl: string,\n proxySettings: ProxySettings,\n headers?: HttpHeadersLike\n): ProxyAgent {\n const tunnelOptions: tunnel.HttpsOverHttpsOptions = {\n proxy: {\n host: URLBuilder.parse(proxySettings.host).getHost() as string,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {},\n },\n };\n\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;\n } else if (proxySettings.username) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;\n }\n\n const requestScheme = URLBuilder.parse(requestUrl).getScheme() || \"\";\n const isRequestHttps = requestScheme.toLowerCase() === \"https\";\n const proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || \"\";\n const isProxyHttps = proxyScheme.toLowerCase() === \"https\";\n\n const proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),\n };\n\n return proxyAgent;\n}\n\n// Duplicate tunnel.HttpsOverHttpsOptions to avoid exporting createTunnel() with dependency on @types/tunnel\n// createIunnel() is only imported by tests.\nexport interface HttpsProxyOptions {\n host: string;\n port: number;\n localAddress?: string;\n proxyAuth?: string;\n headers?: { [key: string]: any };\n ca?: Buffer[];\n servername?: string;\n key?: Buffer;\n cert?: Buffer;\n}\n\ninterface HttpsOverHttpsOptions {\n maxSockets?: number;\n ca?: Buffer[];\n key?: Buffer;\n cert?: Buffer;\n proxy?: HttpsProxyOptions;\n}\n\nexport function createTunnel(\n isRequestHttps: boolean,\n isProxyHttps: boolean,\n tunnelOptions: HttpsOverHttpsOptions\n): http.Agent | https.Agent {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n } else if (isRequestHttps && !isProxyHttps) {\n return 
tunnel.httpsOverHttp(tunnelOptions);\n } else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n } else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as tough from \"tough-cookie\";\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport node_fetch from \"node-fetch\";\n\nimport {\n CommonRequestInfo,\n CommonRequestInit,\n CommonResponse,\n FetchHttpClient,\n} from \"./fetchHttpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\nimport { createProxyAgent, ProxyAgent } from \"./proxyAgent\";\n\nexport class NodeFetchHttpClient extends FetchHttpClient {\n private readonly cookieJar = new tough.CookieJar(undefined, { looseMode: true });\n\n async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise {\n return (node_fetch(input, init) as unknown) as Promise;\n }\n\n async prepareRequest(httpRequest: WebResourceLike): Promise> {\n const requestInit: Partial = {};\n\n if (this.cookieJar && !httpRequest.headers.get(\"Cookie\")) {\n const cookieString = await new Promise((resolve, reject) => {\n this.cookieJar!.getCookieString(httpRequest.url, (err, cookie) => {\n if (err) {\n reject(err);\n } else {\n resolve(cookie);\n }\n });\n });\n\n httpRequest.headers.set(\"Cookie\", cookieString);\n }\n\n if (httpRequest.agentSettings) {\n const { http: httpAgent, https: httpsAgent } = httpRequest.agentSettings;\n if (httpsAgent && httpRequest.url.startsWith(\"https\")) {\n requestInit.agent = httpsAgent;\n } else if (httpAgent) {\n requestInit.agent = httpAgent;\n }\n } else if (httpRequest.proxySettings) {\n const tunnel: ProxyAgent = createProxyAgent(\n httpRequest.url,\n httpRequest.proxySettings,\n httpRequest.headers\n );\n requestInit.agent = tunnel.agent;\n }\n\n if (httpRequest.keepAlive === true) {\n if (requestInit.agent) {\n requestInit.agent.keepAlive = true;\n } else {\n const options: http.AgentOptions | https.AgentOptions = { keepAlive: true };\n const agent = httpRequest.url.startsWith(\"https\")\n ? new https.Agent(options)\n : new http.Agent(options);\n requestInit.agent = agent;\n }\n }\n\n return requestInit;\n }\n\n async processRequest(operationResponse: HttpOperationResponse): Promise {\n if (this.cookieJar) {\n const setCookieHeader = operationResponse.headers.get(\"Set-Cookie\");\n if (setCookieHeader != undefined) {\n await new Promise((resolve, reject) => {\n this.cookieJar!.setCookie(\n setCookieHeader,\n operationResponse.request.url,\n { ignoreError: true },\n (err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n }\n );\n });\n }\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { Mapper } from \"./serializer\";\n\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { MapperType, Serializer } from \"./serializer\";\nimport { HttpMethods } from \"./webResource\";\n\n/**\n * A specification that defines an operation.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. 
This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\nexport function isStreamOperation(operationSpec: OperationSpec): boolean {\n let result = false;\n for (const statusCode in operationSpec.responses) {\n const operationResponse: OperationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result = true;\n break;\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as xml2js from \"xml2js\";\n\nexport function stringifyXML(obj: any, opts?: { rootName?: string }) {\n const builder = new xml2js.Builder({\n rootName: (opts || {}).rootName,\n renderOpts: {\n pretty: false,\n },\n });\n return builder.buildObject(obj);\n}\n\nexport function parseXML(str: string): Promise {\n const xmlParser = new xml2js.Parser({\n explicitArray: false,\n explicitCharkey: false,\n explicitRoot: false,\n });\n return new Promise((resolve, reject) => {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n } else {\n xmlParser.parseString(str, (err?: Error, res?: any) => {\n if (err) {\n reject(err);\n } else {\n resolve(res);\n }\n });\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\nexport interface RequestPolicy {\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n protected constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions implements RequestPolicyOptionsLike {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec, isStreamOperation } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { Mapper, MapperType } from \"../serializer\";\nimport * as utils from \"../util/utils\";\nimport { parseXML } from \"../util/xml\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options);\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n\n constructor(\n nextPolicy: RequestPolicy,\n deserializationContentTypes: DeserializationContentTypes | undefined,\n options: RequestPolicyOptionsLike\n ) {\n super(nextPolicy, options);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response)\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return 
result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse\n): Promise {\n return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => {\n const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse);\n if (shouldDeserialize) {\n const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec;\n if (operationSpec && operationSpec.responses) {\n const statusCode: number = parsedResponse.status;\n\n const expectedStatusCodes: string[] = Object.keys(operationSpec.responses);\n\n const hasNoExpectedStatusCodes: boolean =\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\");\n\n const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse);\n\n const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes\n ? 200 <= statusCode && statusCode < 300\n : !!responseSpec;\n if (!isExpectedStatusCode) {\n const defaultResponseSpec: OperationResponse = operationSpec.responses.default;\n if (defaultResponseSpec) {\n const initialErrorMessage: string = isStreamOperation(operationSpec)\n ? `Unexpected status code: ${statusCode}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(initialErrorMessage);\n error.statusCode = statusCode;\n error.request = utils.stripRequest(parsedResponse.request);\n error.response = utils.stripResponse(parsedResponse);\n\n let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody;\n try {\n if (parsedErrorResponse) {\n const defaultResponseBodyMapper: Mapper | undefined =\n defaultResponseSpec.bodyMapper;\n if (\n defaultResponseBodyMapper &&\n defaultResponseBodyMapper.serializedName === \"CloudError\"\n ) {\n if (parsedErrorResponse.error) {\n parsedErrorResponse = parsedErrorResponse.error;\n }\n if (parsedErrorResponse.code) {\n error.code = parsedErrorResponse.code;\n }\n if (parsedErrorResponse.message) {\n error.message = parsedErrorResponse.message;\n }\n } else {\n let internalError: any = parsedErrorResponse;\n if (parsedErrorResponse.error) {\n internalError = parsedErrorResponse.error;\n }\n\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n }\n\n if (defaultResponseBodyMapper) {\n let valueToDeserialize: any = parsedErrorResponse;\n if (\n operationSpec.isXML &&\n defaultResponseBodyMapper.type.name === MapperType.Sequence\n ) {\n valueToDeserialize =\n typeof parsedErrorResponse === \"object\"\n ? 
parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!]\n : [];\n }\n error.body = operationSpec.serializer.deserialize(\n defaultResponseBodyMapper,\n valueToDeserialize,\n \"error.body\"\n );\n }\n }\n } catch (defaultError) {\n error.message = `Error \\\"${defaultError.message}\\\" occurred in deserializing the responseBody - \\\"${parsedResponse.bodyAsText}\\\" for the default response.`;\n }\n return Promise.reject(error);\n }\n } else if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\"\n );\n } catch (error) {\n const restError = new RestError(\n `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`\n );\n restError.request = utils.stripRequest(parsedResponse.request);\n restError.response = utils.stripResponse(parsedResponse);\n return Promise.reject(restError);\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n }\n }\n }\n return Promise.resolve(parsedResponse);\n });\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse\n): Promise {\n const errorHandler = (err: Error & { code: string }) => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse,\n operationResponse.bodyAsText\n );\n return Promise.reject(e);\n };\n\n if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { RestError } from \"../restError\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\nconst DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nconst DEFAULT_CLIENT_RETRY_COUNT = 3;\nconst DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nconst DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The minimum retry interval in milliseconds.\n */\n minRetryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @constructor\n * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain.\n * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy.\n * @param {number} [retryCount] The client retry count.\n * @param {number} [retryInterval] The client retry interval, in milliseconds.\n * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds.\n * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n function isNumber(n: any): n is number {\n return typeof n === \"number\";\n }\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(\n policy: ExponentialRetryPolicy,\n statusCode: number | undefined,\n retryData: RetryData\n): boolean {\n if (\n statusCode == undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n\n let currentCount: number;\n if (!retryData) {\n throw new Error(\"retryData for the ExponentialRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {RetryData} retryData The retry data.\n * @param {RetryError} [err] The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: ExponentialRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 +\n Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nfunction retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n retryData = updateRetryData(policy, retryData, requestError);\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy, response && response.status, retryData)) {\n return utils\n .delay(retryData.retryInterval)\n .then(() => policy._nextPolicy.sendRequest(request.clone()))\n .then((res) => retry(policy, request, res, retryData, undefined))\n .catch((err) => retry(policy, request, response, retryData, err));\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n return Promise.reject(err);\n } else {\n return Promise.resolve(response);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, utils.generateUuid());\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as os from \"os\";\nimport { TelemetryInfo } from \"./userAgentPolicy\";\nimport { Constants } from \"../util/constants\";\n\nexport function getDefaultUserAgentKey(): string {\n return Constants.HeaderConstants.USER_AGENT;\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const runtimeInfo = {\n key: \"Node\",\n value: process.version,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `(${os.arch()}-${os.type()}-${os.release()})`,\n };\n\n return [runtimeInfo, osInfo];\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport type TelemetryInfo = { key?: string; value?: string };\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"ms-rest-js\",\n value: Constants.msRestVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? `${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key == undefined ? 
getDefaultUserAgentKey() : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value == undefined\n ? getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /*\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /*\n * The maximum number of times the redirect URL will be tried before\n * failing. Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly maxRetries = 20\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && [\"GET\", \"HEAD\"].includes(request.method)) ||\n (status === 302 && [\"GET\", \"POST\", \"HEAD\"].includes(request.method)) ||\n (status === 303 && \"POST\" === request.method) ||\n status === 307) &&\n ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) ||\n (request.redirectLimit === undefined && currentRetries < policy.maxRetries))\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 302 and 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n // reference: 
https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch\n if ((status === 302 || status === 303) && request.method === \"POST\") {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1))\n .then((res) => recordRedirect(res, request.url));\n }\n\n return Promise.resolve(response);\n}\n\nfunction recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse {\n // This is called as the recursive calls to handleRedirect() unwind,\n // only record the deepest/last redirect\n if (!response.redirected) {\n response.redirected = true;\n response.url = redirect;\n }\n return response;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param {WebResourceLike} originalRequest The original request\n * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. 
Default false.\n * @returns {object} A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param {string} body The response body received after making the original request.\n * @returns {string} The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} url The original request url\n * @returns {string} The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} provider The provider name to be registered.\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param {registrationCallback} callback The callback that handles the RP registration\n */\nfunction registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n return policy._nextPolicy.sendRequest(reqOptions).then((response) => {\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} url The request url for polling\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns {Promise} True if RP Registration is successful.\n */\nfunction getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n return policy._nextPolicy.sendRequest(reqOptions).then((res) => {\n const obj = res.parsedBody as any;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n return utils\n .delay(policy._retryTimeout * 1000)\n .then(() => getRegistrationStatus(policy, url, originalRequest));\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n *\n * @constructor\n * @param {number} retryCount The client retry count.\n * @param {number} retryInterval The client retry interval, in milliseconds.\n * @param {number} minRetryInterval The minimum retry interval, in milliseconds.\n * @param {number} maxRetryInterval The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\n DEFAULT_CLIENT_RETRY_COUNT = 3;\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = typeof retryCount === \"number\" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval =\n typeof retryInterval === \"number\" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval =\n typeof minRetryInterval === \"number\"\n ? minRetryInterval\n : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval =\n typeof maxRetryInterval === \"number\"\n ? 
maxRetryInterval\n : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean {\n let currentCount;\n if (!retryData) {\n throw new Error(\"retryData for the SystemErrorRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {RetryData} retryData The retry data.\n * @param {object} err The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: SystemErrorRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n if (\n err &&\n err.code &&\n shouldRetry(policy, retryData) &&\n (err.code === \"ETIMEDOUT\" ||\n err.code === \"ESOCKETTIMEDOUT\" ||\n err.code === \"ECONNREFUSED\" ||\n err.code === \"ECONNRESET\" ||\n err.code === \"ENOENT\")\n ) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await utils.delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (error) {\n return retry(policy, request, operationResponse, error, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n Csv = \",\",\n Ssv = \" \",\n Tsv = \"\\t\",\n Pipes = \"|\",\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { AgentSettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nexport function agentPolicy(agentSettings?: AgentSettings): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new AgentPolicy(nextPolicy, options, agentSettings!);\n },\n };\n}\n\nexport class AgentPolicy extends BaseRequestPolicy {\n agentSettings: AgentSettings;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n agentSettings: AgentSettings\n ) {\n super(nextPolicy, options);\n this.agentSettings = agentSettings;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.agentSettings) {\n request.agentSettings = this.agentSettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"../util/constants\";\nimport { URLBuilder } from \"../url\";\n\n/**\n * @internal\n */\nexport const noProxyList: string[] = loadNoProxy();\nconst byPassedList: Map = new Map();\n\n/**\n * @internal\n */\nexport function getEnvironmentValue(name: string): string | undefined {\n if (process.env[name]) {\n return process.env[name];\n } else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\nfunction loadEnvironmentProxyValue(): string | undefined {\n if (!process) {\n return undefined;\n }\n\n const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n const allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n\n return httpsProxy || allProxy || httpProxy;\n}\n\n// Check whether the host of a given `uri` is in the noProxyList.\n// If there's a match, any request sent to the same host won't have the proxy settings set.\n// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\nfunction isBypassed(uri: string): boolean | undefined {\n if (noProxyList.length === 0) {\n return false;\n }\n const host = URLBuilder.parse(uri).getHost()!;\n if (byPassedList.has(host)) {\n return byPassedList.get(host);\n }\n let isBypassedFlag = false;\n for (const pattern of noProxyList) {\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n } else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n } else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n byPassedList.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n\n/**\n * @internal\n */\nexport 
function loadNoProxy(): string[] {\n const noProxy = getEnvironmentValue(Constants.NO_PROXY);\n if (noProxy) {\n return noProxy\n .split(\",\")\n .map((item) => item.trim())\n .filter((item) => item.length);\n }\n\n return [];\n}\n\n/**\n * @internal\n */\nfunction extractAuthFromUrl(\n url: string\n): { username?: string; password?: string; urlWithoutAuth: string } {\n const atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n\n const schemeIndex = url.indexOf(\"://\");\n const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n const auth = url.substring(authStart, atIndex);\n const colonIndex = auth.indexOf(\":\");\n const hasPassword = colonIndex !== -1;\n const username = hasPassword ? auth.substring(0, colonIndex) : auth;\n const password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username,\n password,\n urlWithoutAuth,\n };\n}\n\nexport function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n\n const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl);\n const parsedUrl = URLBuilder.parse(urlWithoutAuth);\n const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username,\n password,\n };\n}\n\nexport function proxyPolicy(proxySettings?: ProxySettings): RequestPolicyFactory {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ProxyPolicy(nextPolicy, options, proxySettings!);\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n proxySettings: ProxySettings;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n proxySettings: ProxySettings\n ) {\n super(nextPolicy, options);\n this.proxySettings = proxySettings;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.proxySettings && !isBypassed(request.url)) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyOptionsLike,\n RequestPolicyFactory,\n} from \"./requestPolicy\";\nimport { WebResourceLike } from \"../webResource\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { delay } from \"../util/utils\";\n\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\nconst DEFAULT_RETRY_COUNT = 3;\n\n/**\n * Options that control how to retry on response status code 429.\n */\nexport interface ThrottlingRetryOptions {\n /**\n * The maximum number of retry attempts. 
Defaults to 3.\n */\n maxRetries?: number;\n}\n\nexport function throttlingRetryPolicy(\n maxRetries: number = DEFAULT_RETRY_COUNT\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries);\n },\n };\n}\n\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private retryLimit: number;\n\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) {\n super(nextPolicy, options);\n this.retryLimit = retryLimit;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => {\n return this.retry(httpRequest, response, 0);\n });\n }\n\n private async retry(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse,\n retryCount: number\n ): Promise {\n if (httpResponse.status !== StatusCodes.TooManyRequests) {\n return httpResponse;\n }\n\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader && retryCount < this.retryLimit) {\n const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader(\n retryAfterHeader\n );\n if (delayInMs) {\n await delay(delayInMs);\n const res = await this._nextPolicy.sendRequest(httpRequest);\n return this.retry(httpRequest, res, retryCount + 1);\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { Constants as MSRestConstants } from \"../util/constants\";\nimport { WebResource } from \"../webResource\";\n\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenResponse } from \"./tokenResponse\";\n\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter.\n */\nexport const azureResourceManagerEndpoints = [\n \"https://management.windows.net\",\n \"https://management.chinacloudapi.cn\",\n \"https://management.usgovcloudapi.net\",\n \"https://management.cloudapi.de\",\n];\n\n/**\n * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to\n * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication.\n */\nexport class AzureIdentityCredentialAdapter implements ServiceClientCredentials {\n private azureTokenCredential: TokenCredential;\n private scopes: string | string[];\n constructor(\n azureTokenCredential: TokenCredential,\n scopes: string | string[] = \"https://management.azure.com/.default\"\n ) {\n this.azureTokenCredential = azureTokenCredential;\n this.scopes = scopes;\n }\n\n public async getToken(): Promise {\n const accessToken = await this.azureTokenCredential.getToken(this.scopes);\n if (accessToken !== null) {\n const result: TokenResponse = {\n accessToken: accessToken.token,\n tokenType: DEFAULT_AUTHORIZATION_SCHEME,\n expiresOn: accessToken.expiresOnTimestamp,\n };\n return result;\n } else {\n throw new Error(\"Could find token for scope\");\n }\n }\n\n public async signRequest(webResource: WebResource) {\n const tokenResponse = await this.getToken();\n webResource.headers.set(\n MSRestConstants.HeaderConstants.AUTHORIZATION,\n `${tokenResponse.tokenType} ${tokenResponse.accessToken}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { OperationArguments } from \"./operationArguments\";\nimport {\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n OperationParameter,\n ParameterPath,\n} from \"./operationParameter\";\nimport { isStreamOperation, OperationSpec } from \"./operationSpec\";\nimport {\n deserializationPolicy,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nimport { exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport {\n userAgentPolicy,\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n} from \"./policies/userAgentPolicy\";\nimport { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport { URLBuilder } from \"./url\";\nimport * as utils from \"./util/utils\";\nimport { stringifyXML } from \"./util/xml\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResourceLike,\n isWebResourceLike,\n WebResource,\n} from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { agentPolicy } from \"./policies/agentPolicy\";\nimport { proxyPolicy, getDefaultProxySettings } from \"./policies/proxyPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { Agent } from \"http\";\nimport {\n AzureIdentityCredentialAdapter,\n azureResourceManagerEndpoints,\n} from \"./credentials/azureIdentityTokenCredentialAdapter\";\n\n/**\n * HTTP proxy settings (Node.js only)\n */\nexport interface ProxySettings {\n host: string;\n port: number;\n username?: string;\n password?: string;\n}\n\n/**\n * HTTP and HTTPS agents (Node.js only)\n */\nexport interface AgentSettings {\n http: Agent;\n https: Agent;\n}\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP 
pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-command-name\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n /**\n * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only).\n */\n agentSettings?: AgentSettings;\n /**\n * If specified:\n * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient.\n * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. 
Otherwise, the scope would default to \"https://management.azure.com/.default\".\n *\n * If it is not specified:\n * - All OperationSpecs must contain a baseUrl property.\n * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be \"https://management.azure.com/.default\".\n */\n baseUri?: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the ServiceClient.\n */\nexport class ServiceClient {\n /**\n * The base URI against which requests will be made when using this ServiceClient instance.\n *\n * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient.\n * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default \"https://management.azure.com/.default\"\n *\n * If it is not specified, all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptionsLike;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @constructor\n * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication.\n * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: ServiceClientCredentials | TokenCredential,\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n if (options.baseUri) {\n this.baseUri = options.baseUri;\n }\n\n let serviceClientCredentials: ServiceClientCredentials | undefined;\n if (isTokenCredential(credentials)) {\n let scope: string | undefined = undefined;\n if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) {\n scope = `${options.baseUri}/.default`;\n }\n serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope);\n } else {\n serviceClientCredentials = credentials;\n }\n\n if (serviceClientCredentials && !serviceClientCredentials.signRequest) {\n throw new Error(\"credentials argument needs to implement signRequest method\");\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || new DefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n requestPolicyFactories = createDefaultRequestPolicyFactories(\n serviceClientCredentials,\n options\n );\n if (options.requestPolicyFactories) {\n const newRequestPolicyFactories:\n | void\n | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided 
httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from.\n * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest.\n * @param {ServiceCallback} callback The callback to call when the response is received.\n */\n sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const httpRequest = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter)\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue != undefined) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter)\n );\n if (queryParameter.collectionFormat != undefined) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if 
(queryParameterValue.length === 0) {\n queryParameterValue = \"\";\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] = item == undefined ? \"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat != undefined &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue != undefined) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter)\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseBody == undefined) {\n httpRequest.streamResponseBody = isStreamOperation(operationSpec);\n }\n\n result = this.sendRequest(httpRequest).then((res) =>\n flattenResponse(res, operationSpec.responses[res.status])\n );\n } catch (error) {\n result = Promise.reject(error);\n }\n\n const 
cb = callback;\n if (cb) {\n result\n // tslint:disable-next-line:no-null-keyword\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName } = bodyMapper;\n const typeName = bodyMapper.type.name;\n try {\n if (httpRequest.body != undefined || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString\n );\n const isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n httpRequest.body,\n xmlElementName || xmlName || serializedName!\n ),\n { rootName: xmlName || serializedName }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(httpRequest.body, {\n rootName: xmlName || serializedName,\n });\n }\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue != undefined) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter)\n );\n }\n }\n }\n}\n\nfunction isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory {\n return typeof instance.create === \"function\";\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n credentials: ServiceClientCredentials | RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (credentials) {\n if (isRequestPolicyFactory(credentials)) {\n 
factories.push(credentials);\n } else {\n factories.push(signingPolicy(credentials));\n }\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...options.redirectOptions,\n };\n if (redirectOptions.handleRedirects) {\n factories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n const proxySettings = options.proxySettings || getDefaultProxySettings();\n if (proxySettings) {\n factories.push(proxyPolicy(proxySettings));\n }\n\n if (options.agentSettings) {\n factories.push(agentPolicy(options.agentSettings));\n }\n\n return factories;\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString);\n if (propertyValue !== undefined) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent != undefined && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = (obj: {}) =>\n Object.defineProperty(obj, \"_response\", {\n value: _response,\n });\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n // We're expecting a sequece(array) make sure that the response body is in the\n // correct format, if not make it an empty array []\n const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : [];\n const arrayResponse = [...parsedBody] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function logPolicy(logger: any = console.log): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new LogPolicy(nextPolicy, options, logger);\n },\n };\n}\n\nexport class LogPolicy extends BaseRequestPolicy {\n logger?: any;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n logger: any = console.log\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response));\n }\n}\n\nfunction logResponse(\n policy: LogPolicy,\n response: HttpOperationResponse\n): Promise {\n policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`);\n policy.logger(`>> Response status code: ${response.status}`);\n const responseBody = response.bodyAsText;\n policy.logger(`>> Body: ${responseBody}`);\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * A credentials object that uses a token string and a authorzation scheme to authenticate.\n */\nexport class TokenCredentials implements ServiceClientCredentials {\n token: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new TokenCredentials object.\n *\n * @constructor\n * @param {string} token The token.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) {\n if (!token) {\n throw new Error(\"token cannot be null or undefined.\");\n }\n this.token = token;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @return {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(\n HeaderConstants.AUTHORIZATION,\n `${this.authorizationScheme} ${this.token}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n userName: string;\n password: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @constructor\n * @param {string} userName User name.\n * @param {string} password Password.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers 
= new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\n/**\n * @interface ApiKeyCredentialOptions\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @constructor\n * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param {WebResource} webResource The WebResource to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @constructor\n * @param {string} topicKey The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class DomainCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid DomainCredentials object.\n *\n * @constructor\n * @param {string} domainKey The EventGrid domain key\n */\n constructor(domainKey: string) {\n if (!domainKey || (domainKey && typeof domainKey !== \"string\")) {\n throw new Error(\"domainKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": domainKey,\n },\n };\n super(options);\n }\n}\n"],"names":["uuidv4","base64.decodeString","base64.encodeByteArray","utils.isValidUuid","utils.isDuration","__extends","Transform","tunnel.httpsOverHttps","tunnel.httpsOverHttp","tunnel.httpOverHttps","tunnel.httpOverHttp","tough.CookieJar","https.Agent","http.Agent","HttpPipelineLogLevel","xml2js.Builder","xml2js.Parser","utils.stripRequest","utils.stripResponse","utils\n .delay","utils.generateUuid","os.arch","os.type","os.release","utils\n 
.delay","retry","shouldRetry","updateRetryData","utils.delay","QueryCollectionFormat","MSRestConstants","DefaultHttpClient","utils.prepareXMLRootList","__spreadArrays","utils.isPrimitiveType","DEFAULT_AUTHORIZATION_SCHEME","HeaderConstants","base64.encodeString"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA;AACA;AAEA;;;AAGA,SAAS,YAAY,CAAC,UAAkB;IACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;SA4Ee,iBAAiB,CAAC,MAAY;IAC5C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,KAAK,CAAC;KACd;IAED,IACE,OAAO,MAAM,CAAC,UAAU,KAAK,UAAU;QACvC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU;QAClC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,QAAQ,KAAK,UAAU;QACrC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU;QACnC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,WAAW,KAAK,UAAU;QACxC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EACnC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;IAME,qBAAY,UAA2B;QACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;QACtB,IAAI,UAAU,EAAE;YACd,KAAK,IAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;aAC9C;SACF;KACF;;;;;;;IAQM,yBAAG,GAAV,UAAW,UAAkB,EAAE,WAA4B;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;YAC3C,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;KACH;;;;;;IAOM,yBAAG,GAAV,UAAW,UAAkB;QAC3B,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QACtE,OAAO,CAAC,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;KAC3C;;;;IAKM,8BAAQ,GAAf,UAAgB,UAAkB;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;KACrD;;;;;;IAOM,4BAAM,GAAb,UAAc,UAAkB;QAC9B,IAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QAClD,OAAO,MAAM,CAAC;KACf;;;;IAKM,gCAAU,GAAjB;QACE,IAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;SAClD;QACD,OAAO,MAAM,CAAC;KACf;;;;IAKM,kCAAY,GAAnB;QACE,IAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;SAC3C;QACD,OAAO,OAAO,CAAC;KAChB;;;;IAKM,iCAAW,GAAlB;QACE,IAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,WAAW,CAAC;KACpB;;;;IAKM,kCAAY,GAAnB;QACE,IAAM,YAAY,GAAa,EAAE,CAAC;QAClC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;SACrC;QACD,OAAO,YAAY,CAAC;KACrB;;;;IAKM,4BAAM,GAAb;QACE,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;KAC1B;;;;IAKM,8BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;KACtC;;;;IAKM,2BAAK,GAAZ;QACE,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;KAC3C;IACH,kBAAC;AAAD,CAAC;;ACrOD;AACA;AAEA;;;;AAIA,SAAgB,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;;AAIA,SAAgB,eAAe,CAAC,KAAiB;;;IAG/C,IAAM,WAAW,GAAG,KAAK,YAAY,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;IAC/F,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;;AAIA,SAAgB,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,CAAC;;AC5BD;AACA;AAEA,IAAa,SAAS,GAAG;;;;;;IAMvB,aAAa,EAAE,OAAO;;;;;;;IAQtB,IAAI,EAAE,OAAO;;;;;;;IAQb,KAAK,EAAE,QAAQ;;;;;;;IAQf,UAAU,EAAE,YAAY;;;;;;;IAQxB
,WAAW,EAAE,aAAa;;;;IAK1B,QAAQ,EAAE,UAAU;;;;IAKpB,SAAS,EAAE,WAAW;IAEtB,aAAa,EAAE;;;;;;;QAOb,SAAS,EAAE;YACT,GAAG,EAAE,KAAK;YACV,GAAG,EAAE,KAAK;YACV,MAAM,EAAE,QAAQ;YAChB,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;SACf;QAED,WAAW,EAAE;YACX,eAAe,EAAE,GAAG;SACrB;KACF;;;;IAKD,eAAe,EAAE;;;;;;;QAOf,aAAa,EAAE,eAAe;QAE9B,oBAAoB,EAAE,QAAQ;;;;;;;;;QAU9B,WAAW,EAAE,aAAa;;;;;;;QAQ1B,UAAU,EAAE,YAAY;KACzB;CACF;;AC3GD;AACA,AAQA;;;AAGA,IAAa,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;IAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;IACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B,AAUA;;;;;;AAMA,SAAgB,SAAS,CAAC,GAAW;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;SAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;;;AAQA,SAAgB,aAAa,CAAC,QAA+B;IAC3D,IAAM,gBAAgB,GAAQ,EAAE,CAAC;IACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;IAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1C,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;;;AAQA,SAAgB,YAAY,CAAC,OAAwB;IACnD,IAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;QAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;KACjD;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;;;AAOA,SAAgB,WAAW,CAAC,IAAY;IACtC,IAAM,cAAc,GAAG,IAAI,MAAM,CAC/B,+EAA+E,EAC/E,IAAI,CACL,CAAC;IACF,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED,AA4BA;;;;;AAKA,SAAgB,YAAY;IAC1B,OAAOA,OAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;;;;AAWA,SAAgB,2BAA2B,CAAC,gBAA4B,EAAE,SAAc;IACtF,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxC,gBAAgB,CAAC,OAAO,CAAC,UAAC,cAAc;QACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;KACtC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,AAeA;;;;;;AAMA,SAAgB,KAAK,CAAI,CAAS,EAAE,KAAS;IAC3C,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,IAAK,OAAA,UAAU,CAAC,cAAM,OAAA,OAAO,CAAC,KAAK,CAAC,GAAA,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;AACvE,CAAC;AAqBD;;;;;;AAMA,SAAgB,iBAAiB,CAAC,OAAqB;IACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAY;QAClB,OAAO,CAAC,IAAI,CACV,UAAC,IAAS;YACR,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;SACrB,EACD,UAAC,GAAU;YACT,EAAE,CAAC,GAAG,CAAC,CAAC;SACT,CACF,CAAC;KACH,CAAC;AACJ,CAAC;AAED;;;;;AAKA,SAAgB,wBAAwB,CAAI,OAAuC;IACjF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAsB;QAC5B,OAAO,CAAC,IAAI,CACV,UAAC,IAA2B;YAC1B,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;SAC3E,EACD,UAAC,GAAU;YACT,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;SAC3B,CACF,CAAC;KACH,CAAC;AACJ,CAAC;AAED,SAAgB,kBAAkB,CAAC,GAAQ,EAAE,WAAmB;;IAC9D,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;KACb;IACD,gBAAS,GAAC,WAAW,IAAG,GAAG,KAAG;AAChC,CAAC;AAED;;;;;AAKA,SAAgB,WAAW,CAAC,UAAe,EAAE,WAAkB;IAC7D,WAAW,CAAC,OAAO,CAAC,UAAC,WAAW;QAC9B,MAAM,CAAC,mBAAmB,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAC,IAAI;YAC7D,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;SAC1D,CAAC,CAAC;KACJ,CAAC,CAAC;AACL,CAAC;AAED,IAAM,mBAAmB,GAAG,qKAAqK,CAAC;AAElM;;;;;AAKA,SAAgB,UAAU,CAAC,KAAa;IACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;;AAOA,SAAgB,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;;AAMA,SAAgB,eAAe,CAAC,KAAU;IACxC,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,
KAAK,UAAU,KAAK,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC;;AC/RD;AACA;IAME,oBACkB,YAAyC,EACzC,KAAe;QADf,6BAAA,EAAA,iBAAyC;QAAzC,iBAAY,GAAZ,YAAY,CAA6B;QACzC,UAAK,GAAL,KAAK,CAAU;KAC7B;IAEJ,wCAAmB,GAAnB,UAAoB,MAAc,EAAE,KAAU,EAAE,UAAkB;QAChE,IAAM,cAAc,GAAG,UAAC,cAAuC,EAAE,eAAoB;YACnF,MAAM,IAAI,KAAK,CACb,OAAI,UAAU,wBAAiB,KAAK,2CAAoC,cAAc,YAAM,eAAe,MAAG,CAC/G,CAAC;SACH,CAAC;QACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YACtC,IAAA,KAYF,MAAM,CAAC,WAAW,EAXpB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,UAAU,gBAAA,EACV,OAAO,aAAA,EACP,WAAW,iBACS,CAAC;YACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,KAAK,GAAG,UAAU,KAAK,CAAC,EAAE;gBACvD,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;aAC1C;YACD,IAAI,OAAO,EAAE;gBACX,IAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC;gBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;oBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;iBACpC;aACF;YACD,IACE,WAAW;gBACX,KAAK,CAAC,IAAI,CAAC,UAAC,IAAS,EAAE,CAAS,EAAE,EAAc,IAAK,OAAA,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,GAAA,CAAC,EAC5E;gBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;aAC5C;SACF;KACF;;;;;;;;;;;;IAaD,8BAAS,GAAT,UAAU,MAAc,EAAE,MAAW,EAAE,UAAmB;QACxD,IAAI,OAAO,GAAQ,EAAE,CAAC;QACtB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAC7C,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;SAC9B;;;;;;;;;;QAYO,IAAA,QAAQ,GAAe,MAAM,SAArB,EAAE,QAAQ,GAAK,MAAM,SAAX,CAAY;QAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,0BAAuB,CAAC,CAAC;SACvD;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,kCAA+B,CAAC,CAAC;SAC/D;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;YACtD,MAAM,IAAI,KAAK,CAAI,UAAU,qBAAkB,CAAC,CAAC;SAClD;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;SAClB;aAAM;;YAEL,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YACrD,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBACxC,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gDAAgD,CAAC,KAAK,IAAI,EAAE;gBACtF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;aAC/D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAM,UAAU,GAAe,MAAoB,CAAC;gBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;aAChF;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,uDAAuD,CAAC,KAAK,IAAI,EAClF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aAC9D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAA
C,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,qBAAqB,CAAC,IAAI,EAAE,MAAwB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACrF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAA0B,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACzF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,IAAI,EAAE,MAAyB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACvF;SACF;QACD,OAAO,OAAO,CAAC;KAChB;;;;;;;;;;;;IAaD,gCAAW,GAAX,UAAY,MAAc,EAAE,YAAiB,EAAE,UAAkB;QAC/D,IAAI,YAAY,IAAI,SAAS,EAAE;YAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;;;gBAIzE,YAAY,GAAG,EAAE,CAAC;aACnB;;YAED,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;gBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;aACpC;YACD,OAAO,YAAY,CAAC;SACrB;QAED,IAAI,OAAY,CAAC;QACjB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;YAC9C,OAAO,GAAG,wBAAwB,CAAC,IAAI,EAAE,MAAyB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;SAC/F;aAAM;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;;;;;;gBAMd,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;oBACpE,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;iBAClC;aACF;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAC3C,OAAO,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC;gBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;iBAChB;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;iBACjB;qBAAM;oBACL,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,mDAAmD,CAAC,KAAK,IAAI,EAAE;gBACzF,OAAO,GAAG,YAAY,CAAC;aACxB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,qCAAqC,CAAC,KAAK,IAAI,EAAE;gBAC3E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAY,CAAC,CAAC;aAClC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;aACxC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAGC,YAAmB,CAAC,YAAY,CAAC,CAAC;aAC7C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,oBAAoB,CAAC,YAAY,CAAC,CAAC;aAC9C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAAwB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;aAC7F;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,CACX,CAAC;aACH;SACF;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC;KAChB;IACH,iBAAC;AAAD,CAAC,IAAA;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;IACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;IACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QAC1C,EAAE,GAAG,CAAC;KACP;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW;IACpC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,EAAE,MAAM,YAAY,UAAU,CAAC,EAAE;QACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;KAC5F;;IAED,IAAM,GAAG,GAAGC,eAAsB,CAAC,MAAM,CAAC,CAAC;;IAE3C,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW;IACvC,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;KACxF;;IAED,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CA
AC,CAAC;;IAElD,OAAOD,YAAmB,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB;IAClD,IAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;IACtB,IAAI,IAAI,EAAE;QACR,IAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAEjC,KAAmB,UAAQ,EAAR,qBAAQ,EAAR,sBAAQ,EAAR,IAAQ,EAAE;YAAxB,IAAM,IAAI,iBAAA;YACb,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;aACvD;iBAAM;gBACL,YAAY,IAAI,IAAI,CAAC;gBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;aACnB;SACF;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB;IACtC,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;KAC3B;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;IAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,6BAA0B,CAAC,CAAC;aAC9E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAI,UAAU,sBAAgB,KAAK,+BAA2B,CAAC,CAAC;aAChF;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YAC9C,IAAI,EAAE,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAIE,WAAiB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACV,UAAU,sBAAgB,KAAK,gDAA4C,CAC/E,CAAC;aACH;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,8BAA2B,CAAC,CAAC;aAC/E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;gBACvB,UAAU,KAAK,UAAU;gBACzB,EAAE,KAAK,YAAY,WAAW,CAAC;gBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC1B,EAAE,OAAO,IAAI,KAAK,UAAU,IAAI,KAAK,YAAY,IAAI,CAAC,EACtD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,0GAAuG,CACrH,CAAC;aACH;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;IAClF,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CACb,uDAAqD,UAAU,sBAAmB,CACnF,CAAC;KACH;IACD,IAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,UAAC,IAAI;QACxC,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;SACnD;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;KACvB,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACV,KAAK,kCAA6B,UAAU,gCAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,MAAG,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAGD,eAAsB,CAAC,KAAK,CAAC,CAAC;KACvC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAClC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YACvC,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK;gBACH,KAAK,YAA
Y,IAAI;sBACjB,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;sBACpC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;SACtD;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,qBAAqB,CAAC,KAAK,IAAI,EAAE;YACzD,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,gEAA6D,CAAC,CAAC;aAC7F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,wEAAqE;oBAChF,mDAAmD,CACtD,CAAC;aACH;YACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;SAC/B;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IAAI,CAACE,UAAgB,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACV,UAAU,4DAAsD,KAAK,QAAI,CAC7E,CAAC;aACH;YACD,KAAK,GAAG,KAAK,CAAC;SACf;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB;IAElB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAI,UAAU,4BAAyB,CAAC,CAAC;KACzD;IACD,IAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,SAAS,GAAG,EAAE,CAAC;IACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;KACzE;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB;IAElB,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,6BAA0B,CAAC,CAAC;KAC1D;IACD,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IACpC,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAkB,UAAmB,EAAnB,KAAA,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAnB,cAAmB,EAAnB,IAAmB,EAAE;QAAlC,IAAM,GAAG,SAAA;QACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;KAC5F;IACD,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;AAKA,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;QACxC,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CACb,4BAAyB,UAAU,2CAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,QAAI,CACN,CAAC;SACH;QAED,IAAM,WAAW,GAAG,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;QACvD,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,sDAAmD,SAAS,QAAI,CAAC,CAAC;SACnF;QACD,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,qDAAqD;iBACnD,cAAW,IAAI,CAAC,SAAS,CACvB,WAAW,CACZ,qBAAc,SAAS,wBAAiB,UAAU,QAAI,CAAA,CAC1D,CAAC;SACH;KACF;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB;;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;KACzE;IAED,IA
AI,MAAM,IAAI,SAAS,EAAE;QACvB,IAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;YAAtC,IAAM,GAAG,SAAA;YACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;aACV;YAED,IAAI,QAAQ,SAAoB,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;iBACnC;qBAAM;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;iBACpE;aACF;iBAAM;gBACL,IAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;gBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;gBAEvB,KAAuB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;oBAAzB,IAAM,QAAQ,cAAA;oBACjB,IAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IAAI,WAAW,IAAI,SAAS,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;wBACxD,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;qBAC7B;oBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;iBACvC;aACF;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;sBAChC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;sBAChD,UAAU,CAAC;gBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;gBAC5F,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;oBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;iBACrC;gBAED,IAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,CACnB,CAAC;gBACF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;oBAC1D,IAAI,cAAc,CAAC,cAAc,EAAE;;;;wBAIjC,YAAY,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,CAAC;wBACtC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC5C;yBAAM,IAAI,cAAc,CAAC,YAAY,EAAE;wBACtC,YAAY,CAAC,QAAQ,CAAC,aAAK,GAAC,cAAc,CAAC,cAAe,IAAG,eAAe,KAAE,CAAC;qBAChF;yBAAM;wBACL,YAAY,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC1C;iBACF;aACF;SACF;QAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;QACpE,IAAI,0BAA0B,EAAE;YAC9B,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;oCAC/B,cAAc;gBACvB,IAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,UAAC,EAAE,IAAK,OAAA,EAAE,KAAK,cAAc,GAAA,CAAC,CAAC;gBAC5E,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,CAC1C,CAAC;iBACH;;YARH,KAAK,IAAM,cAAc,IAAI,MAAM;wBAAxB,cAAc;aASxB;SACF;QAED,OAAO,OAAO,CAAC;KAChB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,oBAAoB,CAAC,YAAoB;IAChD,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAC3C,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;KACnF;IAED,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,IAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;QAAtC,IAAM,GAAG,SAAA;QACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5B,IAAA,cAAc,GAA8B,cAAc,eAA5C,EAAE,OAAO,GAAqB,cAAc,QAAnC,EAAE,cAAc,GAAK,cAAc,eAAnB,CAAoB;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;QACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;YACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;SACxD;QAED,IAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;QAC3F,IAAI,sBAAsB,EAAE;YAC1B,IAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAwB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAA9C,IAAM,SAAS,SAAA;gBAClB,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;oBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBA
AsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,CACnB,CAAC;iBACH;gBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;aACtC;YACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;SAC5B;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,CAAC,EAAE;gBACnD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,CAAC,CAAC,OAAQ,CAAC,EACxB,kBAAkB,CACnB,CAAC;aACH;iBAAM;gBACL,IAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,iBAAiB,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;gBACpD,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,iBAAiB,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;oBAC3C,iBAAiB,GAAG,iBAAiB,IAAI,iBAAiB,CAAC,cAAe,CAAC,CAAC;oBAE5E,IAAM,kBAAkB,GAAG,iBAAiB,KAAK,SAAS,CAAC;oBAC3D,IAAI,kBAAkB,EAAE;wBACtB,iBAAiB,GAAG,EAAE,CAAC;qBACxB;iBACF;gBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,iBAAiB,EACjB,kBAAkB,CACnB,CAAC;aACH;SACF;aAAM;;YAEL,IAAI,gBAAgB,SAAA,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;;YAEvB,KAAmB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;gBAArB,IAAM,IAAI,cAAA;gBACb,IAAI,CAAC,GAAG;oBAAE,MAAM;gBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;aACjB;YACD,gBAAgB,GAAG,GAAG,CAAC;YACvB,IAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;;;;;;;;;;YAUtE,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;gBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;aAC1C;YAED,IAAI,eAAe,SAAA,CAAC;;YAEpB,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;gBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;gBACrC,IAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;;;gBAGF,KAA2B,UAAwB,EAAxB,KAAA,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oBAA1C,IAAA,WAAY,EAAX,KAAG,QAAA,EAAE,KAAK,QAAA;oBACpB,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,KAAG,CAAC,EAAE;wBACtC,aAAa,CAAC,KAAG,CAAC,GAAG,KAAK,CAAC;qBAC5B;iBACF;gBACD,QAAQ,GAAG,aAAa,CAAC;aAC1B;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;gBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;gBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;aACjC;SACF;KACF;IAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IACpE,IAAI,0BAA0B,EAAE;QAC9B,IAAM,oBAAoB,GAAG,UAAC,gBAAwB;YACpD,KAAK,IAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;gBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;oBACjC,OAAO,KAAK,CAAC;iBACd;aACF;YACD,OAAO,IAAI,CAAC;SACb,CAAC;QAEF,KAAK,IAAM,gBAAgB,IAAI,YAAY,EAAE;YAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,CAC5C,CAAC;aACH;SACF;KACF;SAAM,IAAI,YAAY,EAAE;QACvB,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;gBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;gBACnC,CAAC,oBAAoB,CAAC,GAAG,CAAC,EAC1B;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;aACnC;SACF;KACF;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB;;IAGlB,IAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;SACpF;QACD,OAAO,cAAc,CAAC;KACvB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAA
S,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB;;IAGlB,IAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;;YAEhC,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,IAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,EAAK,UAAU,SAAI,CAAC,MAAG,CAAC,CAAC;SACxF;QACD,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;IAExD,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC5F,IAAI,wBAAwB,EAAE;QAC5B,IAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;YAClC,IAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;gBACnC,IAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;gBACjE,IAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;sBAC3B,kBAAkB;sBAClB,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,IAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;gBACrF,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;iBAC5B;aACF;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;IAEvB,QACE,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,EACpE;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;IAClF,QACE,QAAQ;QACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,EAC/D;AACJ,CAAC;AAoHD;AACA,SAAgB,eAAe,CAAC,WAAgB;IAC9C,IAAI,WAAW,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;QACrC,WAAW,GAAGF,eAAsB,CAAC,WAAW,CAAC,CAAC;QAClD,OAAO,WAAW,CAAC;KACpB;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;QACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;KAClC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,IAAM,KAAK,GAAG,EAAE,CAAC;QACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QACD,OAAO,KAAK,CAAC;KACd;SAAM,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,IAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,KAAK,IAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;SAC/D;QACD,OAAO,UAAU,CAAC;KACnB;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;;AAGA,SAAS,OAAO,CAAmB,CAAW;IAC5C,IAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAkB,UAAC,EAAD,OAAC,EAAD,eAAC,EAAD,IAAC,EAAE;QAAhB,IAAM,GAAG,UAAA;QACZ,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,IAAa,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;CACX,CAAC;;ACtiCF;AACA,SAgKgB,iBAAiB,CAAC,MAAW;IAC3C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,OAAO,KAAK,CAAC;KACd;IACD,IACE,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ;QAC9B,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ;QACjC,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ;QAClC,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;QACjC,OAAO,MAAM,CAAC,yBAAyB,KAAK,UAAU;QACtD,OAAO,MAAM,CAAC,OAAO,KAAK,UAAU;QACpC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU,EAClC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;AAQA;IAyCE,qBACE,GAAY,EACZ,MAAoB,EACpB,IAAU,EACV,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,aAA6B,EAC7B,aAAsB;QAEtB,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;Q
ACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;QAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;QAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;QAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;KACpC;;;;;;IAOD,+CAAyB,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;SACjD;KACF;;;;;;IAOD,6BAAO,GAAP,UAAQ,OAA8B;QACpC,IAAI,CAAC,OAAO,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QAED,IAAI,OAAO,CAAC,MAAM,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YAC/E,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;SACH;QAED,IACE,CAAC,OAAO,CAAC,YAAY,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ;aACvF,OAAO,CAAC,GAAG,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EACvE;YACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;;QAGD,IAAI,OAAO,CAAC,GAAG,EAAE;YACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;SACxB;;QAGD,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;oBACrB,OAAO,CAAC,MAAM;oBACd,4CAA4C;oBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;aACH;SACF;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;;QAG1D,IAAI,OAAO,CAAC,YAAY,EAAE;YAChB,IAAA,cAAY,GAAqB,OAAO,aAA5B,EAAE,gBAAc,GAAK,OAAO,eAAZ,CAAa;YACjD,IAAI,OAAO,cAAY,KAAK,QAAQ,EAAE;gBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;aACnE;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;aAClD;YACD,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,KAAG,GACL,OAAO;iBACN,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC;iBACjC,cAAY,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,cAAY,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAY,CAAC,CAAC;YACxE,IAAM,QAAQ,GAAG,KAAG,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;YAC9C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,gBAAc,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,6EAA0E,CACxG,CAAC;iBACH;gBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;oBAC7B,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;oBACxC,IAAM,SAAS,GAAI,gBAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;wBAClB,SAAS,KAAK,SAAS;wBACvB,EAAE,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;wBACA,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,qCAAgC,aAAe;6BAC1E,oCAAkC,gBAAc,WAAM,IAAI,CAAC,SAAS,CAClE,gBAAc,EACd,SAAS,EACT,CAAC,CACF,MAAG,CAAA;6BACJ,8EAA0E,aAAa,kCAA6B,CAAA;6BACpH,6CAAwC,aAAa,mEAA6D,CAAA,CACrH,CAAC;qBACH;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;qBACxD;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;4BACpB,MAAM,IAAI,KAAK,CACb,4BAA0B,aAAa,0EAAmE,CAC3G,CAAC;yBACH;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;yBAC1C;6BAAM;4BACL,KAAG,GAAG,KAAG,CAAC
,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;yBAC9D;qBACF;iBACF,CAAC,CAAC;aACJ;YACD,IAAI,CAAC,GAAG,GAAG,KAAG,CAAC;SAChB;;QAGD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,IAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;oBAC3E,yFAAqF;oBACrF,mJAA2I,CAC9I,CAAC;aACH;;YAED,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;aACjB;;YAED,IAAM,WAAW,GAAG,EAAE,CAAC;;YAEvB,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAChB,KAAK,IAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,UAAU,EAAE;oBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;qBAC7D;yBAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;4BACrB,MAAM,IAAI,KAAK,CACb,6BAA2B,cAAc,0EAAmE,CAC7G,CAAC;yBACH;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;yBAC/C;6BAAM;4BACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;4BAC9E,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;yBACnE;qBACF;iBACF;aACF;;YAED,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnC;;QAGD,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAyB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAAlD,IAAM,UAAU,SAAA;gBACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;aACnD;SACF;;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;SAC9C;;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,YAAY,EAAE,CAAC,CAAC;SAC5D;;QAGD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;SACrE;;QAGD,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,IAAI,SAAS,EAAE;;YAE7B,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;iBAClD;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;iBAC9D;aACF;iBAAM;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;iBACH;gBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC1C;aACF;SACF;QAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;QACjD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;QAC3C,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QAErD,OAAO,IAAI,CAAC;KACb;;;;;IAMD,2BAAK,GAAL;QACE,IAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,CACnB,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,
CAAC;SACjC;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC3C;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;SACnD;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;YAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;SAC/D;QAED,OAAO,MAAM,CAAC;KACf;IACH,kBAAC;AAAD,CAAC;;;;;;;AChhBD;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC
,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASM,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQM,SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASM,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASM,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASM,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GA
AG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CA
AC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;;;AC1WD;;;;AAIA,MAAqB,WAAY,SAAQ,WAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACD,oBAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpF
D;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;AC5DD;AACA;IAK+BG,mCAAK;IAUlC,mBACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAChC,IAAU;QANZ,YAQE,kBAAM,OAAO,CAAC,SAQf;QAPC,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,KAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QAEjB,MAAM,CAAC,cAAc,CAAC,KAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;;KAClD;IAzBe,4BAAkB,GAAW,oBAAoB,CAAC;IAClD,+BAAqB,GAAW,uBAAuB,CAAC;IACxD,qBAAW,GAAW,aAAa,CAAC;IAwBtD,gBAAC;CAAA,CA3B8B,KAAK;;ACNpC;AACA,AAgCA;IAAA;KAwMC;IAvMO,qCAAW,GAAjB,UAAkB,WAA4B;;;;;;wBAC5C,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;4BACnD,MAAM,IAAI,KAAK,CACb,qFAAqF,CACtF,CAAC;yBACH;wBAEK,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;wBAE9C,IAAI,WAAW,CAAC,WAAW,EAAE;4BAC3B,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;gCACnC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;6BACH;4BAED,aAAa,GAAG,UAAC,KAAY;gCAC3B,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oCAC1B,eAAe,CAAC,KAAK,EAAE,CAAC;iCACzB;6BACF,CAAC;4BACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;yBAClE;wBAED,IAAI,WAAW,CAAC,OAAO,EAAE;4BACvB,UAAU,CAAC;gCACT,eAAe,CAAC,KAAK,EAAE,CAAC;6BACzB,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;yBACzB;wBAED,IAAI,WAAW,CAAC,QAAQ,EAAE;4BAClB,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;4BACrC,gBAAc,IAAI,QAAQ,EAAE,CAAC;4BAC7B,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;;gCAE9C,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oCAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;iCACjB;gCACD,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;oCAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iCACrD;qCAAM;oCACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iCAChC;6BACF,CAAC;4BACF,WAA2C,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;gCAAlC,OAAO;gCACV,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gCACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oCAC5B,KAAS,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wCACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qCACxC;iCACF;qCAAM;oCACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iCACrC;6BACF;4BAED,WAAW,CAAC,IAAI,GAAG,aAAW,CAAC;4BAC/B,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;4BAC3B,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;4BAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gCACpE,IAAI,OAAO,aAAW,CAAC,WAAW,KAAK,UAAU,EAAE;oCACjD,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,mCAAiC,aAAW,CAAC,WAAW,EAAI,CAC7D,CAAC;iCACH;qCAAM;;oCAEL,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iCAC5C;6BACF;yBACF;wBAEG,IAAI,GAAG,WAAW,CAAC,IAAI;8BACvB,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;kCACpC,WAAW,CAAC,IAAI,EAAE;kCAClB,WAAW,CAAC,IAAI;8BAClB,SAAS,CAAC;wBACd,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;4BAChD,gBAAc,CAAC,CAAC;4BACd,kBAAkB,GAAG,IAAIC,gBAAS,CAAC;gC
ACvC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;oCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;oCAC5B,WAAW,CAAC,gBAAiB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;oCAC/C,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;iCAC5B;6BACF,CAAC,CAAC;4BAEH,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;6BAC/B;iCAAM;gCACL,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;6BAC9B;4BAED,IAAI,GAAG,kBAAkB,CAAC;yBAC3B;wBAEyD,qBAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,EAAA;;wBAFK,2BAA2B,GAAyB,SAEzD;wBAEK,WAAW,oBACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAE,eAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,IACf,2BAA2B,CAC/B,CAAC;;;;wBAIiC,qBAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,EAAA;;wBAAzE,QAAQ,GAAmB,SAA8C;wBAEzE,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;;4BAE7C,OAAO,EAAE,OAAO;4BAChB,OAAO,EAAE,WAAW;4BACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;4BACvB,kBAAkB,EAAE,WAAW,CAAC,kBAAkB;kCAC5C,QAAQ,CAAC,IAA0C;kCACrD,SAAS;;6BACD,CAAC,WAAW,CAAC,kBAAkB,EAA/B,wBAA+B;wBAAG,qBAAM,QAAQ,CAAC,IAAI,EAAE,EAAA;;wBAArB,KAAA,SAAqB,CAAA;;;wBAAG,KAAA,SAAS,CAAA;;;wBAPjF,iBAAiB,IAOf,aAAU,KAAqE;4BAC/E,aAAU,GAAE,QAAQ,CAAC,UAAU;4BAC/B,MAAG,GAAE,QAAQ,CAAC,GAAG;+BAClB,CAAC;wBAEI,uBAAqB,WAAW,CAAC,kBAAkB,CAAC;wBAC1D,IAAI,oBAAkB,EAAE;4BAChB,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;4BAExF,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;gCAC9B,gBAAc,CAAC,CAAC;gCACd,oBAAoB,GAAG,IAAIA,gBAAS,CAAC;oCACzC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;wCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;wCAC5B,oBAAkB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;wCACpC,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;qCAC5B;iCACF,CAAC,CAAC;gCACH,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;gCACxC,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;6BAC7D;iCAAM;gCACC,WAAS,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;gCACrE,IAAI,QAAM,EAAE;;oCAEV,oBAAkB,CAAC,EAAE,WAAW,EAAE,QAAM,EAAE,CAAC,CAAC;iCAC7C;6BACF;yBACF;wBAED,qBAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,EAAA;;wBAA5C,SAA4C,CAAC;wBAE7C,sBAAO,iBAAiB,EAAC;;;wBAEnB,UAAU,GAAe,OAAK,CAAC;wBACrC,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;4BACnC,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;6BAAM,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;4BACxC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;wBAED,MAAM,UAAU,CAAC;;;wBAGjB,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;4BACxC,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BACzC,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;6BAC3C;4BACG,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BAC3C,IAAI,gBAAgB,CAAC,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;gCAC3D,kBAAkB,GAAG,gBAAgB,CAAC,iBAAkB,CAAC,kBAAkB,CAAC,CAAC;6BAC9E;4BAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;iCAChD,IAAI,CAAC;;gCACJ,MAAA,WAAW,CAAC,WAAW,0CAAE,mBAAmB,CAAC,OAAO,EAAE,aAAc,EAAE;gCACtE,OAAO;6BACR,CAAC;iCACD,KAAK,CAAC,UAAC,EAAE,KAAO,CAAC,CAAC;yBACtB;;;;;;KAEJ;IAKH,sBAAC;AAAD,CAAC,IAAA;AAED,SAAS,gBAAgB,CAAC,IAAS;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB;IACxC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO;QACzB,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAC1B,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;KAC7B,CAAC,CAAC;AACL,CAAC;AAED,SAAgB,YAAY,CAAC,OAAgB;IAC3C,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,GAAG;QACzB,WAAW,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;KAC7B,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC;;AC/PD;AACA,AAMA;;;AAGA;IAAA;QACmB,cAAS,GAAwD,EAAE,CAAC;KAqHtF;;;;IAhHQ,sBAAG,GAAV;QACE,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,
CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;KAC/C;;;;;;IAOM,sBAAG,GAAV,UAAW,aAAqB,EAAE,cAAmB;QACnD,IAAI,aAAa,EAAE;YACjB,IAAI,cAAc,IAAI,SAAS,EAAE;gBAC/B,IAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,cAAc,GAAG,cAAc,CAAC,QAAQ,EAAE,CAAC;gBAC5F,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;aAC1C;iBAAM;gBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;aACtC;SACF;KACF;;;;;IAMM,sBAAG,GAAV,UAAW,aAAqB;QAC9B,OAAO,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,CAAC;KAClE;;;;IAKM,2BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,KAAK,IAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;YAC1C,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;aACf;YACD,IAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,IAAM,gBAAgB,GAAG,EAAE,CAAC;gBAC5B,KAAoC,UAAc,EAAd,iCAAc,EAAd,4BAAc,EAAd,IAAc,EAAE;oBAA/C,IAAM,qBAAqB,uBAAA;oBAC9B,gBAAgB,CAAC,IAAI,CAAI,aAAa,SAAI,qBAAuB,CAAC,CAAC;iBACpE;gBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACtC;iBAAM;gBACL,MAAM,IAAO,aAAa,SAAI,cAAgB,CAAC;aAChD;SACF;QACD,OAAO,MAAM,CAAC;KACf;;;;IAKa,cAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAE9B,IAAI,IAAI,EAAE;YACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC1B;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACpC,IAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;gBACzC,QAAQ,YAAY;oBAClB,KAAK,eAAe;wBAClB,QAAQ,gBAAgB;4BACtB,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;4BAER,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;4BAER;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;yBACT;wBACD,MAAM;oBAER,KAAK,gBAAgB;wBACnB,QAAQ,gBAAgB;4BACtB,KAAK,GAAG;gCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;4BAER;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;yBACT;wBACD,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;iBACzE;aACF;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;gBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aAC3C;SACF;QAED,OAAO,MAAM,CAAC;KACf;IACH,eAAC;AAAD,CAAC,IAAA;AAED;;;AAGA;IAAA;KAiPC;;;;;IAtOQ,8BAAS,GAAhB,UAAiB,MAA0B;QACzC,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;SAC1B;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SAC5B;KACF;;;;IAKM,8BAAS,GAAhB;QACE,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;;;;;IAMM,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAClC;KACF;;;;IAKM,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;;IAMM,4BAAO,GAAd,UAAe,IAAiC;QAC9C,IAAI,IAAI,IAAI,SAAS,IAAI,IAAI,KAAK,EAAE,EAAE;YACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;SACnC;KACF;;;;IAKM,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;;IAMM,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,IAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;;;gBAGvD,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;aAC9E;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;KACF;;;;;IAMM,+BAAU,GAAjB,UAAkB,IAAwB;QACxC,IAAI,IAAI,EAAE;YACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;YACrD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,G
AAG,CAAC;iBACpB;gBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;aAC3B;YACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACxB;KACF;;;;IAKM,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;IAKM,6BAAQ,GAAf,UAAgB,KAAyB;QACvC,IAAI,CAAC,KAAK,EAAE;YACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;SACzB;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrC;KACF;;;;;;IAOM,sCAAiB,GAAxB,UAAyB,kBAA0B,EAAE,mBAAwB;QAC3E,IAAI,kBAAkB,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;aAC9B;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;SAC1D;KACF;;;;;IAMM,2CAAsB,GAA7B,UAA8B,kBAA0B;QACtD,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,SAAS,CAAC;KACtE;;;;IAKM,6BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,GAAG,SAAS,CAAC;KACzD;;;;IAKO,wBAAG,GAAX,UAAY,IAAY,EAAE,UAA6B;QACrD,IAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;YACvB,IAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;YACxD,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI;oBAChB,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAM,SAAS,GAAuB,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBAC9D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;4BAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;yBACxB;wBACD,MAAM;oBAER,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,gCAA8B,KAAK,CAAC,IAAM,CAAC,CAAC;iBAC/D;aACF;SACF;KACF;IAEM,6BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,MAAM,IAAO,IAAI,CAAC,OAAO,QAAK,CAAC;SAChC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,MAAI,IAAI,CAAC,KAAO,CAAC;SAC5B;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,MAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAI,CAAC;SACxC;QAED,OAAO,MAAM,CAAC;KACf;;;;;IAMM,+BAAU,GAAjB,UAAkB,WAAmB,EAAE,YAAoB;QACzD,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;SACvE;KACF;IAEa,gBAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;QAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;QACnC,OAAO,MAAM,CAAC;KACf;IACH,iBAAC;AAAD,CAAC,IAAA;AAMD;IACE,kBAAmC,IAAY,EAAkB,IAAkB;QAAhD,SAAI,GAAJ,IAAI,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAc;KAAI;IAEzE,eAAM,GAApB,UAAqB,IAAY;QAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;KACrC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEa,cAAK,GAAnB,UAAoB,IAAY;QAC9B,OAAO,IAA
I,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;KACpC;IACH,eAAC;AAAD,CAAC,IAAA;AAED;;;;AAIA,SAAgB,uBAAuB,CAAC,SAAiB;IACvD,IAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACtD,QACE,CAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE;SACpD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC;SACrD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,YACvD;AACJ,CAAC;AAED;;;AAGA;IAME,sBAA4B,KAAa,EAAE,KAAyB;QAAxC,UAAK,GAAL,KAAK,CAAQ;QACvC,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;QAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,IAAI,SAAS,GAAG,KAAK,GAAG,gBAAgB,CAAC;QACnE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;KACxB;;;;;IAMM,8BAAO,GAAd;QACE,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;;;;IAKM,2BAAI,GAAX;QACE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;YAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;SAChC;aAAM;YACL,QAAQ,IAAI,CAAC,aAAa;gBACxB,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;gBAER,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBAER;oBACE,MAAM,IAAI,KAAK,CAAC,qCAAmC,IAAI,CAAC,aAAe,CAAC,CAAC;aAC5E;SACF;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;KAC7B;IACH,mBAAC;AAAD,CAAC,IAAA;AAED;;;AAGA,SAAS,aAAa,CAAC,SAAuB;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;QAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;KACjD;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;AAGA,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;;AAGA,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;;AAIA,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;IAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;SACV;QACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;KACjC;AACH,CAAC;AAED;;;;AAIA,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;IACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;IAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;QACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;KAClC;IACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;;AAIA,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACrC,IAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;SACP;aAAM;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;AAIA,SAAS,sBAAsB,CAAC,SAAuB;IACrD,OAAO,SAAS,CAAC,SAAS,EAAE,UAAC,SAAiB,IAAK,OAAA,uBAAuB,CAAC,SAAS,CAAC,GAAA,CAAC,CAAC;AACzF,CAAC;AAED;;;;AAIA,SAAS,kBAAkB,CAAC,SAAuB;IAAE,+BAAkC;SAAlC,UAAkC,EAAlC,qBAAkC,EAAlC,IAAkC;QAAlC,8CAAkC;;IACrF,OAAO,SAAS,CACd,SAAS,EACT,UAAC,SAAiB,IAAK,OAAA,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB;IACzC,IAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB;IAC/C,IAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,
CAAC;YACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;KACF;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;QAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;KAC7B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB;IACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC,CAAC;;AClpBD;AACA,SAWgB,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB;IAEzB,IAAM,aAAa,GAAiC;QAClD,KAAK,EAAE;YACL,IAAI,EAAE,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY;YAC9D,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE;SACjD;KACF,CAAC;IAEF,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;QACpD,aAAa,CAAC,KAAM,CAAC,SAAS,GAAM,aAAa,CAAC,QAAQ,SAAI,aAAa,CAAC,QAAU,CAAC;KACxF;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,KAAG,aAAa,CAAC,QAAU,CAAC;KAC9D;IAED,IAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IACrE,IAAM,cAAc,GAAG,aAAa,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAC/D,IAAM,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IAC3E,IAAM,YAAY,GAAG,WAAW,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAE3D,IAAM,UAAU,GAAG;QACjB,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;IAEF,OAAO,UAAU,CAAC;AACpB,CAAC;AAwBD,SAAgB,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAAoC;IAEpC,IAAI,cAAc,IAAI,YAAY,EAAE;QAClC,OAAOC,qBAAqB,CAAC,aAAa,CAAC,CAAC;KAC7C;SAAM,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;QAC1C,OAAOC,oBAAoB,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;QAC1C,OAAOC,oBAAoB,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM;QACL,OAAOC,mBAAmB,CAAC,aAAa,CAAC,CAAC;KAC3C;AACH,CAAC;;AChFD;AACA;IAiByCL,6CAAe;IAAxD;QAAA,qEA4EC;QA3EkB,eAAS,GAAG,IAAIM,eAAe,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;;KA2ElF;IAzEO,mCAAK,GAAX,UAAY,KAAwB,EAAE,IAAwB;;;gBAC5D,sBAAQ,UAAU,CAAC,KAAK,EAAE,IAAI,CAAwC,EAAC;;;KACxE;IAEK,4CAAc,GAApB,UAAqB,WAA4B;;;;;;;wBACzC,WAAW,GAA2C,EAAE,CAAC;8BAE3D
,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA,EAApD,wBAAoD;wBACjC,qBAAM,IAAI,OAAO,CAAS,UAAC,OAAO,EAAE,MAAM;gCAC7D,KAAI,CAAC,SAAU,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAG,EAAE,MAAM;oCAC3D,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,CAAC,MAAM,CAAC,CAAC;qCACjB;iCACF,CAAC,CAAC;6BACJ,CAAC,EAAA;;wBARI,YAAY,GAAG,SAQnB;wBAEF,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;;;wBAGlD,IAAI,WAAW,CAAC,aAAa,EAAE;4BACvB,KAAyC,WAAW,CAAC,aAAa,EAA1D,SAAS,UAAA,EAAS,UAAU,WAAA,CAA+B;4BACzE,IAAI,UAAU,IAAI,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;gCACrD,WAAW,CAAC,KAAK,GAAG,UAAU,CAAC;6BAChC;iCAAM,IAAI,SAAS,EAAE;gCACpB,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;6BAC/B;yBACF;6BAAM,IAAI,WAAW,CAAC,aAAa,EAAE;4BAC9B,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;4BACF,WAAW,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;yBAClC;wBAED,IAAI,WAAW,CAAC,SAAS,KAAK,IAAI,EAAE;4BAClC,IAAI,WAAW,CAAC,KAAK,EAAE;gCACrB,WAAW,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;6BACpC;iCAAM;gCACC,OAAO,GAA2C,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC;gCACtE,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC;sCAC7C,IAAIC,WAAW,CAAC,OAAO,CAAC;sCACxB,IAAIC,UAAU,CAAC,OAAO,CAAC,CAAC;gCAC5B,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;6BAC3B;yBACF;wBAED,sBAAO,WAAW,EAAC;;;;KACpB;IAEK,4CAAc,GAApB,UAAqB,iBAAwC;;;;;;;6BACvD,IAAI,CAAC,SAAS,EAAd,wBAAc;wBACV,oBAAkB,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;8BAChE,iBAAe,IAAI,SAAS,CAAA,EAA5B,wBAA4B;wBAC9B,qBAAM,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gCAChC,KAAI,CAAC,SAAU,CAAC,SAAS,CACvB,iBAAe,EACf,iBAAiB,CAAC,OAAO,CAAC,GAAG,EAC7B,EAAE,WAAW,EAAE,IAAI,EAAE,EACrB,UAAC,GAAG;oCACF,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,EAAE,CAAC;qCACX;iCACF,CACF,CAAC;6BACH,CAAC,EAAA;;wBAbF,SAaE,CAAC;;;;;;KAGR;IACH,0BAAC;AAAD,CA5EA,CAAyC,eAAe;;AClBxD;AACA,AAKA,WAAY,oBAAoB;;;;IAI9B,6DAAG,CAAA;;;;IAKH,iEAAK,CAAA;;;;IAKL,qEAAO,CAAA;;;;IAKP,+DAAI,CAAA;AACN,CAAC,EApBWC,4BAAoB,KAApBA,4BAAoB,QAoB/B;;AC1BD;AACA;AA2EA;;;;;AAKA,SAAgB,iBAAiB,CAAC,UAAmB;;;;;;IAMnD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,QACE,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;SAC5C,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,EAChF;AACJ,CAAC;;AChGD;AACA;AAkDA;;;;;AAKA,SAAgB,0BAA0B,CAAC,SAA6B;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAED,SAAgB,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;IAEd,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;KACxB;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAClC;SAAM;QACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;KACjC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;;ACzED;AACA,SAmFgB,iBAAiB,CAAC,aAA4B;IAC5D,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,KAAK,IAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,IAAM,iBAAiB,GAAsB,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QACjF,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,GAAG,IAAI,CAAC;YACd,MAAM;SACP;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;;ACjGD;AACA,SAIgB,YAAY,CAAC,GAAQ,EAAE,IAA4B;IACjE,IAAM,OAAO,GAAG,IAAIC,cAAc,CAAC;QACjC,QAAQ,EAAE,CAAC,IAAI,IAAI,EAAE,EAAE,QAAQ;QAC/B,UAAU,EAAE;YACV,MAAM,EAAE,KAAK;SACd;KACF,CAAC,CAAC;IACH,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAgB,QAAQ,CAAC,GAAW;IAClC,IAAM,SAAS,GAAG,IAAIC,aAAa,CAAC;QAClC,aAAa,EAAE,KAAK;QACpB,eAAe,EAAE,KAAK;QACtB,YAAY,EAAE,KAAK;KACpB,CAAC,CAAC;IACH,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;SACxC;aAAM;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAW,EAAE,GAAS;gBAChD,IAAI,GAAG
,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;qBAAM;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;iBACd;aACF,CAAC,CAAC;SACJ;KACF,CAAC,CAAC;AACL,CAAC;;AClCD;AACA;IAmBE,2BACW,WAA0B,EAC1B,QAAkC;QADlC,gBAAW,GAAX,WAAW,CAAe;QAC1B,aAAQ,GAAR,QAAQ,CAA0B;KACzC;;;;;;IASG,qCAAS,GAAhB,UAAiB,QAA8B;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;KAC1C;;;;;;;IAQM,+BAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;KACtC;IACH,wBAAC;AAAD,CAAC,IAAA;AAsBD;;;AAGA;IACE,8BAAoB,OAA4B;QAA5B,YAAO,GAAP,OAAO,CAAqB;KAAI;;;;;;IAO7C,wCAAS,GAAhB,UAAiB,QAA8B;QAC7C,QACE,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAKF,4BAAoB,CAAC,GAAG;YACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EACxC;KACH;;;;;;;IAQM,kCAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACrC;KACF;IACH,2BAAC;AAAD,CAAC;;ACjGD;AACA,AAmCA;;;;AAIA,SAAgB,qBAAqB,CACnC,2BAAyD;IAEzD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,2BAA2B,EAAE,OAAO,CAAC,CAAC;SACpF;KACF,CAAC;AACJ,CAAC;AAED,AAAO,IAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,AAAO,IAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAElF;;;;AAIA;IAA2CT,+CAAiB;IAI1D,+BACE,UAAyB,EACzB,2BAAoE,EACpE,OAAiC;QAHnC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAM3B;QAJC,KAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,KAAK,uBAAuB,CAAC;QAC/F,KAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,KAAK,sBAAsB,CAAC;;KAC9F;IAEY,2CAAW,GAAxB,UAAyB,OAAwB;;;;gBAC/C,sBAAO,IAAI,CAAC,WAAW;yBACpB,WAAW,CAAC,OAAO,CAAC;yBACpB,IAAI,CAAC,UAAC,QAA+B;wBACpC,OAAA,uBAAuB,CAAC,KAAI,CAAC,gBAAgB,EAAE,KAAI,CAAC,eAAe,EAAE,QAAQ,CAAC;qBAAA,CAC/E,EAAC;;;KACL;IACH,4BAAC;AAAD,CAxBA,CAA2C,iBAAiB,GAwB3D;AAED,SAAS,oBAAoB,CAC3B,cAAqC;IAErC,IAAI,MAAqC,CAAC;IAC1C,IAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;IACxD,IAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;IACvE,IAAI,aAAa,EAAE;QACjB,IAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;SACzD;aAAM;YACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;SACjE;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC;IACtE,IAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;IAC3C,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;KACf;SAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;KAC5B;SAAM;QACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;KAC5C;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAgB,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B;IAE/B,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,IAAI,CAAC,UAAC,cAAc;QAC5E,IAAM,iBAAiB,GAAY,yBAAyB,CAAC,cAAc,CAAC,CAAC;QAC7E,IAAI,iBAAiB,EAAE;YACrB,IAAM,aAAa,GAA8B,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;YACtF,IAAI,aAAa,IAAI,aAAa,CAAC,SAAS,EAAE;gBAC5C,IAAM,UAAU,GAAW,cAAc,CAAC,MAAM,CAAC;gBAEjD,IAAM,mBAAmB,GAAa,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;gBAE3E,IAAM,wBAAwB,GAC5B,mBAAmB,CAAC,MAAM,KAAK,CAAC;qBAC/B,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC;gBAE7E,IAAM,YAAY,GAAkC,oBAAoB,CAAC,cAAc,CAAC,CAAC;gBAEzF,IAAM,oBAAoB,GAAY,wBAAwB;sBAC1D,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG;sBACrC,CAAC,CAAC,YAAY,CAAC;gBACnB,IAAI,CAAC,oBAAoB,EAAE;oBACzB,IAAM,mBAAmB,GAAsB,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;oBAC/E,IAAI,mBAAmB,EAAE;wBACvB,IAAM,mBAAmB,GAAW,iBAAiB,CAAC,aAAa,CAAC;8BAChE,6BAA2B,UAAY;8BACtC,cAAc,CAAC,UAAqB,CAAC;wBAE1C,IAAM,KAAK,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,CAAC;wBACjD,KAAK,CAAC,UAAU,GAAG,UAAU,CAAC;wBAC9B,KAAK,CAAC,OAAO,GAAGY,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;wBAC3D,KAAK,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;
wBAErD,IAAI,mBAAmB,GAA2B,cAAc,CAAC,UAAU,CAAC;wBAC5E,IAAI;4BACF,IAAI,mBAAmB,EAAE;gCACvB,IAAM,yBAAyB,GAC7B,mBAAmB,CAAC,UAAU,CAAC;gCACjC,IACE,yBAAyB;oCACzB,yBAAyB,CAAC,cAAc,KAAK,YAAY,EACzD;oCACA,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,mBAAmB,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCACjD;oCACD,IAAI,mBAAmB,CAAC,IAAI,EAAE;wCAC5B,KAAK,CAAC,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;qCACvC;oCACD,IAAI,mBAAmB,CAAC,OAAO,EAAE;wCAC/B,KAAK,CAAC,OAAO,GAAG,mBAAmB,CAAC,OAAO,CAAC;qCAC7C;iCACF;qCAAM;oCACL,IAAI,aAAa,GAAQ,mBAAmB,CAAC;oCAC7C,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,aAAa,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCAC3C;oCAED,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;oCAChC,IAAI,aAAa,CAAC,OAAO,EAAE;wCACzB,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;qCACvC;iCACF;gCAED,IAAI,yBAAyB,EAAE;oCAC7B,IAAI,kBAAkB,GAAQ,mBAAmB,CAAC;oCAClD,IACE,aAAa,CAAC,KAAK;wCACnB,yBAAyB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAC3D;wCACA,kBAAkB;4CAChB,OAAO,mBAAmB,KAAK,QAAQ;kDACnC,mBAAmB,CAAC,yBAAyB,CAAC,cAAe,CAAC;kDAC9D,EAAE,CAAC;qCACV;oCACD,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC/C,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,CACb,CAAC;iCACH;6BACF;yBACF;wBAAC,OAAO,YAAY,EAAE;4BACrB,KAAK,CAAC,OAAO,GAAG,aAAW,YAAY,CAAC,OAAO,0DAAqD,cAAc,CAAC,UAAU,iCAA8B,CAAC;yBAC7J;wBACD,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;qBAC9B;iBACF;qBAAM,IAAI,YAAY,EAAE;oBACvB,IAAI,YAAY,CAAC,UAAU,EAAE;wBAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;wBACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;4BACpF,kBAAkB;gCAChB,OAAO,kBAAkB,KAAK,QAAQ;sCAClC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;sCAC3D,EAAE,CAAC;yBACV;wBACD,IAAI;4BACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,CAC1B,CAAC;yBACH;wBAAC,OAAO,KAAK,EAAE;4BACd,IAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,WAAS,KAAK,sDAAiD,cAAc,CAAC,UAAY,CAC3F,CAAC;4BACF,SAAS,CAAC,OAAO,GAAGD,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;4BAC/D,SAAS,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;4BACzD,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;yBAClC;qBACF;yBAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;;wBAE9C,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;qBAC7E;oBAED,IAAI,YAAY,CAAC,aAAa,EAAE;wBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;qBACH;iBACF;aACF;SACF;QACD,OAAO,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;KACxC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC;IAExC,IAAM,YAAY,GAAG,UAAC,GAA6B;QACjD,IAAM,GAAG,GAAG,aAAU,GAAG,sDAAgD,iBAAiB,CAAC,UAAU,MAAG,CAAC;QACzG,IAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;QAClD,IAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,EACjB,iBAAiB,CAAC,UAAU,CAC7B,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;KAC1B,CAAC;IAEF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,IAAI,iBAAiB,CAAC,UAAU,EAAE;QACjF,IAAM,MAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;QAC1C,IAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,IAAM,iBAAiB,GAAa,CAAC,WAAW;cAC5C,EAAE;cACF,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,UAAC,SAAS,IAAK,OAAA,SAAS,CAAC,WAAW,EAAE,GAAA,CAAC,CAAC;QACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;YAC9B,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EACjF;YACA,OAAO,IAAI,OAAO,CAAwB,UAAC,OAAO;gBAChD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;aAC5B,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EAAE;YAC3F,OAAO,QAAQ,C
AAC,MAAI,CAAC;iBAClB,IAAI,CAAC,UAAC,IAAI;gBACT,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;gBACpC,OAAO,iBAAiB,CAAC;aAC1B,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C,CAAC;;ACrSD;AACA,SAyBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED,IAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AAChD,IAAM,0BAA0B,GAAG,CAAC,CAAC;AACrC,IAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AACpD,IAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAEnD;;;;AAIA;IAA4Cb,gDAAiB;;;;;;;;;;IA2B3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAXC,SAAS,QAAQ,CAAC,CAAM;YACtB,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;SAC9B;QACD,KAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;QACjF,KAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;QAC7F,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;cAC9C,gBAAgB;cAChB,iCAAiC,CAAC;QACtC,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;cAC9C,gBAAgB;cAChB,iCAAiC,CAAC;;KACvC;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAKC;QAJC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC;aAClD,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;KAC7E;IACH,6BAAC;AAAD,CAvDA,CAA4C,iBAAiB,GAuD5D;AAED;;;;;;;;AAQA,SAAS,WAAW,CAClB,MAA8B,EAC9B,UAA8B,EAC9B,SAAoB;IAEpB,IACE,UAAU,IAAI,SAAS;SACtB,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;QACxC,UAAU,KAAK,GAAG;QAClB,UAAU,KAAK,GAAG,EAClB;QACA,OAAO,KAAK,CAAC;KACd;IAED,IAAI,YAAoB,CAAC;IACzB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IAED,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;;;AAOA,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;;IAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;IAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG;QAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,KAAK,CACZ,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;IAEzB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;IAC7D,IAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;IAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;QAC7E,OAAOc,KACC,CAAC,SAAS,CAAC,aAAa,CAAC;aAC9B,IAAI,CAAC,cAAM,OAAA,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAA,CAAC;aAC3D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,GAAA,CAAC;aAChE,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,GAAA,CAAC,CAAC;KACrE;SAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;;QAEjD,IAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,C
ACT,CAAC;QACJ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;SAAM;QACL,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAClC;AACH,CAAC;;AC3ND;AACA,SAYgB,6BAA6B,CAC3C,mBAA8C;IAA9C,oCAAA,EAAA,8CAA8C;IAE9C,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;SACpF;KACF,CAAC;AACJ,CAAC;AAED;IAAmDd,uDAAiB;IAClE,uCACE,UAAyB,EACzB,OAAiC,EACzB,oBAA4B;QAHtC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHS,0BAAoB,GAApB,oBAAoB,CAAQ;;KAGrC;IAEM,mDAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;YACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAEe,YAAkB,EAAE,CAAC,CAAC;SACtE;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IACH,oCAAC;AAAD,CAfA,CAAmD,iBAAiB,GAenE;;ACtCD;AACA,SAMgB,sBAAsB;IACpC,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;AAED,SAAgB,uBAAuB;IACrC,IAAM,WAAW,GAAG;QAClB,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;IAEF,IAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,MAAIC,OAAO,EAAE,SAAIC,OAAO,EAAE,SAAIC,UAAU,EAAE,MAAG;KACrD,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC;;ACvBD;AACA,AAgBA,SAAS,cAAc;IACrB,IAAM,aAAa,GAAG;QACpB,GAAG,EAAE,YAAY;QACjB,KAAK,EAAE,SAAS,CAAC,aAAa;KAC/B,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAkB,EAClB,cAAoB;IADpB,6BAAA,EAAA,kBAAkB;IAClB,+BAAA,EAAA,oBAAoB;IAEpB,OAAO,aAAa;SACjB,GAAG,CAAC,UAAC,IAAI;QACR,IAAM,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,KAAG,cAAc,GAAG,IAAI,CAAC,KAAO,GAAG,EAAE,CAAC;QACjE,OAAO,KAAG,IAAI,CAAC,GAAG,GAAG,KAAO,CAAC;KAC9B,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAED,AAAO,IAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE,SAAgB,wBAAwB;IACtC,IAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,IAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,IAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;IAC/E,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAgB,eAAe,CAAC,aAA6B;IAC3D,IAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,IAAI,SAAS,GAAG,sBAAsB,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC;IAClG,IAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,IAAI,SAAS;UAC9C,wBAAwB,EAAE;UAC1B,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;IAAqClB,yCAAiB;IACpD,yBACW,WAA0B,EAC1B,QAAkC,EACjC,SAAiB,EACjB,WAAmB;QAJ/B,YAME,kBAAM,WAAW,EAAE,QAAQ,CAAC,SAC7B;QANU,iBAAW,GAAX,WAAW,CAAe;QAC1B,cAAQ,GAAR,QAAQ,CAA0B;QACjC,eAAS,GAAT,SAAS,CAAQ;QACjB,iBAAW,GAAX,WAAW,CAAQ;;KAG9B;IAED,qCAAW,GAAX,UAAY,OAAwB;QAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IAED,4CAAkB,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;SACrC;QAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;YAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;SACvD;KACF;IACH,sBAAC;AAAD,CAxBA,CAAqC,iBAAiB,GAwBrD;;ACvFD;AACA,AA4BO,IAAM,sBAAsB,GAAoB;IACrD,eAAe,EAAE,IAAI;IACrB,UAAU,EAAE,EAAE;CACf,CAAC;AAEF,SAAgB,cAAc,CAAC,cAAmB;IAAnB,+BAAA,EAAA,mBAAmB;IAChD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAChE;KACF,CAAC;AACJ,CAAC;AAED;IAAoCA,wCAAiB;IACnD,wBACE,UAAyB,EACzB,OAAiC,EACxB,UAAe;QAAf,2BAAA,EAAA,eAAe;QAH1B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,gBAAU,GAAV,UAAU,CAAK;;KAGzB;IAEM,oCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,cAAc,CAAC,KAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;KAC1D;IACH,qBAAC;AAAD,CAdA,CAAo
C,iBAAiB,GAcpD;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;IAEd,IAAA,OAAO,GAAa,QAAQ,QAArB,EAAE,MAAM,GAAK,QAAQ,OAAb,CAAc;IACrC,IAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxD,IACE,cAAc;SACb,MAAM,KAAK,GAAG;aACZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;aAC3D,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;aACnE,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;SAChB,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,OAAO,CAAC,aAAa;aAC5E,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,EAC9E;QACA,IAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;QAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;;;;QAKjC,IAAI,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,KAAK,OAAO,CAAC,MAAM,KAAK,MAAM,EAAE;YACnE,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;SACrB;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,GAAA,CAAC;aAC9D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;KACpD;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED,SAAS,cAAc,CAAC,QAA+B,EAAE,QAAgB;;;IAGvE,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE;QACxB,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;QAC3B,QAAQ,CAAC,GAAG,GAAG,QAAQ,CAAC;KACzB;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;;SC5Fe,oBAAoB,CAAC,YAAiB;IAAjB,6BAAA,EAAA,iBAAiB;IACpD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;SACpE;KACF,CAAC;AACJ,CAAC;AAED;IAA0CA,8CAAiB;IACzD,8BACE,UAAyB,EACzB,OAAiC,EACxB,aAAkB;QAAlB,8BAAA,EAAA,kBAAkB;QAH7B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,mBAAa,GAAb,aAAa,CAAK;;KAG5B;IAEM,0CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,gBAAgB,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;KAClE;IACH,2BAAC;AAAD,CAdA,CAA0C,iBAAiB,GAc1D;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;IAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,IAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;QACxE,IAAI,MAAM,EAAE;YACV,IAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,QACE,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;;;;iBAI3C,KAAK,CAAC,cAAM,OAAA,KAAK,GAAA,CAAC;iBAClB,IAAI,CAAC,UAAC,kBAAkB;gBACvB,IAAI,kBAAkB,EAAE;;;oBAGtB,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEe,YAAkB,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;iBACxD;gBACD,OAAO,QAAQ,CAAC;aACjB,CAAC,EACJ;SACH;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;;AAMA,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAmB;IAAnB,4BAAA,EAAA,mBAAmB;IAEnB,IAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;IAC5D,IAAI,WAAW,EAAE;QACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;KACtC;;;IAID,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;;IAGvE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;IAE1E,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;;AAMA,SAAS,yBAAyB,CAAC,IAAY;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;IACzB,IAAI,IAAI,EAAE;QACR,IAAI;YACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;SACjC;QAAC,OAAO,GAAG,EAAE;;SAEb;QACD,IACE,YAAY;YACZ,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;YACvB,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;YACA,IAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC/D,IAAI,QAAQ,EAAE;gBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;aACzB;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CA
AC;AAED;;;;;;AAMA,SAAS,sBAAsB,CAAC,GAAW;IACzC,IAAI,MAAM,CAAC;IACX,IAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;IAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,2DAAyD,GAAG,MAAG,CAAC,CAAC;KAClF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;;;AASA,SAAS,UAAU,CACjB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;IAEhC,IAAM,OAAO,GAAM,SAAS,kBAAa,QAAQ,qCAAkC,CAAC;IACpF,IAAM,MAAM,GAAM,SAAS,kBAAa,QAAQ,4BAAyB,CAAC;IAC1E,IAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;IAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;QAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,yBAAuB,QAAQ,8CAA2C,CAAC,CAAC;SAC7F;QACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;KAC/D,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;AASA,SAAS,qBAAqB,CAC5B,MAA4B,EAC5B,GAAW,EACX,eAAgC;IAEhC,IAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;IAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;IACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;QACzD,IAAM,GAAG,GAAG,GAAG,CAAC,UAAiB,CAAC;QAClC,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;YACrF,OAAO,IAAI,CAAC;SACb;aAAM;YACL,OAAOI,KACC,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;iBAClC,IAAI,CAAC,cAAM,OAAA,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,GAAA,CAAC,CAAC;SACpE;KACF,CAAC,CAAC;AACL,CAAC;;ACpMD;AACA,SAYgB,aAAa,CAC3B,sBAAgD;IAEhD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;SACvE;KACF,CAAC;AACJ,CAAC;AAED;IAAmCnB,uCAAiB;IAClD,uBACE,UAAyB,EACzB,OAAiC,EAC1B,sBAAgD;QAHzD,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHQ,4BAAsB,GAAtB,sBAAsB,CAA0B;;KAGxD;IAED,mCAAW,GAAX,UAAY,OAAwB;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KACzD;IAEM,mCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,WAAW;YAChD,OAAA,KAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC;SAAA,CAC1C,CAAC;KACH;IACH,oBAAC;AAAD,CAlBA,CAAmC,iBAAiB,GAkBnD;;ACzCD;AACA,SAwBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED;;;;;;;;;;AAUA;IAA4CA,gDAAiB;IAU3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAzBD,mCAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;QAC1C,gCAA0B,GAAG,CAAC,CAAC;QAC/B,uCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;QAC9C,uCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;QAW3C,KAAI,CAAC,UAAU,GAAG,OAAO,UAAU,KAAK,QAAQ,GAAG,UAAU,GAAG,KAAI,CAAC,0BAA0B,CAAC;QAChG,KAAI,CAAC,aAAa;YAChB,OAAO,aAAa,KAAK,QAAQ,GAAG,aAAa,GAAG,KAAI,CAAC,6BAA6B,CAAC;QACzF,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;kBAChC,gBAAgB;kBAChB,KAAI,CAAC,iCAAiC,CAAC;QAC7C,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;kBAChC,gBAAgB;kBAChB,KAAI,CAAC,iCAAiC,CAAC;;KAC9C;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,KAAK,CAAC,UAAC,KAAK,IAAK,OAAAoB,OAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;KAClE;IACH,6BAAC;AAAD,CArCA,CAA4C,iBAAiB,GAqC5D;AAED;;;;;;;AAOA,SAASC,aAAW,CAAC,MAA8B,EAAE,SAAoB;IACvE,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IACD,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;;AAMA,SAASC,iBAAe,CACtB,MAA8B,EAC9B,SAAqB,EACr
B,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;;IAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;IAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAeF,OAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;;;;;;oBAErB,SAAS,GAAGE,iBAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;0BAElD,GAAG;wBACH,GAAG,CAAC,IAAI;wBACRD,aAAW,CAAC,MAAM,EAAE,SAAS,CAAC;yBAC7B,GAAG,CAAC,IAAI,KAAK,WAAW;4BACvB,GAAG,CAAC,IAAI,KAAK,iBAAiB;4BAC9B,GAAG,CAAC,IAAI,KAAK,cAAc;4BAC3B,GAAG,CAAC,IAAI,KAAK,YAAY;4BACzB,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAA,EAPxB,wBAOwB;;;;oBAItB,qBAAME,KAAW,CAAC,SAAS,CAAC,aAAa,CAAC,EAAA;;oBAA1C,SAA0C,CAAC;oBAC3C,sBAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAC;;;oBAEvD,sBAAOH,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,OAAK,EAAE,SAAS,CAAC,EAAC;;;oBAGrE,IAAI,GAAG,EAAE;;wBAEP,sBAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAC;qBACxC;oBACD,sBAAO,iBAAiB,EAAC;;;;;CAE5B;;AC1LD;AACA,AAKA,WAAY,qBAAqB;IAC/B,kCAAS,CAAA;IACT,kCAAS,CAAA;IACT,mCAAU,CAAA;IACV,oCAAW,CAAA;IACX,wCAAe,CAAA;AACjB,CAAC,EANWI,6BAAqB,KAArBA,6BAAqB,QAMhC;;ACZD;AACA,SAYgB,WAAW,CAAC,aAA6B;IACvD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;IAAiCxB,qCAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;KACpC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;YAC1B,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IACH,kBAAC;AAAD,CAlBA,CAAiC,iBAAiB,GAkBjD;;ACvCD;AACA,AAcA;;;AAGA,AAAO,IAAM,WAAW,GAAa,WAAW,EAAE,CAAC;AACnD,IAAM,YAAY,GAAyB,IAAI,GAAG,EAAE,CAAC;AAErD;;;AAGA,SAAgB,mBAAmB,CAAC,IAAY;IAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACrB,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC1B;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;KACxC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,yBAAyB;IAChC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;IAED,IAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,IAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,IAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;IAE5D,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED;AACA;AACA;AACA,SAAS,UAAU,CAAC,GAAW;IAC7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAC;KACd;IACD,IAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QAC1B,OAAO,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC/B;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,KAAsB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;QAA9B,IAAM,OAAO,oBAAA;QAChB,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;;;YAGtB,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;aACvB;iBAAM;gBACL,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;iBACvB;aACF;SACF;aAAM;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;aACvB;SACF;KACF;IACD,YAAY,CAAC
,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACvC,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;AAGA,SAAgB,WAAW;IACzB,IAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,IAAI,OAAO,EAAE;QACX,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,IAAI,EAAE,GAAA,CAAC;aAC1B,MAAM,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,MAAM,GAAA,CAAC,CAAC;KAClC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;AAGA,SAAS,kBAAkB,CACzB,GAAW;IAEX,IAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACjC,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;QAClB,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;KAChC;IAED,IAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACvC,IAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,GAAG,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,IAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,IAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;IACtC,IAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,GAAG,IAAI,CAAC;IACpE,IAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;IAC1E,IAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,cAAc,gBAAA;KACf,CAAC;AACJ,CAAC;AAED,SAAgB,uBAAuB,CAAC,QAAiB;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,SAAS,CAAC;SAClB;KACF;IAEK,IAAA,KAAyC,kBAAkB,CAAC,QAAQ,CAAC,EAAnE,QAAQ,cAAA,EAAE,QAAQ,cAAA,EAAE,cAAc,oBAAiC,CAAC;IAC5E,IAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;IACnD,IAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,GAAG,EAAE,CAAC;IAC1E,OAAO;QACL,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ,UAAA;QACR,QAAQ,UAAA;KACT,CAAC;AACJ,CAAC;AAED,SAAgB,WAAW,CAAC,aAA6B;IACvD,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;KAC3C;IACD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;IAAiCA,qCAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;KACpC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACtD,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IACH,kBAAC;AAAD,CAlBA,CAAiC,iBAAiB,GAkBjD;;ACvKD;AACA,AAaA,IAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AACxD,IAAM,mBAAmB,GAAG,CAAC,CAAC;AAY9B,SAAgB,qBAAqB,CACnC,UAAwC;IAAxC,2BAAA,EAAA,gCAAwC;IAExC,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;SACnE;KACF,CAAC;AACJ,CAAC;AAED;;;;;;AAMA;IAA2CA,+CAAiB;IAG1D,+BAAY,UAAyB,EAAE,OAAiC,EAAE,UAAkB;QAA5F,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;;KAC9B;IAEY,2CAAW,GAAxB,UAAyB,WAA4B;;;;gBACnD,sBAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;wBACrE,OAAO,KAAI,CAAC,KAAK,CAAC,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;qBAC7C,CAAC,EAAC;;;KACJ;IAEa,qCAAK,GAAnB,UACE,WAA4B,EAC5B,YAAmC,EACnC,UAAkB;;;;;;wBAElB,IAAI,YAAY,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe,EAAE;4BACvD,sBAAO,YAAY,EAAC;yBACrB;wBAEK,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;8BAEE,gBAAgB,IAAI,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA,EAAhD,wBAAgD;wBAC5C,SAAS,GAAuB,qBAAqB,CAAC,qBAAqB,CAC/E,gBAAgB,CACjB,CAAC;6BACE,SAAS,EAAT,wBAAS;wBACX,qBAAM,KAAK,CAAC,SAAS,CAAC,EAAA;;wBAAtB,SAAsB,CAAC;wBACX,qBAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,EAAA;;wBAArD,GAAG,GA
AG,SAA+C;wBAC3D,sBAAO,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC,EAAC;4BAIxD,sBAAO,YAAY,EAAC;;;;KACrB;IAEa,2CAAqB,GAAnC,UAAoC,WAAmB;QACrD,IAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;QAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;YACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;SACrE;aAAM;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;SACnC;KACF;IAEa,+CAAyB,GAAvC,UAAwC,WAAmB;QACzD,IAAI;YACF,IAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,IAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC7C,IAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;YAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;SAC9C;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,SAAS,CAAC;SAClB;KACF;IACH,4BAAC;AAAD,CA7DA,CAA2C,iBAAiB,GA6D3D;;ACxGD;AACA,AASA,IAAM,4BAA4B,GAAG,QAAQ,CAAC;AAE9C;;;AAGA,AAAO,IAAM,6BAA6B,GAAG;IAC3C,gCAAgC;IAChC,qCAAqC;IACrC,sCAAsC;IACtC,gCAAgC;CACjC,CAAC;AAEF;;;;AAIA;IAGE,wCACE,oBAAqC,EACrC,MAAmE;QAAnE,uBAAA,EAAA,gDAAmE;QAEnE,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;QACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;IAEY,iDAAQ,GAArB;;;;;4BACsB,qBAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAA;;wBAAnE,WAAW,GAAG,SAAqD;wBACzE,IAAI,WAAW,KAAK,IAAI,EAAE;4BAClB,MAAM,GAAkB;gCAC5B,WAAW,EAAE,WAAW,CAAC,KAAK;gCAC9B,SAAS,EAAE,4BAA4B;gCACvC,SAAS,EAAE,WAAW,CAAC,kBAAkB;6BAC1C,CAAC;4BACF,sBAAO,MAAM,EAAC;yBACf;6BAAM;4BACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;yBAC/C;;;;KACF;IAEY,oDAAW,GAAxB,UAAyB,WAAwB;;;;;4BACzB,qBAAM,IAAI,CAAC,QAAQ,EAAE,EAAA;;wBAArC,aAAa,GAAG,SAAqB;wBAC3C,WAAW,CAAC,OAAO,CAAC,GAAG,CACrByB,SAAe,CAAC,eAAe,CAAC,aAAa,EAC1C,aAAa,CAAC,SAAS,SAAI,aAAa,CAAC,WAAa,CAC1D,CAAC;wBACF,sBAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,EAAC;;;;KACrC;IACH,qCAAC;AAAD,CAAC;;AC3DD;AACA,AA+JA;;;;AAIA;;;;;;;IAgCE,uBACE,WAAwD,EACxD,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;SAChC;QAED,IAAI,wBAA8D,CAAC;QACnE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;YAClC,IAAI,KAAK,GAAuB,SAAS,CAAC;YAC1C,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,6BAA6B,CAAC,QAAQ,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC,EAAE;gBAChF,KAAK,GAAM,OAAO,CAAC,OAAO,cAAW,CAAC;aACvC;YACD,wBAAwB,GAAG,IAAI,8BAA8B,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;SACnF;aAAM;YACL,wBAAwB,GAAG,WAAW,CAAC;SACxC;QAED,IAAI,wBAAwB,IAAI,CAAC,wBAAwB,CAAC,WAAW,EAAE;YACrE,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,IAAIC,mBAAiB,EAAE,CAAC;QACjE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElF,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;YACjD,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;SACzD;aAAM;YACL,sBAAsB,GAAG,mCAAmC,CAC1D,wBAAwB,EACxB,OAAO,CACR,CAAC;YACF,IAAI,OAAO,CAAC,sBAAsB,EAAE;gBAClC,IAAM,yBAAyB,GAEF,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;gBACpF,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;iBACpD;aACF;SACF;QACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;KACvD;;;;IAKD,mCAAW,GAAX,UAAY,OAAgD;QAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,IAAI,WAA4B,CAAC;QACjC,IAAI;YACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;aACvB;iBAAM;gBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;gBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC5C;SACF;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC9B;QAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CA
AC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;gBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;aACH;SACF;QACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;KAC9C;;;;;;;IAQD,4CAAoB,GAApB,UACE,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;QAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;YACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;YACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;SACxC;QAED,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;QAEtC,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,IAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;aACH;YAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;YAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,IAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;gBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;aAC3C;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBACzE,KAA2B,UAA2B,EAA3B,KAAA,aAAa,CAAC,aAAa,EAA3B,cAA2B,EAA3B,IAA2B,EAAE;oBAAnD,IAAM,YAAY,SAAA;oBACrB,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,CACzC,CAAC;oBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;wBAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;qBAC3D;oBACD,UAAU,CAAC,UAAU,CACnB,OAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,OAAG,EACrF,iBAAiB,CAClB,CAAC;iBACH;aACF;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC7E,KAA6B,UAA6B,EAA7B,KAAA,aAAa,CAAC,eAAe,EAA7B,cAA6B,EAA7B,IAA6B,EAAE;oBAAvD,IAAM,cAAc,SAAA;oBACvB,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,mBAAmB,IAAI,SAAS,EAAE;wBACpC,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,CAC3C,CAAC;wBACF,IAAI,cAAc,CAAC,gBAAgB,IAAI,SAAS,EAAE;4BAChD,IAAI,cAAc,CAAC,gBAAgB,KAAKF,6BAAqB,CAAC,KAAK,EAAE;gCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;oCACpC,mBAAmB,GAAG,EAAE,CAAC;iCAC1B;qCAAM;oCACL,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,IAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,SAAS,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;qCACvE;iCACF;6BACF;iCAAM,IACL,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;yBACF;wBACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;4BAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;gCACtC,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;oCACvC,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;wCACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;qCAC7E;iCACF;6BACF;iCAAM;gCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;6BAC/D;yBACF;wBACD,IACE,cAAc,CAAC,gBAAgB,IAAI,SAAS;4BAC5C,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,KAAK;4BAC/D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;4BAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;yBACjF;wBACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;qBACH;iBACF;aACF;YACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,IAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;YACzE,IAAI,WAAW,EAAE;gBACf,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;aACtD;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;gBAClC,KAA8B,UAA8B,EAA9B,KAAA,aAAa,CAAC,gBAAgB,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;oBAAzD,IAAM,eAAe,SAAA;oBACxB,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAA
C,UAAU,CACzB,CAAC;oBACF,IAAI,WAAW,IAAI,SAAS,EAAE;wBAC5B,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,CAC5C,CAAC;wBACF,IAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;6BACxE,sBAAsB,CAAC;wBAC1B,IAAI,sBAAsB,EAAE;4BAC1B,KAAkB,UAAwB,EAAxB,KAAA,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;gCAAvC,IAAM,GAAG,SAAA;gCACZ,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;6BACzE;yBACF;6BAAM;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;gCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;yBACH;qBACF;iBACF;aACF;YAED,IAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;YAC3E,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;oBACzB,KAAK,IAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;wBACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;qBACpF;iBACF;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBAC/C;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;oBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;iBACvC;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;oBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;iBACzD;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;oBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;iBAC7D;aACF;YAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;YAE3E,IAAI,WAAW,CAAC,kBAAkB,IAAI,SAAS,EAAE;gBAC/C,WAAW,CAAC,kBAAkB,GAAG,iBAAiB,CAAC,aAAa,CAAC,CAAC;aACnE;YAED,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;gBAC9C,OAAA,eAAe,CAAC,GAAG,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;aAAA,CAC1D,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAChC;QAED,IAAM,EAAE,GAAG,QAAQ,CAAC;QACpB,IAAI,EAAE,EAAE;YACN,MAAM;;iBAEH,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,GAAA,CAAC;iBACvF,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;SAC5B;QAED,OAAO,MAAM,CAAC;KACf;IACH,oBAAC;AAAD,CAAC,IAAA;SAEe,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;IAE5B,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;QACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;QAEF,IAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;QAC5C,IAAA,QAAQ,GAA8C,UAAU,SAAxD,EAAE,OAAO,GAAqC,UAAU,QAA/C,EAAE,cAAc,GAAqB,UAAU,eAA/B,EAAE,cAAc,GAAK,UAAU,eAAf,CAAgB;QACzE,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI;YACF,IAAI,WAAW,CAAC,IAAI,IAAI,SAAS,IAAI,QAAQ,EAAE;gBAC7C,IAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;gBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,CAC/B,CAAC;gBACF,IAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAChD,IAAI,aAAa,CAAC,KAAK,EAAE;oBACvB,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7BG,kBAAwB,CACtB,WAAW,CAAC,IAAI,EAChB,cAAc,IAAI,OAAO,IAAI,cAAe,CAC7C,EACD,EAAE,QAAQ,EAAE,OAAO,IAAI,cAAc,EAAE,CACxC,CAAC;qBACH;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE;4BAChD,QAAQ,EAAE,OAAO,IAAI,cAAc;yBACpC,CAAC,CAAC;qBACJ;iBACF;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;iBACrD;aACF;SACF;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,IAAI,KAAK,CACb,aAAU,KAAK,CAAC,OAAO,iDAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,MAAG,CACL,CAAC;SACH;KACF;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;QAC1B,KAAgC,UAAgC,EAAhC,KAAA,aAAa,CAAC,kBAAkB,EAAhC,cAAgC,EAAhC,IAAgC,EAA
E;YAA7D,IAAM,iBAAiB,SAAA;YAC1B,IAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;YACF,IAAI,sBAAsB,IAAI,SAAS,EAAE;gBACvC,IAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,CAC9C,CAAC;aACH;SACF;KACF;AACH,CAAC;AAED,SAAS,sBAAsB,CAAC,QAAa;IAC3C,OAAO,OAAO,QAAQ,CAAC,MAAM,KAAK,UAAU,CAAC;AAC/C,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;IAEjC,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,GAAG,KAAK,CAAC;KAChB;SAAM;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;QAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;YAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;SACxB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,WAAwE,EACxE,OAA6B;IAE7B,IAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;KAClF;IAED,IAAI,WAAW,EAAE;QACf,IAAI,sBAAsB,CAAC,WAAW,CAAC,EAAE;YACvC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC7B;aAAM;YACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC;SAC5C;KACF;IAED,IAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,IAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;QAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;KAC5F;IAED,IAAM,eAAe,qCAChB,sBAAsB,GACtB,OAAO,CAAC,eAAe,CAC3B,CAAC;IACF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,SAAS,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;KAC5D;IAED,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;QAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;KACzC;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;IAE3E,IAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,uBAAuB,EAAE,CAAC;IACzE,IAAI,aAAa,EAAE;QACjB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC,CAAC;KAC5C;IAED,IAAI,OAAO,CAAC,aAAa,EAAE;QACzB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;KACpD;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAID,AAkBA,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;IAEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAED,SAAgB,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;IAEtB,IAAI,KAAU,CAAC;IACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;KACjC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;gBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;aACtC;iBAAM;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;gBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;iBACnF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;gBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;wBACb,eAAe,CAAC,QAAQ;6BACvB,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;iBAClE;gBACD,KAAK,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,GAAG,oBAAoB,CAAC,aAAa,CAAC;aAC7F;;YAGD,IAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,CAAC,CAAC;SACnE;KACF;SAAM;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;SACZ;QAED,KAAK,IAAM,YAAY,IAAI,aAAa,EAAE;YACxC,IAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;YACF,IAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;YAChE,IAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,
YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;;YAEF,IAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;YACxE,IAAI,aAAa,KAAK,SAAS,EAAE;gBAC/B,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;iBACZ;gBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;aACrC;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;IAEvB,IAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACpC,IAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;;QAEnD,IAAI,MAAM,IAAI,SAAS,IAAI,iBAAiB,IAAI,MAAM,EAAE;YACtD,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;SACpC;aAAM;YACL,MAAM;SACP;KACF;IACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;QAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;QAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC7B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAgB,eAAe,CAC7B,SAAgC,EAChC,YAA2C;IAE3C,IAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;IAC9C,IAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;IAE3D,IAAM,oBAAoB,GAAG,UAAC,GAAO;QACnC,OAAA,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;YACtC,KAAK,EAAE,SAAS;SACjB,CAAC;KAAA,CAAC;IAEL,IAAI,UAAU,EAAE;QACd,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,OAAO,oBAAoB,mCACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;SACJ;QAED,IAAM,iBAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,KAAK,EAAE,CAAC;QAC3F,IAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,CAAC,IAAI,CAC1D,UAAC,CAAC,IAAK,OAAA,iBAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,GAAA,CAChD,CAAC;QACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;;;YAGjD,IAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,SAAS,CAAC,UAAU,GAAG,EAAE,CAAC;YACnF,IAAM,aAAa,GAAGC,qBAAI,UAAU,CAAyB,CAAC;YAE9D,KAAkB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAA3C,IAAM,GAAG,SAAA;gBACZ,IAAI,iBAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;iBAChD;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,KAAkB,UAA0B,EAA1B,KAAA,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAA1B,cAA0B,EAA1B,IAA0B,EAAE;oBAAzC,IAAM,GAAG,SAAA;oBACZ,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;iBACzC;aACF;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;YACpC,OAAO,aAAa,CAAC;SACtB;QAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,mCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;SACJ;KACF;IAED,IACE,UAAU;QACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;QACnCC,eAAqB,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;;QAEA,OAAO,oBAAoB,mCACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;KACJ;IAED,OAAO,oBAAoB,mCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC;;ACn1BD;AACA,SAWgB,SAAS,CAAC,MAAyB;IAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;IACjD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;SACnD;KACF,CAAC;AACJ,CAAC;AAED;IAA+B7B,mCAAiB;IAG9C,mBACE,UAAyB,EACzB,OAAiC,EACjC,MAAyB;QAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;QAH3B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,MAAM,GAAG,MAAM,CAAC;;KACtB;IAEM,+BAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAEC;QADC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,WAAW,CAAC,KAAI,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;KAC9F;IACH,gBAAC;AAAD,CAfA,CAA+B,iBAAiB,GAe/C;AAED,SAAS,WAAW,CAClB,MAAiB,EACjB,QAA+B;IAE/B,MAAM,CAAC,MAAM,CAAC,iBAAe,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAG,CAAC,CAAC;IAC/E,MAAM,CAAC,MAAM,CAAC,8BAA4B,QAAQ,CAAC,MAAQ,CAAC,CAAC;IAC7D,IAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC;IACzC,MAAM,CAAC,MAAM,CAAC,cAAY,YAAc,CAAC,CAAC;IAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;;AC9CD;AACA,A
AOA,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAM8B,8BAA4B,GAAG,QAAQ,CAAC;AAE9C;;;AAGA;;;;;;;;IAWE,0BAAY,KAAa,EAAE,mBAA0D;QAA1D,oCAAA,EAAA,oDAA0D;QATrF,wBAAmB,GAAWA,8BAA4B,CAAC;QAUzD,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;SACvD;QACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;KAChD;;;;;;;IAQD,sCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,aAAa,EAC1B,IAAI,CAAC,mBAAmB,SAAI,IAAI,CAAC,KAAO,CAC5C,CAAC;QACF,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;IACH,uBAAC;AAAD,CAAC;;AC/CD;AACA,AAOA,IAAMC,iBAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAMD,8BAA4B,GAAG,OAAO,CAAC;AAE7C;;;;;;;;;IAaE,wCACE,QAAgB,EAChB,QAAgB,EAChB,mBAA0D;QAA1D,oCAAA,EAAA,oDAA0D;QAb5D,wBAAmB,GAAWA,8BAA4B,CAAC;QAezD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;KAChD;;;;;;;IAQD,oDAAW,GAAX,UAAY,WAA4B;QACtC,IAAM,WAAW,GAAM,IAAI,CAAC,QAAQ,SAAI,IAAI,CAAC,QAAU,CAAC;QACxD,IAAM,kBAAkB,GAAM,IAAI,CAAC,mBAAmB,SAAIE,YAAmB,CAAC,WAAW,CAAG,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAACD,iBAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;QAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;IACH,qCAAC;AAAD,CAAC;;ACrDD;AACA,AAqBA;;;AAGA;;;;;IAcE,2BAAY,OAAgC;QAC1C,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAClE,MAAM,IAAI,KAAK,CACb,8HAA0H,CAC3H,CAAC;SACH;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;KAChC;;;;;;;IAQD,uCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,yEAAuE,CAAC,CACnF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;gBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACzC;YACD,KAAK,IAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;aAChE;SACF;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;aAC/E;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;gBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;aACxB;YACD,KAAK,IAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,WAAW,CAAC,GAAG,IAAO,GAAG,SAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAG,CAAC;aAClD;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;IACH,wBAAC;AAAD,CAAC;;ACxFD;AACA;IAIsC/B,0CAAiB;;;;;;;IAOrD,0BAAY,QAAgB;QAA5B,iBAUC;QATC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;YAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,QAAQ;aACxB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;KAChB;IACH,uBAAC;AAAD,CAlBA,CAAsC,iBAAiB;;ACLvD;AACA;IAIuCA,2CAAiB;;;;;;;IAOtD,2BAAY,SAAiB;QAA7B,iBAUC;QATC,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EAAE;YAC9D,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;SACtF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,SAAS;aACzB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;KAChB;IACH,wBAAC;AAAD,CAlBA,CAAuC,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end 
of file diff --git a/node_modules/@azure/ms-rest-js/dom-shim.d.ts b/node_modules/@azure/ms-rest-js/dom-shim.d.ts new file mode 100644 index 00000000..cb4ac4d9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dom-shim.d.ts @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +// d.ts shims provide types for things we use internally but are not part +// of this package's surface area. + +interface Request {} +interface RequestInit {} +interface Response {} +interface Headers {} diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts new file mode 100644 index 00000000..8fa67fd5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts @@ -0,0 +1,9 @@ +import { CommonRequestInfo, CommonRequestInit, CommonResponse, FetchHttpClient } from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +export declare class BrowserFetchHttpClient extends FetchHttpClient { + prepareRequest(_httpRequest: WebResourceLike): Promise>; + processRequest(_operationResponse: HttpOperationResponse): Promise; + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; +} +//# sourceMappingURL=browserFetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map new file mode 100644 index 00000000..a3ae8cab --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"browserFetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/browserFetchHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,iBAAiB,EACjB,cAAc,EACd,eAAe,EAChB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD,qBAAa,sBAAuB,SAAQ,eAAe;IACzD,cAAc,CAAC,YAAY,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAI5E,cAAc,CAAC,kBAAkB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;IAIxE,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;CAGnF"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js new file mode 100644 index 00000000..eea907fc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import { FetchHttpClient, } from "./fetchHttpClient"; +var BrowserFetchHttpClient = /** @class */ (function (_super) { + __extends(BrowserFetchHttpClient, _super); + function BrowserFetchHttpClient() { + return _super !== null && _super.apply(this, arguments) || this; + } + BrowserFetchHttpClient.prototype.prepareRequest = function (_httpRequest) { + return Promise.resolve({}); + }; + BrowserFetchHttpClient.prototype.processRequest = function (_operationResponse) { + return Promise.resolve(); + }; + BrowserFetchHttpClient.prototype.fetch = function (input, init) { + return fetch(input, init); + }; + return BrowserFetchHttpClient; +}(FetchHttpClient)); +export { BrowserFetchHttpClient }; +//# sourceMappingURL=browserFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map new file mode 100644 index 00000000..a47796d8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"browserFetchHttpClient.js","sourceRoot":"","sources":["../../lib/browserFetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAIL,eAAe,GAChB,MAAM,mBAAmB,CAAC;AAI3B;IAA4C,0CAAe;IAA3D;;IAYA,CAAC;IAXC,+CAAc,GAAd,UAAe,YAA6B;QAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IAC7B,CAAC;IAED,+CAAc,GAAd,UAAe,kBAAyC;QACtD,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;IAC3B,CAAC;IAED,sCAAK,GAAL,UAAM,KAAwB,EAAE,IAAwB;QACtD,OAAO,KAAK,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;IAC5B,CAAC;IACH,6BAAC;AAAD,CAAC,AAZD,CAA4C,eAAe,GAY1D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts new file mode 100644 index 00000000..f566c6f0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts @@ -0,0 +1,46 @@ +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +/** + * @interface ApiKeyCredentialOptions + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { + [x: string]: any; + }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + inQuery?: { + [x: string]: any; + }; +} +/** + * Authenticates to a service using an API key. + */ +export declare class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?; + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions); + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. 
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=apiKeyCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map new file mode 100644 index 00000000..6a91bfcc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAEtE;;;GAGG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,QAAQ,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAChC;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;CAChC;AAED;;GAEG;AACH,qBAAa,iBAAkB,YAAW,wBAAwB;IAChE;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAuB;IACjD;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAuB;IAEhD;;;OAGG;gBACS,OAAO,EAAE,uBAAuB;IAU5C;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;CAiCpE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js new file mode 100644 index 00000000..587a24af --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "../httpHeaders"; +/** + * Authenticates to a service using an API key. + */ +var ApiKeyCredentials = /** @class */ (function () { + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + function ApiKeyCredentials(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error("options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. 
+ */ + ApiKeyCredentials.prototype.signRequest = function (webResource) { + if (!webResource) { + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (var headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error("url cannot be null in the request object.")); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (var key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += key + "=" + this.inQuery[key]; + } + } + return Promise.resolve(webResource); + }; + return ApiKeyCredentials; +}()); +export { ApiKeyCredentials }; +//# sourceMappingURL=apiKeyCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map new file mode 100644 index 00000000..7f3fc82d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAmB7C;;GAEG;AACH;IAUE;;;OAGG;IACH,2BAAY,OAAgC;QAC1C,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAClE,MAAM,IAAI,KAAK,CACb,8HAA0H,CAC3H,CAAC;SACH;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;IACjC,CAAC;IAED;;;;;OAKG;IACH,uCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,yEAAuE,CAAC,CACnF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;gBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACzC;YACD,KAAK,IAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;aAChE;SACF;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;aAC/E;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;gBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;aACxB;YACD,KAAK,IAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,WAAW,CAAC,GAAG,IAAO,GAAG,SAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAG,CAAC;aAClD;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;IACH,wBAAC;AAAD,CAAC,AA/DD,IA+DC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts new file mode 100644 index 00000000..7cb357ee --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts @@ -0,0 +1,20 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResource } from "../webResource"; +import { TokenCredential } from "@azure/core-auth"; +import { TokenResponse } from "./tokenResponse"; +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. 
+ */ +export declare const azureResourceManagerEndpoints: string[]; +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. + */ +export declare class AzureIdentityCredentialAdapter implements ServiceClientCredentials { + private azureTokenCredential; + private scopes; + constructor(azureTokenCredential: TokenCredential, scopes?: string | string[]); + getToken(): Promise; + signRequest(webResource: WebResource): Promise; +} +//# sourceMappingURL=azureIdentityTokenCredentialAdapter.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map new file mode 100644 index 00000000..fff7fbdc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureIdentityTokenCredentialAdapter.d.ts","sourceRoot":"","sources":["../../../lib/credentials/azureIdentityTokenCredentialAdapter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAEtE,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACnD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAIhD;;GAEG;AACH,eAAO,MAAM,6BAA6B,UAKzC,CAAC;AAEF;;;GAGG;AACH,qBAAa,8BAA+B,YAAW,wBAAwB;IAC7E,OAAO,CAAC,oBAAoB,CAAkB;IAC9C,OAAO,CAAC,MAAM,CAAoB;gBAEhC,oBAAoB,EAAE,eAAe,EACrC,MAAM,GAAE,MAAM,GAAG,MAAM,EAA4C;IAMxD,QAAQ,IAAI,OAAO,CAAC,aAAa,CAAC;IAclC,WAAW,CAAC,WAAW,EAAE,WAAW;CAQlD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js new file mode 100644 index 00000000..3fdea9bc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __awaiter, __generator } from "tslib"; +import { Constants as MSRestConstants } from "../util/constants"; +var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ +export var azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", +]; +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. 
+ */ +var AzureIdentityCredentialAdapter = /** @class */ (function () { + function AzureIdentityCredentialAdapter(azureTokenCredential, scopes) { + if (scopes === void 0) { scopes = "https://management.azure.com/.default"; } + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + AzureIdentityCredentialAdapter.prototype.getToken = function () { + return __awaiter(this, void 0, void 0, function () { + var accessToken, result; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.azureTokenCredential.getToken(this.scopes)]; + case 1: + accessToken = _a.sent(); + if (accessToken !== null) { + result = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return [2 /*return*/, result]; + } + else { + throw new Error("Could find token for scope"); + } + return [2 /*return*/]; + } + }); + }); + }; + AzureIdentityCredentialAdapter.prototype.signRequest = function (webResource) { + return __awaiter(this, void 0, void 0, function () { + var tokenResponse; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.getToken()]; + case 1: + tokenResponse = _a.sent(); + webResource.headers.set(MSRestConstants.HeaderConstants.AUTHORIZATION, tokenResponse.tokenType + " " + tokenResponse.accessToken); + return [2 /*return*/, Promise.resolve(webResource)]; + } + }); + }); + }; + return AzureIdentityCredentialAdapter; +}()); +export { AzureIdentityCredentialAdapter }; +//# sourceMappingURL=azureIdentityTokenCredentialAdapter.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map new file mode 100644 index 00000000..fa8bacdf --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureIdentityTokenCredentialAdapter.js","sourceRoot":"","sources":["../../../lib/credentials/azureIdentityTokenCredentialAdapter.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EAAE,SAAS,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAMjE,IAAM,4BAA4B,GAAG,QAAQ,CAAC;AAE9C;;GAEG;AACH,MAAM,CAAC,IAAM,6BAA6B,GAAG;IAC3C,gCAAgC;IAChC,qCAAqC;IACrC,sCAAsC;IACtC,gCAAgC;CACjC,CAAC;AAEF;;;GAGG;AACH;IAGE,wCACE,oBAAqC,EACrC,MAAmE;QAAnE,uBAAA,EAAA,gDAAmE;QAEnE,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;QACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAEY,iDAAQ,GAArB;;;;;4BACsB,qBAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAA;;wBAAnE,WAAW,GAAG,SAAqD;wBACzE,IAAI,WAAW,KAAK,IAAI,EAAE;4BAClB,MAAM,GAAkB;gCAC5B,WAAW,EAAE,WAAW,CAAC,KAAK;gCAC9B,SAAS,EAAE,4BAA4B;gCACvC,SAAS,EAAE,WAAW,CAAC,kBAAkB;6BAC1C,CAAC;4BACF,sBAAO,MAAM,EAAC;yBACf;6BAAM;4BACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;yBAC/C;;;;;KACF;IAEY,oDAAW,GAAxB,UAAyB,WAAwB;;;;;4BACzB,qBAAM,IAAI,CAAC,QAAQ,EAAE,EAAA;;wBAArC,aAAa,GAAG,SAAqB;wBAC3C,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,eAAe,CAAC,aAAa,EAC1C,aAAa,CAAC,SAAS,SAAI,aAAa,CAAC,WAAa,CAC1D,CAAC;wBACF,sBAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,EAAC;;;;KACrC;IACH,qCAAC;AAAD,CAAC,AAjCD,IAiCC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts new file mode 100644 index 00000000..c313e525 --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts @@ -0,0 +1,24 @@ +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +export declare class BasicAuthenticationCredentials implements ServiceClientCredentials { + userName: string; + password: string; + authorizationScheme: string; + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor(userName: string, password: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=basicAuthenticationCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map new file mode 100644 index 00000000..59a2b8c9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAItE,qBAAa,8BAA+B,YAAW,wBAAwB;IAC7E,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,mBAAmB,EAAE,MAAM,CAAgC;IAE3D;;;;;;;OAOG;gBAED,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,MAAM,EAChB,mBAAmB,GAAE,MAAqC;IAa5D;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe;CAOzC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js new file mode 100644 index 00000000..e1553b0b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "../httpHeaders"; +import * as base64 from "../util/base64"; +import { Constants } from "../util/constants"; +var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +var BasicAuthenticationCredentials = /** @class */ (function () { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. 
+ */ + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + base64.encodeString(credentials); + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + }; + return BasicAuthenticationCredentials; +}()); +export { BasicAuthenticationCredentials }; +//# sourceMappingURL=basicAuthenticationCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map new file mode 100644 index 00000000..a89f01e6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,MAAM,MAAM,gBAAgB,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAM,4BAA4B,GAAG,OAAO,CAAC;AAE7C;IAKE;;;;;;;OAOG;IACH,wCACE,QAAgB,EAChB,QAAgB,EAChB,mBAA0D;QAA1D,oCAAA,EAAA,kDAA0D;QAb5D,wBAAmB,GAAW,4BAA4B,CAAC;QAezD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACH,oDAAW,GAAX,UAAY,WAA4B;QACtC,IAAM,WAAW,GAAM,IAAI,CAAC,QAAQ,SAAI,IAAI,CAAC,QAAU,CAAC;QACxD,IAAM,kBAAkB,GAAM,IAAI,CAAC,mBAAmB,SAAI,MAAM,CAAC,YAAY,CAAC,WAAW,CAAG,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;QAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;IACH,qCAAC;AAAD,CAAC,AA1CD,IA0CC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts new file mode 100644 index 00000000..34c7db7a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts @@ -0,0 +1,2 @@ +export 
declare type Authenticator = (challenge: object) => Promise; +//# sourceMappingURL=credentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map new file mode 100644 index 00000000..81ab42b0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/credentials.ts"],"names":[],"mappings":"AAGA,oBAAY,aAAa,GAAG,CAAC,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js new file mode 100644 index 00000000..a9eeec2e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=credentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map new file mode 100644 index 00000000..939147b7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.js","sourceRoot":"","sources":["../../../lib/credentials/credentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts new file mode 100644 index 00000000..481819ca --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts @@ -0,0 +1,11 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +export declare class DomainCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid DomainCredentials object. + * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + constructor(domainKey: string); +} +//# sourceMappingURL=domainCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map new file mode 100644 index 00000000..0295f07f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"domainCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/domainCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF,qBAAa,iBAAkB,SAAQ,iBAAiB;IACtD;;;;;OAKG;gBACS,SAAS,EAAE,MAAM;CAW9B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js new file mode 100644 index 00000000..51cb0ae1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import { ApiKeyCredentials } from "./apiKeyCredentials"; +var DomainCredentials = /** @class */ (function (_super) { + __extends(DomainCredentials, _super); + /** + * Creates a new EventGrid DomainCredentials object. + * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + function DomainCredentials(domainKey) { + var _this = this; + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return DomainCredentials; +}(ApiKeyCredentials)); +export { DomainCredentials }; +//# sourceMappingURL=domainCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map new file mode 100644 index 00000000..579d63e5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"domainCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/domainCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF;IAAuC,qCAAiB;IACtD;;;;;OAKG;IACH,2BAAY,SAAiB;QAA7B,iBAUC;QATC,IAAI,CAAC,SAAS,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EAAE;YAC9D,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;SACtF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,SAAS;aACzB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;IACjB,CAAC;IACH,wBAAC;AAAD,CAAC,AAlBD,CAAuC,iBAAiB,GAkBvD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts new file mode 100644 index 00000000..af0bbaca --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts @@ -0,0 +1,11 @@ +import { WebResourceLike } from "../webResource"; +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike/request to be signed. 
+ * @returns {Promise} The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=serviceClientCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map new file mode 100644 index 00000000..2c091a37 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD,MAAM,WAAW,wBAAwB;IACvC;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;CACrE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js new file mode 100644 index 00000000..e94df45c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=serviceClientCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map new file mode 100644 index 00000000..68ab1588 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts new file mode 100644 index 00000000..95a5c310 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts @@ -0,0 +1,25 @@ +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +export declare class TokenCredentials implements ServiceClientCredentials { + token: string; + authorizationScheme: string; + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor(token: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. 
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=tokenCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map new file mode 100644 index 00000000..85e3807c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/tokenCredentials.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAKtE;;GAEG;AACH,qBAAa,gBAAiB,YAAW,wBAAwB;IAC/D,KAAK,EAAE,MAAM,CAAC;IACd,mBAAmB,EAAE,MAAM,CAAgC;IAE3D;;;;;;OAMG;gBACS,KAAK,EAAE,MAAM,EAAE,mBAAmB,GAAE,MAAqC;IAQrF;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe;CAQzC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js new file mode 100644 index 00000000..70265d73 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "../httpHeaders"; +import { Constants } from "../util/constants"; +var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +var TokenCredentials = /** @class */ (function () { + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function TokenCredentials(token, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. 
+ */ + TokenCredentials.prototype.signRequest = function (webResource) { + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, this.authorizationScheme + " " + this.token); + return Promise.resolve(webResource); + }; + return TokenCredentials; +}()); +export { TokenCredentials }; +//# sourceMappingURL=tokenCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map new file mode 100644 index 00000000..302ef4e8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/tokenCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAI9C,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAM,4BAA4B,GAAG,QAAQ,CAAC;AAE9C;;GAEG;AACH;IAIE;;;;;;OAMG;IACH,0BAAY,KAAa,EAAE,mBAA0D;QAA1D,oCAAA,EAAA,kDAA0D;QATrF,wBAAmB,GAAW,4BAA4B,CAAC;QAUzD,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;SACvD;QACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACH,sCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,aAAa,EAC1B,IAAI,CAAC,mBAAmB,SAAI,IAAI,CAAC,KAAO,CAC5C,CAAC;QACF,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;IACH,uBAAC;AAAD,CAAC,AAjCD,IAiCC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts new file mode 100644 index 00000000..eb4f1798 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts @@ -0,0 +1,10 @@ +/** + * TokenResponse is defined in `@azure/ms-rest-nodeauth` and is copied here to not + * add an unnecessary dependency. + */ +export interface TokenResponse { + readonly tokenType: string; + readonly accessToken: string; + readonly [x: string]: any; +} +//# sourceMappingURL=tokenResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map new file mode 100644 index 00000000..7636a8a3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenResponse.d.ts","sourceRoot":"","sources":["../../../lib/credentials/tokenResponse.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,aAAa;IAC5B,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,QAAQ,EAAE,CAAC,EAAE,MAAM,GAAG,GAAG,CAAC;CAC3B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js new file mode 100644 index 00000000..42ec1996 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+//# sourceMappingURL=tokenResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map new file mode 100644 index 00000000..b145bf0f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenResponse.js","sourceRoot":"","sources":["../../../lib/credentials/tokenResponse.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts new file mode 100644 index 00000000..059fe935 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts @@ -0,0 +1,11 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +export declare class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + constructor(topicKey: string); +} +//# sourceMappingURL=topicCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map new file mode 100644 index 00000000..cf4a12bf --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/topicCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF,qBAAa,gBAAiB,SAAQ,iBAAiB;IACrD;;;;;OAKG;gBACS,QAAQ,EAAE,MAAM;CAW7B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js new file mode 100644 index 00000000..bed72a93 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { ApiKeyCredentials } from "./apiKeyCredentials"; +var TopicCredentials = /** @class */ (function (_super) { + __extends(TopicCredentials, _super); + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + function TopicCredentials(topicKey) { + var _this = this; + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return TopicCredentials; +}(ApiKeyCredentials)); +export { TopicCredentials }; +//# sourceMappingURL=topicCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map new file mode 100644 index 00000000..267491d1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/topicCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF;IAAsC,oCAAiB;IACrD;;;;;OAKG;IACH,0BAAY,QAAgB;QAA5B,iBAUC;QATC,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;YAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,QAAQ;aACxB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;IACjB,CAAC;IACH,uBAAC;AAAD,CAAC,AAlBD,CAAsC,iBAAiB,GAkBtD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts new file mode 100644 index 00000000..883145a4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts @@ -0,0 +1,2 @@ +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map new file mode 100644 index 00000000..9a041726 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.d.ts","sourceRoot":"","sources":["../../lib/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js new file mode 100644 index 00000000..49ae4271 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map new file mode 100644 index 00000000..d21b02eb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.js","sourceRoot":"","sources":["../../lib/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts new file mode 100644 index 00000000..a7381a24 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts @@ -0,0 +1,2 @@ +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map new file mode 100644 index 00000000..35b8b1fa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.d.ts","sourceRoot":"","sources":["../../lib/defaultHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js new file mode 100644 index 00000000..a8b56aa8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map new file mode 100644 index 00000000..1effd0f9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.js","sourceRoot":"","sources":["../../lib/defaultHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts new file mode 100644 index 00000000..eb15bc12 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts @@ -0,0 +1,23 @@ +import { HttpClient } from "./httpClient"; +import { WebResourceLike } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { HttpHeadersLike } from "./httpHeaders"; +export declare type CommonRequestInfo = string; +export declare type CommonRequestInit = Omit & { + body?: any; + headers?: any; + signal?: any; +}; +export declare type CommonResponse = Omit & { + body: any; + trailer: any; + formData: any; +}; +export declare abstract class FetchHttpClient implements HttpClient { + sendRequest(httpRequest: WebResourceLike): Promise; + abstract prepareRequest(httpRequest: WebResourceLike): Promise>; + abstract processRequest(operationResponse: HttpOperationResponse): Promise; + abstract fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; +} +export declare function parseHeaders(headers: Headers): HttpHeadersLike; +//# sourceMappingURL=fetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map new file mode 100644 index 00000000..926678cc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/fetchHttpClient.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAU7D,oBAAY,iBAAiB,GAAG,MAAM,CAAC;AAEvC,oBAAY,iBAAiB,GAAG,IAAI,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,GAAG,QAAQ,CAAC,GAAG;IACjF,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,GAAG,CAAC;CACd,CAAC;AAEF,oBAAY,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,CAAC,GAAG;IAC7E,IAAI,EAAE,GAAG,CAAC;IACV,OAAO,EAAE,GAAG,CAAC;IACb,QAAQ,EAAE,GAAG,CAAC;CACf,CAAC;AAEF,8BAAsB,eAAgB,YAAW,UAAU;IACnD,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;aAoMhE,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;aAC3E,cAAc,CAAC,iBAAiB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;aACvE,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;CAClG;AAcD,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,GAAG,eAAe,CAQ9D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js new file mode 100644 index 00000000..92c646a0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js @@ -0,0 +1,214 @@ +// Copyright (c) 
Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __assign, __awaiter, __generator } from "tslib"; +import AbortController from "abort-controller"; +import FormData from "form-data"; +import { HttpHeaders } from "./httpHeaders"; +import { RestError } from "./restError"; +import { Transform } from "stream"; +var FetchHttpClient = /** @class */ (function () { + function FetchHttpClient() { + } + FetchHttpClient.prototype.sendRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var abortController, abortListener, formData, requestForm_1, appendFormValue, _i, _a, formKey, formValue, j, contentType, body, loadedBytes_1, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, _b, _c, onDownloadProgress_1, responseBody, loadedBytes_2, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone; + return __generator(this, function (_d) { + switch (_d.label) { + case 0: + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResource) cannot be null or undefined and must be of type object."); + } + abortController = new AbortController(); + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + abortListener = function (event) { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(function () { + abortController.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + formData = httpRequest.formData; + requestForm_1 = new FormData(); + appendFormValue = function (key, value) { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (_i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + formKey = _a[_i]; + formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm_1; + httpRequest.formData = undefined; + contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm_1.getBoundary === "function") { + httpRequest.headers.set("Content-Type", "multipart/form-data; boundary=" + requestForm_1.getBoundary()); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + body = httpRequest.body + ? typeof httpRequest.body === "function" + ? 
httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + loadedBytes_1 = 0; + uploadReportStream = new Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_1 += chunk.length; + httpRequest.onUploadProgress({ loadedBytes: loadedBytes_1 }); + callback(undefined, chunk); + }, + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + return [4 /*yield*/, this.prepareRequest(httpRequest)]; + case 1: + platformSpecificRequestInit = _d.sent(); + requestInit = __assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); + _d.label = 2; + case 2: + _d.trys.push([2, 8, 9, 10]); + return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)]; + case 3: + response = _d.sent(); + headers = parseHeaders(response.headers); + _b = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: httpRequest.streamResponseBody + ? response.body + : undefined + }; + if (!!httpRequest.streamResponseBody) return [3 /*break*/, 5]; + return [4 /*yield*/, response.text()]; + case 4: + _c = _d.sent(); + return [3 /*break*/, 6]; + case 5: + _c = undefined; + _d.label = 6; + case 6: + operationResponse = (_b.bodyAsText = _c, + _b.redirected = response.redirected, + _b.url = response.url, + _b); + onDownloadProgress_1 = httpRequest.onDownloadProgress; + if (onDownloadProgress_1) { + responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + loadedBytes_2 = 0; + downloadReportStream = new Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_2 += chunk.length; + onDownloadProgress_1({ loadedBytes: loadedBytes_2 }); + callback(undefined, chunk); + }, + }); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + length_1 = parseInt(headers.get("Content-Length")) || undefined; + if (length_1) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress_1({ loadedBytes: length_1 }); + } + } + } + return [4 /*yield*/, this.processRequest(operationResponse)]; + case 7: + _d.sent(); + return [2 /*return*/, operationResponse]; + case 8: + error_1 = _d.sent(); + fetchError = error_1; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + throw fetchError; + case 9: + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(function () { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch(function (_e) { }); + } + return [7 /*endfinally*/]; + case 10: return [2 /*return*/]; + } + }); + }); + }; + return FetchHttpClient; +}()); +export { FetchHttpClient }; +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise(function (resolve) { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +export function parseHeaders(headers) { + var httpHeaders = new HttpHeaders(); + headers.forEach(function (value, key) { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +//# sourceMappingURL=fetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map new file mode 100644 index 00000000..4c8e9198 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"fetchHttpClient.js","sourceRoot":"","sources":["../../lib/fetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,eAAe,MAAM,kBAAkB,CAAC;AAC/C,OAAO,QAAQ,MAAM,WAAW,CAAC;AAKjC,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAY,SAAS,EAAE,MAAM,QAAQ,CAAC;AAsB7C;IAAA;IAwMA,CAAC;IAvMO,qCAAW,GAAjB,UAAkB,WAA4B;;;;;;wBAC5C,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;4BACnD,MAAM,IAAI,KAAK,CACb,qFAAqF,CACtF,CAAC;yBACH;wBAEK,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;wBAE9C,IAAI,WAAW,CAAC,WAAW,EAAE;4BAC3B,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;gCACnC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;6BACH;4BAED,aAAa,GAAG,UAAC,KAAY;gCAC3B,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oCAC1B,eAAe,CAAC,KAAK,EAAE,CAAC;iCACzB;4BACH,CAAC,CAAC;4BACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;yBAClE;wBAED,IAAI,WAAW,CAAC,OAAO,EAAE;4BACvB,UAAU,CAAC;gCACT,eAAe,CAAC,KAAK,EAAE,CAAC;4BAC1B,CAAC,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;yBACzB;wBAED,IAAI,WAAW,CAAC,QAAQ,EAAE;4BAClB,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;4BACrC,gBAAc,IAAI,QAAQ,EAAE,CAAC;4BAC7B,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;gCAC9C,0FAA0F;gCAC1F,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oCAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;iCACjB;gCACD,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;oCAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iCACrD;qCAAM;oCACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iCAChC;4BACH,CAAC,CAAC;4BACF,WAA2C,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;gCAAlC,OAAO;gCACV,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gCACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oCAC5B,KAAS,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wCACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qCACxC;iCACF;qCAAM;oCACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iCACrC;6BACF;4BAED,WAAW,CAAC,IAAI,GAAG,aAAW,CAAC;4BAC/B,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;4BAC3B,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;4BAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gCACpE,IAAI,OAAO,aAAW,CAAC,WAAW,KAAK,UAAU,EAAE;oCACjD,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,mCAAiC,aAAW,CAAC,WAAW,EAAI,CAC7D,CAAC;iCACH;qCAAM;oCACL,kEAAkE;oCAClE,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iCAC5C;6BACF;yBACF;wBAEG,IAAI,GAAG,WAAW,CAAC,IAAI;4BACzB,CAAC,CAAC,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;gCACtC,CAAC,CAAC,WAAW,CAAC,IAAI,EAAE;gCACpB,CAAC,CAAC,WAAW,CAAC,IAAI;4BACpB,CAAC,CAA
C,SAAS,CAAC;wBACd,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;4BAChD,gBAAc,CAAC,CAAC;4BACd,kBAAkB,GAAG,IAAI,SAAS,CAAC;gCACvC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;oCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;oCAC5B,WAAW,CAAC,gBAAiB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;oCAC/C,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;gCAC7B,CAAC;6BACF,CAAC,CAAC;4BAEH,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;6BAC/B;iCAAM;gCACL,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;6BAC9B;4BAED,IAAI,GAAG,kBAAkB,CAAC;yBAC3B;wBAEyD,qBAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,EAAA;;wBAFK,2BAA2B,GAAyB,SAEzD;wBAEK,WAAW,cACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAE,eAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,IACf,2BAA2B,CAC/B,CAAC;;;;wBAIiC,qBAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,EAAA;;wBAAzE,QAAQ,GAAmB,SAA8C;wBAEzE,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;;4BAE7C,OAAO,EAAE,OAAO;4BAChB,OAAO,EAAE,WAAW;4BACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;4BACvB,kBAAkB,EAAE,WAAW,CAAC,kBAAkB;gCAChD,CAAC,CAAG,QAAQ,CAAC,IAA0C;gCACvD,CAAC,CAAC,SAAS;;6BACD,CAAC,WAAW,CAAC,kBAAkB,EAA/B,wBAA+B;wBAAG,qBAAM,QAAQ,CAAC,IAAI,EAAE,EAAA;;wBAArB,KAAA,SAAqB,CAAA;;;wBAAG,KAAA,SAAS,CAAA;;;wBAPjF,iBAAiB,IAOf,aAAU,KAAqE;4BAC/E,aAAU,GAAE,QAAQ,CAAC,UAAU;4BAC/B,MAAG,GAAE,QAAQ,CAAC,GAAG;+BAClB,CAAC;wBAEI,uBAAqB,WAAW,CAAC,kBAAkB,CAAC;wBAC1D,IAAI,oBAAkB,EAAE;4BAChB,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;4BAExF,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;gCAC9B,gBAAc,CAAC,CAAC;gCACd,oBAAoB,GAAG,IAAI,SAAS,CAAC;oCACzC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;wCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;wCAC5B,oBAAkB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;wCACpC,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;oCAC7B,CAAC;iCACF,CAAC,CAAC;gCACH,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;gCACxC,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;6BAC7D;iCAAM;gCACC,WAAS,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;gCACrE,IAAI,QAAM,EAAE;oCACV,wEAAwE;oCACxE,oBAAkB,CAAC,EAAE,WAAW,EAAE,QAAM,EAAE,CAAC,CAAC;iCAC7C;6BACF;yBACF;wBAED,qBAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,EAAA;;wBAA5C,SAA4C,CAAC;wBAE7C,sBAAO,iBAAiB,EAAC;;;wBAEnB,UAAU,GAAe,OAAK,CAAC;wBACrC,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;4BACnC,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;6BAAM,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;4BACxC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;wBAED,MAAM,UAAU,CAAC;;wBAEjB,0BAA0B;wBAC1B,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;4BACxC,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BACzC,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;6BAC3C;4BACG,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BAC3C,IAAI,gBAAgB,CAAC,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;gCAC3D,kBAAkB,GAAG,gBAAgB,CAAC,iBAAkB,CAAC,kBAAkB,CAAC,CAAC;6BAC9E;4BAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;iCAChD,IAAI,CAAC;;gCACJ,MAAA,WAAW,CAAC,WAAW,0CAAE,mBAAmB,CAAC,OAAO,EAAE,aAAc,EAAE;gCACtE,OAAO;4BACT,CAAC,CAAC;iCACD,KAAK,CAAC,UAAC,EAAE,IAAM,CAAC,CAAC,CAAC;yBACtB;;;;;;KAEJ;IAKH,sBAAC;AAAD,CAAC,AAxMD,IAwMC;;AAED,SAAS,gBAAgB,CAAC,IAAS;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB;IACxC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO;QACzB,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAC1B,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,OAAgB;IAC3C,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,GAAG;QACzB,WAAW,CA
AC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts new file mode 100644 index 00000000..5f5f00e9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts @@ -0,0 +1,7 @@ +import { RequestPolicy } from "./policies/requestPolicy"; +/** + * An interface that can send HttpRequests and receive promised HttpResponses. + */ +export interface HttpClient extends RequestPolicy { +} +//# sourceMappingURL=httpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map new file mode 100644 index 00000000..2da0af74 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.d.ts","sourceRoot":"","sources":["../../lib/httpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAEzD;;GAEG;AACH,MAAM,WAAW,UAAW,SAAQ,aAAa;CAAG"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.js b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js new file mode 100644 index 00000000..8dca936c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=httpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map new file mode 100644 index 00000000..e8c060fd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.js","sourceRoot":"","sources":["../../lib/httpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts new file mode 100644 index 00000000..4f95f2ea --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts @@ -0,0 +1,132 @@ +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + /** + * The value of the header. + */ + value: string; +} +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export declare type RawHttpHeaders = { + [headerName: string]: string; +}; +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. 
+ */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(): RawHttpHeaders; +} +export declare function isHttpHeadersLike(object?: any): object is HttpHeadersLike; +/** + * A collection of HTTP header key/value pairs. + */ +export declare class HttpHeaders { + private readonly _headersMap; + constructor(rawHeaders?: RawHttpHeaders); + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header names that are contained in this collection. + */ + headerValues(): string[]; + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(): RawHttpHeaders; + /** + * Get the string representation of this HTTP header collection. + */ + toString(): string; + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + clone(): HttpHeaders; +} +//# sourceMappingURL=httpHeaders.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map new file mode 100644 index 00000000..051d138c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.d.ts","sourceRoot":"","sources":["../../lib/httpHeaders.ts"],"names":[],"mappings":"AAUA;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,oBAAY,cAAc,GAAG;IAAE,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAA;CAAE,CAAC;AAE9D;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;OAKG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAC5D;;;;OAIG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;IAC5C;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACtC;;;;OAIG;IACH,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACpC;;OAEG;IACH,UAAU,IAAI,cAAc,CAAC;IAC7B;;OAEG;IACH,YAAY,IAAI,UAAU,EAAE,CAAC;IAC7B;;OAEG;IACH,WAAW,IAAI,MAAM,EAAE,CAAC;IACxB;;OAEG;IACH,YAAY,IAAI,MAAM,EAAE,CAAC;IACzB;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;IACzB;;;OAGG;IACH,MAAM,IAAI,cAAc,CAAC;CAC1B;AAED,wBAAgB,iBAAiB,CAAC,MAAM,CAAC,EAAE,GAAG,GAAG,MAAM,IAAI,eAAe,CAoBzE;AAED;;GAEG;AACH,qBAAa,WAAW;IACtB,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAsC;gBAEtD,UAAU,CAAC,EAAE,cAAc;IASvC;;;;;OAKG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAOlE;;;;OAIG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAKlD;;OAEG;IACI,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAI5C;;;;OAIG;IACI,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAM1C;;OAEG;IACI,UAAU,IAAI,cAAc;IASnC;;OAEG;IACI,YAAY,IAAI,UAAU,EAAE;IAQnC;;OAEG;IACI,WAAW,IAAI,MAAM,EAAE;IAS9B;;OAEG;IACI,YAAY,IAAI,MAAM,EAAE;IAS/B;;OAEG;IACI,MAAM,IAAI,cAAc;IAI/B;;OAEG;IACI,QAAQ,IAAI,MAAM;IAIzB;;OAEG;IACI,KAAK,IAAI,WAAW;CAG5B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js new file mode 100644 index 00000000..0cc2af27 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +export function isHttpHeadersLike(object) { + if (!object || typeof object !== "object") { + return false; + } + if (typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function") { + return true; + } + return false; +} +/** + * A collection of HTTP header key/value pairs. + */ +var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (var headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. 
This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + HttpHeaders.prototype.set = function (headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + }; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + }; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + HttpHeaders.prototype.contains = function (headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + }; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + }; + /** + * Get the headers that are contained this collection as an object. + */ + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + }; + /** + * Get the headers that are contained in this collection as an array. + */ + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + }; + /** + * Get the JSON object representation of this HTTP header collection. + */ + HttpHeaders.prototype.toJson = function () { + return this.rawHeaders(); + }; + /** + * Get the string representation of this HTTP header collection. + */ + HttpHeaders.prototype.toString = function () { + return JSON.stringify(this.toJson()); + }; + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + HttpHeaders.prototype.clone = function () { + return new HttpHeaders(this.rawHeaders()); + }; + return HttpHeaders; +}()); +export { HttpHeaders }; +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map new file mode 100644 index 00000000..3a4d8ac4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.js","sourceRoot":"","sources":["../../lib/httpHeaders.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;GAEG;AACH,SAAS,YAAY,CAAC,UAAkB;IACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;AA4ED,MAAM,UAAU,iBAAiB,CAAC,MAAY;IAC5C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,KAAK,CAAC;KACd;IAED,IACE,OAAO,MAAM,CAAC,UAAU,KAAK,UAAU;QACvC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU;QAClC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,QAAQ,KAAK,UAAU;QACrC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU;QACnC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,WAAW,KAAK,UAAU;QACxC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EACnC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;GAEG;AACH;IAGE,qBAAY,UAA2B;QACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;QACtB,IAAI,UAAU,EAAE;YACd,KAAK,IAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;aAC9C;SACF;IACH,CAAC;IAED;;;;;OAKG;IACI,yBAAG,GAAV,UAAW,UAAkB,EAAE,WAA4B;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;YAC3C,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,yBAAG,GAAV,UAAW,UAAkB;QAC3B,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QACtE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;IAC5C,CAAC;IAED;;OAEG;IACI,8BAAQ,GAAf,UAAgB,UAAkB;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;IACtD,CAAC;IAED;;;;OAIG;IACI,4BAAM,GAAb,UAAc,UAAkB;QAC9B,IAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QAClD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,gCAAU,GAAjB;QACE,IAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;SAClD;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,kCAAY,GAAnB;QACE,IAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;SAC3C;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,iCAAW,GAAlB;QACE,IAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;OAEG;IACI,kCAAY,GAAnB;QACE,IAAM,YAAY,GAAa,EAAE,CAAC;QAClC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;SACrC;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED;;OAEG;IACI,4BAAM,GAAb;QACE,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;IAC3B,CAAC;IAED;;OAEG;IACI,8BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;IACvC,CAAC;IAED;;OAEG;IACI,2BAAK,GAAZ;QACE,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAC5C,CAAC;IACH,kBAAC;AAAD,CAAC,AAxHD,IAwHC"} \ No newline at end of file diff 
--git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts new file mode 100644 index 00000000..d5790e04 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts @@ -0,0 +1,83 @@ +/// +import { WebResourceLike } from "./webResource"; +import { HttpHeadersLike } from "./httpHeaders"; +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + /** + * The HTTP response status (e.g. 200) + */ + status: number; + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob { + } +} +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { + [key: string]: any; + }; + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** + * The redirected property indicates whether the response is the result of a request which was redirected. + */ + redirected?: boolean; + /** + * The url property contains the URL of the response. The value will be the final URL obtained after any redirects. + */ + url?: string; +} +/** + * The flattened response to a REST call. + * Contains the underlying HttpOperationResponse as well as + * the merged properties of the parsedBody, parsedHeaders, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. 
+ */ + _response: HttpOperationResponse; + [key: string]: any; +} +//# sourceMappingURL=httpOperationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map new file mode 100644 index 00000000..778b5e07 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.d.ts","sourceRoot":"","sources":["../../lib/httpOperationResponse.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IAEzB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;CAC1B;AAED,OAAO,CAAC,MAAM,CAAC;IACb;;;OAGG;IACH,UAAU,IAAI;KAAG;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,qBAAsB,SAAQ,YAAY;IACzD;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAEvC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;OAEG;IACH,UAAU,CAAC,EAAE,GAAG,CAAC;IAEjB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC;IAEzB;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;IAE3C;;OAEG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IAErB;;OAEG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;;;GAIG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,SAAS,EAAE,qBAAqB,CAAC;IAEjC,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js new file mode 100644 index 00000000..bdb33084 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=httpOperationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map new file mode 100644 index 00000000..14302d67 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.js","sourceRoot":"","sources":["../../lib/httpOperationResponse.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts new file mode 100644 index 00000000..7a13ab29 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts @@ -0,0 +1,22 @@ +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export declare enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF = 0, + /** + * An error log. + */ + ERROR = 1, + /** + * A warning log. + */ + WARNING = 2, + /** + * An information log. 
+ */ + INFO = 3 +} +//# sourceMappingURL=httpPipelineLogLevel.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map new file mode 100644 index 00000000..91f637cc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.d.ts","sourceRoot":"","sources":["../../lib/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,oBAAoB;IAC9B;;OAEG;IACH,GAAG,IAAA;IAEH;;OAEG;IACH,KAAK,IAAA;IAEL;;OAEG;IACH,OAAO,IAAA;IAEP;;OAEG;IACH,IAAI,IAAA;CACL"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js new file mode 100644 index 00000000..8971a068 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); +//# sourceMappingURL=httpPipelineLogLevel.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map new file mode 100644 index 00000000..d933399f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.js","sourceRoot":"","sources":["../../lib/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;GAEG;AACH,MAAM,CAAN,IAAY,oBAoBX;AApBD,WAAY,oBAAoB;IAC9B;;OAEG;IACH,6DAAG,CAAA;IAEH;;OAEG;IACH,iEAAK,CAAA;IAEL;;OAEG;IACH,qEAAO,CAAA;IAEP;;OAEG;IACH,+DAAI,CAAA;AACN,CAAC,EApBW,oBAAoB,KAApB,oBAAoB,QAoB/B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts new file mode 100644 index 00000000..fdbf56c8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts @@ -0,0 +1,35 @@ +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * A HttpPipelineLogger that will send its logs to the console. 
+ */ +export declare class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + minimumLogLevel: HttpPipelineLogLevel; + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel: HttpPipelineLogLevel); + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=httpPipelineLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map new file mode 100644 index 00000000..eac81c6b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.d.ts","sourceRoot":"","sources":["../../lib/httpPipelineLogger.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAE9D;;;GAGG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,eAAe,EAAE,oBAAoB,CAAC;IAEtC;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,yBAA0B,YAAW,kBAAkB;IAK/C,eAAe,EAAE,oBAAoB;IAJxD;;;OAGG;gBACgB,eAAe,EAAE,oBAAoB;IAExD;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAgB3D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js new file mode 100644 index 00000000..a8b1cb53 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +var ConsoleHttpPipelineLogger = /** @class */ (function () { + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel The log level threshold for what logs will be logged. + */ + function ConsoleHttpPipelineLogger(minimumLogLevel) { + this.minimumLogLevel = minimumLogLevel; + } + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. 
+ */ + ConsoleHttpPipelineLogger.prototype.log = function (logLevel, message) { + var logMessage = HttpPipelineLogLevel[logLevel] + ": " + message; + switch (logLevel) { + case HttpPipelineLogLevel.ERROR: + console.error(logMessage); + break; + case HttpPipelineLogLevel.WARNING: + console.warn(logMessage); + break; + case HttpPipelineLogLevel.INFO: + console.log(logMessage); + break; + } + }; + return ConsoleHttpPipelineLogger; +}()); +export { ConsoleHttpPipelineLogger }; +//# sourceMappingURL=httpPipelineLogger.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map new file mode 100644 index 00000000..00f17beb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.js","sourceRoot":"","sources":["../../lib/httpPipelineLogger.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAoB9D;;GAEG;AACH;IACE;;;OAGG;IACH,mCAAmB,eAAqC;QAArC,oBAAe,GAAf,eAAe,CAAsB;IAAG,CAAC;IAE5D;;;;OAIG;IACH,uCAAG,GAAH,UAAI,QAA8B,EAAE,OAAe;QACjD,IAAM,UAAU,GAAM,oBAAoB,CAAC,QAAQ,CAAC,UAAK,OAAS,CAAC;QACnE,QAAQ,QAAQ,EAAE;YAChB,KAAK,oBAAoB,CAAC,KAAK;gBAC7B,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;gBAC1B,MAAM;YAER,KAAK,oBAAoB,CAAC,OAAO;gBAC/B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;gBACzB,MAAM;YAER,KAAK,oBAAoB,CAAC,IAAI;gBAC5B,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;gBACxB,MAAM;SACT;IACH,CAAC;IACH,gCAAC;AAAD,CAAC,AA5BD,IA4BC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts new file mode 100644 index 00000000..1244b50f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts @@ -0,0 +1,42 @@ +/// +export { WebResource, WebResourceLike, HttpRequestBody, RequestPrepareOptions, HttpMethods, ParameterValue, RequestOptionsBase, TransferProgressEvent, AbortSignalLike, } from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { CommonRequestInfo, CommonRequestInit, CommonResponse } from "./fetchHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { OperationParameter, OperationQueryParameter, OperationURLParameter, ParameterPath, } from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { AgentSettings, ProxySettings, ServiceClient, ServiceClientOptions, flattenResponse, } from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions, RequestPolicyOptionsLike, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { 
throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { agentPolicy } from "./policies/agentPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { RedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { TelemetryInfo, userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +export { DeserializationContentTypes, deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; +export { MapperType, SimpleMapperType, CompositeMapperType, DictionaryMapperType, SequenceMapperType, EnumMapperType, Mapper, BaseMapper, CompositeMapper, SequenceMapper, DictionaryMapper, EnumMapper, MapperConstraints, PolymorphicDiscriminator, Serializer, UrlParameterValue, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, delay, executePromisesSequentially, generateUuid, encodeUri, ServiceCallback, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +export { TokenCredentials } from "./credentials/tokenCredentials"; +export { TokenResponse } from "./credentials/tokenResponse"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { DomainCredentials } from "./credentials/domainCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { AzureIdentityCredentialAdapter } from "./credentials/azureIdentityTokenCredentialAdapter"; +//# sourceMappingURL=msRest.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map new file mode 100644 index 00000000..7737b322 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.d.ts","sourceRoot":"","sources":["../../lib/msRest.ts"],"names":[],"mappings":";AAKA,OAAO,EACL,WAAW,EACX,eAAe,EACf,eAAe,EACf,qBAAqB,EACrB,WAAW,EACX,cAAc,EACd,kBAAkB,EAClB,qBAAqB,EACrB,eAAe,GAChB,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACzF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AACzF,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAC5F,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACrB,aAAa,GACd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EACL,aAAa,EACb,aAAa,EACb,aAAa,EACb,oBAAoB,EACpB,eAAe,GAChB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACpB,wBAAwB,GACzB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EACL,aAAa,EACb,eAAe,EACf,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,2BAA2B,EAC3B,qBAAqB,EACrB,uBAAuB,GACxB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EACV,gBAAgB,EAChB,mBAAmB,EACnB,oBAAoB,EACpB,kBAAkB,EAClB,cAAc,EACd,MAAM,EACN,UAAU,EACV,eAAe,EACf,cAAc,EACd,gBAAgB,EAChB,UAAU,EACV,iBAAiB,EACjB,wBAAwB,EACxB,UAAU,EACV,iBAAiB,EACjB,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,KAAK,EACL,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EACT,eAAe,EACf,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAG7C,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAC5D,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAAE,uBAAuB,EAAE,MAAM,iCAAiC,CAAC;AAC7F,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAClF,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC1D,OAAO,EAAE,8BAA8B,EAAE,MAAM,mDAAmD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.js b/node_modules/@azure/ms-rest-js/es/lib/msRest.js new file mode 100644 index 00000000..ba1ead7f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+/// +export { WebResource, } from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpHeaders } from "./httpHeaders"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { ServiceClient, flattenResponse, } from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicyOptions, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { agentPolicy } from "./policies/agentPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy } from "./policies/redirectPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; +export { MapperType, Serializer, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, delay, executePromisesSequentially, generateUuid, encodeUri, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +// Credentials +export { TokenCredentials } from "./credentials/tokenCredentials"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials } from "./credentials/apiKeyCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { DomainCredentials } from "./credentials/domainCredentials"; +export { AzureIdentityCredentialAdapter } from "./credentials/azureIdentityTokenCredentialAdapter"; +//# sourceMappingURL=msRest.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.js.map b/node_modules/@azure/ms-rest-js/es/lib/msRest.js.map new file mode 100644 index 00000000..038a369c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.js","sourceRoot":"","sources":["../../lib/msRest.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,yCAAyC;AAEzC,OAAO,EACL,WAAW,GASZ,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAGxD,OAAO,EAAc,WAAW,EAAmC,MAAM,eAAe,CAAC;AAGzF,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAUxC,OAAO,EAGL,aAAa,EAEb,eAAe,GAChB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACjD,OAAO,EACL,iBAAiB,EAGjB,oBAAoB,GAErB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAmB,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EAEL,eAAe,EACf,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAEL,qBAAqB,EACrB,uBAAuB,GACxB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EAcV,UAAU,EAEV,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,KAAK,EACL,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EAET,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAE7C,cAAc;AACd,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAElE,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,iCAAiC,CAAC;AAE7F,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AAEpE,OAAO,EAAE,8BAA8B,EAAE,MAAM,mDAAmD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts new file mode 100644 index 00000000..1a5dec44 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts @@ -0,0 +1,10 @@ +import { CommonRequestInfo, CommonRequestInit, CommonResponse, FetchHttpClient } from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +export declare class NodeFetchHttpClient extends FetchHttpClient { + private readonly cookieJar; + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; + prepareRequest(httpRequest: WebResourceLike): Promise>; + processRequest(operationResponse: HttpOperationResponse): Promise; +} +//# sourceMappingURL=nodeFetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map new file mode 100644 index 00000000..faa1930c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAQA,OAAO,EACL,iBAAiB,EACjB,iBAAiB,EACjB,cAAc,EACd,eAAe,EAChB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAGhD,qBAAa,mBAAoB,SAAQ,eAAe;IACtD,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAuD;IAE3E,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;IAIlF,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAgD3E,cAAc,CAAC,iBAAiB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;CAqB9E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js 
b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js new file mode 100644 index 00000000..f919e6e2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js @@ -0,0 +1,109 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __awaiter, __extends, __generator } from "tslib"; +import * as tough from "tough-cookie"; +import * as http from "http"; +import * as https from "https"; +import node_fetch from "node-fetch"; +import { FetchHttpClient, } from "./fetchHttpClient"; +import { createProxyAgent } from "./proxyAgent"; +var NodeFetchHttpClient = /** @class */ (function (_super) { + __extends(NodeFetchHttpClient, _super); + function NodeFetchHttpClient() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + return _this; + } + NodeFetchHttpClient.prototype.fetch = function (input, init) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, node_fetch(input, init)]; + }); + }); + }; + NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var requestInit, cookieString, _a, httpAgent, httpsAgent, tunnel, options, agent; + var _this = this; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + requestInit = {}; + if (!(this.cookieJar && !httpRequest.headers.get("Cookie"))) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + })]; + case 1: + cookieString = _b.sent(); + httpRequest.headers.set("Cookie", cookieString); + _b.label = 2; + case 2: + if (httpRequest.agentSettings) { + _a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https; + if (httpsAgent && httpRequest.url.startsWith("https")) { + requestInit.agent = httpsAgent; + } + else if (httpAgent) { + requestInit.agent = httpAgent; + } + } + else if (httpRequest.proxySettings) { + tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + requestInit.agent = tunnel.agent; + } + if (httpRequest.keepAlive === true) { + if (requestInit.agent) { + requestInit.agent.keepAlive = true; + } + else { + options = { keepAlive: true }; + agent = httpRequest.url.startsWith("https") + ? 
new https.Agent(options) + : new http.Agent(options); + requestInit.agent = agent; + } + } + return [2 /*return*/, requestInit]; + } + }); + }); + }; + NodeFetchHttpClient.prototype.processRequest = function (operationResponse) { + return __awaiter(this, void 0, void 0, function () { + var setCookieHeader_1; + var _this = this; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!this.cookieJar) return [3 /*break*/, 2]; + setCookieHeader_1 = operationResponse.headers.get("Set-Cookie"); + if (!(setCookieHeader_1 != undefined)) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + })]; + case 1: + _a.sent(); + _a.label = 2; + case 2: return [2 /*return*/]; + } + }); + }); + }; + return NodeFetchHttpClient; +}(FetchHttpClient)); +export { NodeFetchHttpClient }; +//# sourceMappingURL=nodeFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map new file mode 100644 index 00000000..5834bfb9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.js","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,UAAU,MAAM,YAAY,CAAC;AAEpC,OAAO,EAIL,eAAe,GAChB,MAAM,mBAAmB,CAAC;AAG3B,OAAO,EAAE,gBAAgB,EAAc,MAAM,cAAc,CAAC;AAE5D;IAAyC,uCAAe;IAAxD;QAAA,qEA4EC;QA3EkB,eAAS,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;;IA2EnF,CAAC;IAzEO,mCAAK,GAAX,UAAY,KAAwB,EAAE,IAAwB;;;gBAC5D,sBAAQ,UAAU,CAAC,KAAK,EAAE,IAAI,CAAwC,EAAC;;;KACxE;IAEK,4CAAc,GAApB,UAAqB,WAA4B;;;;;;;wBACzC,WAAW,GAA2C,EAAE,CAAC;6BAE3D,CAAA,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA,EAApD,wBAAoD;wBACjC,qBAAM,IAAI,OAAO,CAAS,UAAC,OAAO,EAAE,MAAM;gCAC7D,KAAI,CAAC,SAAU,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAG,EAAE,MAAM;oCAC3D,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,CAAC,MAAM,CAAC,CAAC;qCACjB;gCACH,CAAC,CAAC,CAAC;4BACL,CAAC,CAAC,EAAA;;wBARI,YAAY,GAAG,SAQnB;wBAEF,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;;;wBAGlD,IAAI,WAAW,CAAC,aAAa,EAAE;4BACvB,KAAyC,WAAW,CAAC,aAAa,EAA1D,SAAS,UAAA,EAAS,UAAU,WAAA,CAA+B;4BACzE,IAAI,UAAU,IAAI,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;gCACrD,WAAW,CAAC,KAAK,GAAG,UAAU,CAAC;6BAChC;iCAAM,IAAI,SAAS,EAAE;gCACpB,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;6BAC/B;yBACF;6BAAM,IAAI,WAAW,CAAC,aAAa,EAAE;4BAC9B,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;4BACF,WAAW,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;yBAClC;wBAED,IAAI,WAAW,CAAC,SAAS,KAAK,IAAI,EAAE;4BAClC,IAAI,WAAW,CAAC,KAAK,EAAE;gCACrB,WAAW,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;6BACpC;iCAAM;gCACC,OAAO,GAA2C,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC;gCACtE,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC;oCAC/C,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC;oCAC1B,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gCAC5B,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;6BAC3B;yBACF;wBAED,sBAAO,WAAW,EAAC;;;;KACpB;IAEK,4CAAc,GAApB,UAAqB,iBAAwC;;;;;;;6BACvD,IAAI,CAAC,SAAS,EAAd,wBAAc;wBACV,oBAAkB,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;6BAChE,CAAA,iBAAe,IAAI,
SAAS,CAAA,EAA5B,wBAA4B;wBAC9B,qBAAM,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gCAChC,KAAI,CAAC,SAAU,CAAC,SAAS,CACvB,iBAAe,EACf,iBAAiB,CAAC,OAAO,CAAC,GAAG,EAC7B,EAAE,WAAW,EAAE,IAAI,EAAE,EACrB,UAAC,GAAG;oCACF,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,EAAE,CAAC;qCACX;gCACH,CAAC,CACF,CAAC;4BACJ,CAAC,CAAC,EAAA;;wBAbF,SAaE,CAAC;;;;;;KAGR;IACH,0BAAC;AAAD,CAAC,AA5ED,CAAyC,eAAe,GA4EvD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts new file mode 100644 index 00000000..7eadcda9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts @@ -0,0 +1,15 @@ +import { RequestOptionsBase } from "./webResource"; +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} +//# sourceMappingURL=operationArguments.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map new file mode 100644 index 00000000..e05a58fe --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.d.ts","sourceRoot":"","sources":["../../lib/operationArguments.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,CAAC,aAAa,EAAE,MAAM,GAAG,GAAG,CAAC;IAE7B;;OAEG;IACH,OAAO,CAAC,EAAE,kBAAkB,CAAC;CAC9B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js new file mode 100644 index 00000000..c8bbde71 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=operationArguments.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map new file mode 100644 index 00000000..ee2fe5c7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.js","sourceRoot":"","sources":["../../lib/operationArguments.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts new file mode 100644 index 00000000..f5b78021 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts @@ -0,0 +1,51 @@ +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { Mapper } from "./serializer"; +export declare type ParameterPath = string | string[] | { + [propertyName: string]: ParameterPath; +}; +/** + * A common interface that all Operation parameter's extend. 
+ */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export declare function getPathStringFromParameter(parameter: OperationParameter): string; +export declare function getPathStringFromParameterPath(parameterPath: ParameterPath, mapper: Mapper): string; +//# sourceMappingURL=operationParameter.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map new file mode 100644 index 00000000..2cc31fa2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.d.ts","sourceRoot":"","sources":["../../lib/operationParameter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,oBAAY,aAAa,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,aAAa,CAAA;CAAE,CAAC;AAE1F;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,aAAa,EAAE,aAAa,CAAC;IAE7B;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAsB,SAAQ,kBAAkB;IAC/D;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAwB,SAAQ,kBAAkB;IACjE;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB;;;OAGG;IACH,gBAAgB,CAAC,EAAE,qBAAqB,CAAC;CAC1C;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,SAAS,EAAE,kBAAkB,GAAG,MAAM,CAEhF;AAED,wBAAgB,8BAA8B,CAC5C,aAAa,EAAE,aAAa,EAC5B,MAAM,EAAE,MAAM,GACb,MAAM,CAUR"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js new file mode 100644 index 00000000..8ab319a6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. 
+ */ +export function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +export function getPathStringFromParameterPath(parameterPath, mapper) { + var result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +//# sourceMappingURL=operationParameter.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map new file mode 100644 index 00000000..ba98f5f1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.js","sourceRoot":"","sources":["../../lib/operationParameter.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAkD/F;;;;GAIG;AACH,MAAM,UAAU,0BAA0B,CAAC,SAA6B;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAED,MAAM,UAAU,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;IAEd,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;KACxB;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAClC;SAAM;QACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;KACjC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts new file mode 100644 index 00000000..f99c6003 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts @@ -0,0 +1,15 @@ +import { Mapper } from "./serializer"; +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + /** + * The mapper that will be used to deserialize the response body. + */ + bodyMapper?: Mapper; +} +//# sourceMappingURL=operationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map new file mode 100644 index 00000000..d96474af --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.d.ts","sourceRoot":"","sources":["../../lib/operationResponse.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js new file mode 100644 index 00000000..b6e32813 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+//# sourceMappingURL=operationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map new file mode 100644 index 00000000..a31b9a5f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.js","sourceRoot":"","sources":["../../lib/operationResponse.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts new file mode 100644 index 00000000..9be13076 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts @@ -0,0 +1,68 @@ +import { OperationParameter, OperationQueryParameter, OperationURLParameter } from "./operationParameter"; +import { OperationResponse } from "./operationResponse"; +import { Serializer } from "./serializer"; +import { HttpMethods } from "./webResource"; +/** + * A specification that defines an operation. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. + */ + readonly headerParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + /** + * The different types of responses that this operation can return based on what status code is + * returned. 
+ */ + readonly responses: { + [responseCode: string]: OperationResponse; + }; +} +export declare function isStreamOperation(operationSpec: OperationSpec): boolean; +//# sourceMappingURL=operationSpec.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map new file mode 100644 index 00000000..f2d4efb0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.d.ts","sourceRoot":"","sources":["../../lib/operationSpec.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACtB,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AACtD,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAE5C;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,UAAU,CAAC;IAEhC;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,WAAW,CAAC;IAEjC;;;;OAIG;IACH,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE1B;;;OAGG;IACH,QAAQ,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IAEvB;;;OAGG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;IAE9B;;OAEG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,kBAAkB,CAAC;IAE1C;;OAEG;IACH,QAAQ,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;IAE9D;;OAEG;IACH,QAAQ,CAAC,eAAe,CAAC,EAAE,aAAa,CAAC,uBAAuB,CAAC,CAAC;IAElE;;;OAGG;IACH,QAAQ,CAAC,gBAAgB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAE9D;;;OAGG;IACH,QAAQ,CAAC,kBAAkB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAEhE;;;OAGG;IACH,QAAQ,CAAC,SAAS,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,iBAAiB,CAAA;KAAE,CAAC;CACnE;AAED,wBAAgB,iBAAiB,CAAC,aAAa,EAAE,aAAa,GAAG,OAAO,CAavE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js new file mode 100644 index 00000000..06b8223d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { MapperType } from "./serializer"; +export function isStreamOperation(operationSpec) { + var result = false; + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result = true; + break; + } + } + return result; +} +//# sourceMappingURL=operationSpec.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map new file mode 100644 index 00000000..667db1ad --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.js","sourceRoot":"","sources":["../../lib/operationSpec.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAQ/F,OAAO,EAAE,UAAU,EAAc,MAAM,cAAc,CAAC;AA2EtD,MAAM,UAAU,iBAAiB,CAAC,aAA4B;IAC5D,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,KAAK,IAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,IAAM,iBAAiB,GAAsB,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QACjF,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,GAAG,IAAI,CAAC;YACd,MAAM;SACP;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts new file mode 100644 index 00000000..7893df0c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts @@ -0,0 +1,10 @@ +import { AgentSettings } from "../serviceClient"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory; +export declare class AgentPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=agentPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map new file mode 100644 index 00000000..eefa217f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"agentPolicy.browser.d.ts","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD,wBAAgB,WAAW,CAAC,cAAc,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAMhF;AAED,qBAAa,WAAY,SAAQ,iBAAiB;gBACpC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB;IAKjE,WAAW,CAAC,QAAQ,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG9E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js new file mode 100644 index 00000000..ac61fd5a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +var agentNotSupportedInBrowser = new Error("AgentPolicy is not supported in browser environment"); +export function agentPolicy(_agentSettings) { + return { + create: function (_nextPolicy, _options) { + throw agentNotSupportedInBrowser; + }, + }; +} +var AgentPolicy = /** @class */ (function (_super) { + __extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw agentNotSupportedInBrowser; + return _this; + } + AgentPolicy.prototype.sendRequest = function (_request) { + throw agentNotSupportedInBrowser; + }; + return AgentPolicy; +}(BaseRequestPolicy)); +export { AgentPolicy }; +//# sourceMappingURL=agentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map new file mode 100644 index 00000000..eec280a1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agentPolicy.browser.js","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,MAAM,UAAU,WAAW,CAAC,cAA8B;IACxD,OAAO;QACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;YACrE,MAAM,0BAA0B,CAAC;QACnC,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAChD,qBAAY,UAAyB,EAAE,OAAiC;QAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,MAAM,0BAA0B,CAAC;;IACnC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,QAAyB;QAC1C,MAAM,0BAA0B,CAAC;IACnC,CAAC;IACH,kBAAC;AAAD,CAAC,AATD,CAAiC,iBAAiB,GASjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts new file mode 100644 index 00000000..84d197e0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts @@ -0,0 +1,11 @@ +import { AgentSettings } from "../serviceClient"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function agentPolicy(agentSettings?: AgentSettings): RequestPolicyFactory; +export declare class AgentPolicy extends BaseRequestPolicy { + agentSettings: AgentSettings; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, agentSettings: AgentSettings); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=agentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map new file mode 100644 index 00000000..429e0a52 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"agentPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD,wBAAgB,WAAW,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAM/E;AAED,qBAAa,WAAY,SAAQ,iBAAiB;IAChD,aAAa,EAAE,aAAa,CAAC;gBAG3B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,aAAa,EAAE,aAAa;IAMvB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js new file mode 100644 index 00000000..4cdfb5ac --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function agentPolicy(agentSettings) { + return { + create: function (nextPolicy, options) { + return new AgentPolicy(nextPolicy, options, agentSettings); + }, + }; +} +var AgentPolicy = /** @class */ (function (_super) { + __extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options, agentSettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.agentSettings = agentSettings; + return _this; + } + AgentPolicy.prototype.sendRequest = function (request) { + if (!request.agentSettings) { + request.agentSettings = this.agentSettings; + } + return this._nextPolicy.sendRequest(request); + }; + return AgentPolicy; +}(BaseRequestPolicy)); +export { AgentPolicy }; +//# sourceMappingURL=agentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map new file mode 100644 index 00000000..15d82ec2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agentPolicy.js","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,MAAM,UAAU,WAAW,CAAC,aAA6B;IACvD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;IACrC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;YAC1B,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IACH,kBAAC;AAAD,CAAC,AAlBD,CAAiC,iBAAiB,GAkBjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts new file mode 100644 index 00000000..de2ef2c4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts @@ -0,0 +1,38 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from 
"./requestPolicy"; +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export declare function deserializationPolicy(deserializationContentTypes?: DeserializationContentTypes): RequestPolicyFactory; +export declare const defaultJsonContentTypes: string[]; +export declare const defaultXmlContentTypes: string[]; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export declare class DeserializationPolicy extends BaseRequestPolicy { + readonly jsonContentTypes: string[]; + readonly xmlContentTypes: string[]; + constructor(nextPolicy: RequestPolicy, deserializationContentTypes: DeserializationContentTypes | undefined, options: RequestPolicyOptionsLike); + sendRequest(request: WebResourceLike): Promise; +} +export declare function deserializeResponseBody(jsonContentTypes: string[], xmlContentTypes: string[], response: HttpOperationResponse): Promise; +//# sourceMappingURL=deserializationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map new file mode 100644 index 00000000..f5771b5b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"deserializationPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAOjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB;;;GAGG;AACH,MAAM,WAAW,2BAA2B;IAC1C;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC;IAEhB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,EAAE,CAAC;CAChB;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CACnC,2BAA2B,CAAC,EAAE,2BAA2B,GACxD,oBAAoB,CAMtB;AAED,eAAO,MAAM,uBAAuB,UAAoC,CAAC;AACzE,eAAO,MAAM,sBAAsB,UAA8C,CAAC;AAElF;;;GAGG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,SAAgB,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAC3C,SAAgB,eAAe,EAAE,MAAM,EAAE,CAAC;gBAGxC,UAAU,EAAE,aAAa,EACzB,2BAA2B,EAAE,2BAA2B,GAAG,SAAS,EACpE,OAAO,EAAE,wBAAwB;IAUtB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAOnF;AAsCD,wBAAgB,uBAAuB,CACrC,gBAAgB,EAAE,MAAM,EAAE,EAC1B,eAAe,EAAE,MAAM,EAAE,EACzB,QAAQ,EAAE,qBAAqB,GAC9B,OAAO,CAAC,qBAAqB,CAAC,CA4HhC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js new file mode 100644 index 00000000..833bb48e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js @@ -0,0 +1,213 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __awaiter, __extends, __generator } from "tslib"; +import { isStreamOperation } from "../operationSpec"; +import { RestError } from "../restError"; +import { MapperType } from "../serializer"; +import * as utils from "../util/utils"; +import { parseXML } from "../util/xml"; +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export function deserializationPolicy(deserializationContentTypes) { + return { + create: function (nextPolicy, options) { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; +} +export var defaultJsonContentTypes = ["application/json", "text/json"]; +export var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +var DeserializationPolicy = /** @class */ (function (_super) { + __extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, deserializationContentTypes, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + _this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + return _this; + } + DeserializationPolicy.prototype.sendRequest = function (request) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy + .sendRequest(request) + .then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response); + })]; + }); + }); + }; + return DeserializationPolicy; +}(BaseRequestPolicy)); +export { DeserializationPolicy }; +function getOperationResponse(parsedResponse) { + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; + if (operationSpec) { + var operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +export function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response) { + return parse(jsonContentTypes, xmlContentTypes, response).then(function (parsedResponse) { + var shouldDeserialize = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + var operationSpec = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + var statusCode = parsedResponse.status; + var expectedStatusCodes = Object.keys(operationSpec.responses); + var hasNoExpectedStatusCodes = expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + var responseSpec = getOperationResponse(parsedResponse); + var isExpectedStatusCode = 
hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + var defaultResponseSpec = operationSpec.responses.default; + if (defaultResponseSpec) { + var initialErrorMessage = isStreamOperation(operationSpec) + ? "Unexpected status code: " + statusCode + : parsedResponse.bodyAsText; + var error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = utils.stripRequest(parsedResponse.request); + error.response = utils.stripResponse(parsedResponse); + var parsedErrorResponse = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + var defaultResponseBodyMapper = defaultResponseSpec.bodyMapper; + if (defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError") { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } + else { + var internalError = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + if (defaultResponseBodyMapper) { + var valueToDeserialize = parsedErrorResponse; + if (operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName] + : []; + } + error.body = operationSpec.serializer.deserialize(defaultResponseBodyMapper, valueToDeserialize, "error.body"); + } + } + } + catch (defaultError) { + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; + } + return Promise.reject(error); + } + } + else if (responseSpec) { + if (responseSpec.bodyMapper) { + var valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody"); + } + catch (error) { + var restError = new RestError("Error " + error + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText); + restError.request = utils.stripRequest(parsedResponse.request); + restError.response = utils.stripResponse(parsedResponse); + return Promise.reject(restError); + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + } + } + return Promise.resolve(parsedResponse); + }); +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse) { + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse, operationResponse.bodyAsText); + return Promise.reject(e); + }; + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType + ? [] + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); + if (contentComponents.length === 0 || + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1) + .then(function (body) { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} +//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js.map new file mode 100644 index 00000000..44123c3c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"deserializationPolicy.js","sourceRoot":"","sources":["../../../lib/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAI/F,OAAO,EAAiB,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACpE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAU,UAAU,EAAE,MAAM,eAAe,CAAC;AACnD,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AACvC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAoBzB;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CACnC,2BAAyD;IAEzD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,2BAA2B,EAAE,OAAO,CAAC,CAAC;QACrF,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,CAAC,IAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,MAAM,CAAC,IAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAElF;;;GAGG;AACH;IAA2C,yCAAiB;IAI1D,+BACE,UAAyB,EACzB,2BAAoE,EACpE,OAAiC;QAHnC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAM3B;QAJC,KAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,CAAC,IAAI,uBAAuB,CAAC;QAC/F,KAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,CAAC,IAAI,sBAAsB,CAAC;;IAC/F,CAAC;IAEY,2CAAW,GAAxB,UAAyB,OAAwB;;;;gBAC/C,sBAAO,IAAI,CAAC,WAAW;yBACpB,WAAW,CAAC,OAAO,CAAC;yBACpB,IAAI,CAAC,UAAC,QAA+B;wBACpC,OAAA,uBAAuB,CAAC,KAAI,CAAC,gBAAgB,EAAE,KAAI,CAAC,eAAe,EAAE,QAAQ,CAAC;oBAA9E,CAA8E,CAC/E,EAAC;;;KACL;IACH,4BAAC;AAAD,CAAC,AAxBD,CAA2C,iBAAiB,GAwB3D;;AAED,SAAS,oBAAoB,CAC3B,cAAqC;IAErC,IAAI,MAAqC,CAAC;IAC1C,IAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;IACxD,IAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;IACvE,IAAI,aAAa,EAAE;QACjB,IAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;SACzD;aAAM;YACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;SACjE;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC;IACtE,IAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;IAC3C,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;KACf;SAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;KAC5B;SAAM;QACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;KAC5C;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B;IAE/B,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,IAAI,CAAC,UAAC,cAAc;QAC5E,IAAM,iBAAiB,GAAY,yBAAyB,CAAC,cAAc,CAAC,CAAC;QAC7E,IAAI,iBAAiB,EAAE;YACrB,IAAM,aAAa,GAA8B,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;YACtF,IAAI,aAAa,IAAI,aAAa,CAAC,SAAS,EAAE;gBAC5C,IAAM,UAAU,GAAW,cAAc,CAAC,MAAM,CAAC;gBAEjD,IAAM,mBAAmB,GAAa,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;gBAE3E,IAAM,wBAAwB,GAC5B,mBAAmB,CAAC,MAAM,KAAK,CAAC;oBAChC,CAAC,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC;gBAE7E,IAAM,YAAY,GAAkC,oBAAoB,CAAC,cAAc,CAAC,CAAC;gBAEzF,IAAM,oBAAoB,GAAY,wBAAwB;oBAC5D,CAAC,CAAC,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG;oBACvC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;gBACnB,IAAI,CAAC,oBAAoB,EAAE;oBACzB,IAAM,mBAAmB,GAAsB,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;oBAC/E,IAAI,mBAAmB,EAAE;wBACvB,IAAM,mBAAmB,GAAW,iBAAiB,CAAC,aAAa,CAAC;4BAClE,CAAC,CAAC,6BAA2B,UAAY;4BACzC,CAAC,CAAE,cAAc,CAAC,UAAqB,CAAC;wBAE1C,IAAM,KAAK,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,CAAC;wBACjD,KAAK,CAAC,UAAU,GAAG,UAAU,CAAC;wBAC9B,KAAK,CAAC,OAAO,GAAG,KAAK,CAAC,YAAY,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;wBAC3D,KAAK,CAAC,QAAQ,GAAG,KAAK,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC;wBAErD,IAAI,mBAAmB,GAA2B,cAAc,CAAC,UAAU,CAAC;wBAC5E,IAAI;4BACF,IAAI,mBAAmB,EAAE;gCACvB,IAAM,yBAAyB,GAC7B,mBAAmB,CAAC,UAAU,CAAC;gCACjC,IACE,yBAAyB;oCACzB,yBAAyB,CAAC,cAAc,KAAK,YAAY,EACzD;oCACA,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,mBAAmB,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCACjD;oCACD,IAAI,mBAAmB,CAAC,IAAI,EAAE;wCAC5B,KAAK,CAAC,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;qCAC
vC;oCACD,IAAI,mBAAmB,CAAC,OAAO,EAAE;wCAC/B,KAAK,CAAC,OAAO,GAAG,mBAAmB,CAAC,OAAO,CAAC;qCAC7C;iCACF;qCAAM;oCACL,IAAI,aAAa,GAAQ,mBAAmB,CAAC;oCAC7C,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,aAAa,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCAC3C;oCAED,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;oCAChC,IAAI,aAAa,CAAC,OAAO,EAAE;wCACzB,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;qCACvC;iCACF;gCAED,IAAI,yBAAyB,EAAE;oCAC7B,IAAI,kBAAkB,GAAQ,mBAAmB,CAAC;oCAClD,IACE,aAAa,CAAC,KAAK;wCACnB,yBAAyB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAC3D;wCACA,kBAAkB;4CAChB,OAAO,mBAAmB,KAAK,QAAQ;gDACrC,CAAC,CAAC,mBAAmB,CAAC,yBAAyB,CAAC,cAAe,CAAC;gDAChE,CAAC,CAAC,EAAE,CAAC;qCACV;oCACD,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC/C,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,CACb,CAAC;iCACH;6BACF;yBACF;wBAAC,OAAO,YAAY,EAAE;4BACrB,KAAK,CAAC,OAAO,GAAG,aAAW,YAAY,CAAC,OAAO,0DAAqD,cAAc,CAAC,UAAU,iCAA8B,CAAC;yBAC7J;wBACD,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;qBAC9B;iBACF;qBAAM,IAAI,YAAY,EAAE;oBACvB,IAAI,YAAY,CAAC,UAAU,EAAE;wBAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;wBACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;4BACpF,kBAAkB;gCAChB,OAAO,kBAAkB,KAAK,QAAQ;oCACpC,CAAC,CAAC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;oCAC7D,CAAC,CAAC,EAAE,CAAC;yBACV;wBACD,IAAI;4BACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,CAC1B,CAAC;yBACH;wBAAC,OAAO,KAAK,EAAE;4BACd,IAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,WAAS,KAAK,sDAAiD,cAAc,CAAC,UAAY,CAC3F,CAAC;4BACF,SAAS,CAAC,OAAO,GAAG,KAAK,CAAC,YAAY,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;4BAC/D,SAAS,CAAC,QAAQ,GAAG,KAAK,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC;4BACzD,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;yBAClC;qBACF;yBAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;wBAC9C,uGAAuG;wBACvG,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;qBAC7E;oBAED,IAAI,YAAY,CAAC,aAAa,EAAE;wBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;qBACH;iBACF;aACF;SACF;QACD,OAAO,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;IACzC,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC;IAExC,IAAM,YAAY,GAAG,UAAC,GAA6B;QACjD,IAAM,GAAG,GAAG,aAAU,GAAG,sDAAgD,iBAAiB,CAAC,UAAU,MAAG,CAAC;QACzG,IAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;QAClD,IAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,EACjB,iBAAiB,CAAC,UAAU,CAC7B,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAC3B,CAAC,CAAC;IAEF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,IAAI,iBAAiB,CAAC,UAAU,EAAE;QACjF,IAAM,MAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;QAC1C,IAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,IAAM,iBAAiB,GAAa,CAAC,WAAW;YAC9C,CAAC,CAAC,EAAE;YACJ,CAAC,CAAC,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,UAAC,SAAS,IAAK,OAAA,SAAS,CAAC,WAAW,EAAE,EAAvB,CAAuB,CAAC,CAAC;QACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;YAC9B,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAA1C,CAA0C,CAAC,EACjF;YACA,OAAO,IAAI,OAAO,CAAwB,UAAC,OAAO;gBAChD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAzC,CAAyC,CAAC,EAAE;YAC3F,OAAO,QAAQ,CAAC,MAAI,CAAC;iBAClB,IAAI,CAAC,UAAC,IAAI;gBACT,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;gBACpC,OAAO,iBAAiB,CAAC;YAC3B,CAAC,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C,CAAC"} \ No 
newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts new file mode 100644 index 00000000..3442911a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts @@ -0,0 +1,48 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +export declare function exponentialRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export declare class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. + */ + retryInterval: number; + /** + * The minimum retry interval in milliseconds. + */ + minRetryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. + * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. 
+ */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=exponentialRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts.map new file mode 100644 index 00000000..a972a4c1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialRetryPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAGzB,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,UAAU,CAAC;CACpB;AAED,MAAM,WAAW,UAAW,SAAQ,KAAK;IACvC,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAatB;AAOD;;;GAGG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IACzB;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IAEzB;;;;;;;;OAQG;gBAED,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM;IAgBpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js new file mode 100644 index 00000000..82029bf3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { RestError } from "../restError"; +export function exponentialRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +var DEFAULT_CLIENT_RETRY_COUNT = 3; +var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +var ExponentialRetryPolicy = /** @class */ (function (_super) { + __extends(ExponentialRetryPolicy, _super); + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. + * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. 
+ * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + function isNumber(n) { + return typeof n === "number"; + } + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; +}(BaseRequestPolicy)); +export { ExponentialRetryPolicy }; +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy, statusCode, retryData) { + if (statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + var currentCount; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. 
+ */ +function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry(policy, request, response, retryData, requestError) { + retryData = updateRetryData(policy, retryData, requestError); + var isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return utils + .delay(retryData.retryInterval) + .then(function () { return policy._nextPolicy.sendRequest(request.clone()); }) + .then(function (res) { return retry(policy, request, res, retryData, undefined); }) + .catch(function (err) { return retry(policy, request, response, retryData, err); }); + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + var err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + return Promise.reject(err); + } + else { + return Promise.resolve(response); + } +} +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js.map new file mode 100644 index 00000000..f424d434 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"exponentialRetryPolicy.js","sourceRoot":"","sources":["../../../lib/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAczC,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED,IAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AAChD,IAAM,0BAA0B,GAAG,CAAC,CAAC;AACrC,IAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AACpD,IAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAEnD;;;GAGG;AACH;IAA4C,0CAAiB;IAkB3D;;;;;;;;OAQG;IACH,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAXC,SAAS,QAAQ,CAAC,CAAM;YACtB,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;QAC/B,CAAC;QACD,KAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,0BAA0B,CAAC;QACjF,KAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,6BAA6B,CAAC;QAC7F,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;QACtC,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;;IACxC,CAAC;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAKC;QAJC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAA9B,CAA8B,CAAC;aAClD,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,EAAtD,CAAsD,CAAC,CAAC;IAC9E,CAAC;IACH,6BAAC;AAAD,CAAC,AAvDD,CAA4C,iBAAiB,GAuD5D;;AAED;;;;;;;GAOG;AACH,SAAS,WAAW,CAClB,MAA8B,EAC9B,UAA8B,EAC9B,SAAoB;IAEpB,IACE,UAAU,IAAI,SAAS;QACvB,CAAC,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;QACxC,UAAU,KAAK,GAAG;QAClB,UAAU,KAAK,GAAG,EAClB;QACA,OAAO,KAAK,CAAC;KACd;IAED,IAAI,YAAoB,CAAC;IACzB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IAED,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;;GAMG;AACH,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;IAED,qBAAqB;IACrB,SAAS,CAAC,UAAU,EAAE,CAAC;IAEvB,wBAAwB;IACxB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG;QAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,KAAK,CACZ,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;IAEzB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;IAC7D,IAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;IAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;QAC7E,OAAO,KAAK;aACT,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC;aAC9B,IAAI,CAAC,cAAM,OAAA,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAA/C,CAA+C,CAAC;aAC3D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,EAAjD,CAAiD,CAAC;aAChE,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC
,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,EAAhD,CAAgD,CAAC,CAAC;KACrE;SAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;QACjD,qFAAqF;QACrF,IAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;QACJ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;SAAM;QACL,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAClC;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts new file mode 100644 index 00000000..d08160d2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts @@ -0,0 +1,10 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function generateClientRequestIdPolicy(requestIdHeaderName?: string): RequestPolicyFactory; +export declare class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + private _requestIdHeaderName; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, _requestIdHeaderName: string); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=generateClientRequestIdPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts.map new file mode 100644 index 00000000..fb5c7a1a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,6BAA6B,CAC3C,mBAAmB,SAA2B,GAC7C,oBAAoB,CAMtB;AAED,qBAAa,6BAA8B,SAAQ,iBAAiB;IAIhE,OAAO,CAAC,oBAAoB;gBAF5B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACzB,oBAAoB,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js new file mode 100644 index 00000000..d7c31181 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } + return { + create: function (nextPolicy, options) { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + __extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; + } + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, utils.generateUuid()); + } + return this._nextPolicy.sendRequest(request); + }; + return GenerateClientRequestIdPolicy; +}(BaseRequestPolicy)); +export { GenerateClientRequestIdPolicy }; +//# sourceMappingURL=generateClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js.map new file mode 100644 index 00000000..56227fae --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.js","sourceRoot":"","sources":["../../../lib/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,6BAA6B,CAC3C,mBAA8C;IAA9C,oCAAA,EAAA,8CAA8C;IAE9C,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;QACrF,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAmD,iDAAiB;IAClE,uCACE,UAAyB,EACzB,OAAiC,EACzB,oBAA4B;QAHtC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHS,0BAAoB,GAApB,oBAAoB,CAAQ;;IAGtC,CAAC;IAEM,mDAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;YACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;SACtE;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IACH,oCAAC;AAAD,CAAC,AAfD,CAAmD,iBAAiB,GAenE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts new file mode 100644 index 00000000..62399673 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts @@ -0,0 +1,10 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function logPolicy(logger?: any): RequestPolicyFactory; +export declare class LogPolicy extends BaseRequestPolicy { + logger?: any; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, logger?: any); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=logPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts.map 
b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts.map new file mode 100644 index 00000000..4108f58f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/logPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,SAAS,CAAC,MAAM,GAAE,GAAiB,GAAG,oBAAoB,CAMzE;AAED,qBAAa,SAAU,SAAQ,iBAAiB;IAC9C,MAAM,CAAC,EAAE,GAAG,CAAC;gBAGX,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,MAAM,GAAE,GAAiB;IAMpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js new file mode 100644 index 00000000..cfa3e24d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function logPolicy(logger) { + if (logger === void 0) { logger = console.log; } + return { + create: function (nextPolicy, options) { + return new LogPolicy(nextPolicy, options, logger); + }, + }; +} +var LogPolicy = /** @class */ (function (_super) { + __extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, logger) { + if (logger === void 0) { logger = console.log; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger; + return _this; + } + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy.sendRequest(request).then(function (response) { return logResponse(_this, response); }); + }; + return LogPolicy; +}(BaseRequestPolicy)); +export { LogPolicy }; +function logResponse(policy, response) { + policy.logger(">> Request: " + JSON.stringify(response.request, undefined, 2)); + policy.logger(">> Response status code: " + response.status); + var responseBody = response.bodyAsText; + policy.logger(">> Body: " + responseBody); + return Promise.resolve(response); +} +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js.map new file mode 100644 index 00000000..ef5ba965 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"logPolicy.js","sourceRoot":"","sources":["../../../lib/policies/logPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAI/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,SAAS,CAAC,MAAyB;IAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;IACjD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QACpD,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAA+B,6BAAiB;IAG9C,mBACE,UAAyB,EACzB,OAAiC,EACjC,MAAyB;QAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;QAH3B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,MAAM,GAAG,MAAM,CAAC;;IACvB,CAAC;IAEM,+BAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAEC;QADC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,WAAW,CAAC,KAAI,EAAE,QAAQ,CAAC,EAA3B,CAA2B,CAAC,CAAC;IAC/F,CAAC;IACH,gBAAC;AAAD,CAAC,AAfD,CAA+B,iBAAiB,GAe/C;;AAED,SAAS,WAAW,CAClB,MAAiB,EACjB,QAA+B;IAE/B,MAAM,CAAC,MAAM,CAAC,iBAAe,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAG,CAAC,CAAC;IAC/E,MAAM,CAAC,MAAM,CAAC,8BAA4B,QAAQ,CAAC,MAAQ,CAAC,CAAC;IAC7D,IAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC;IACzC,MAAM,CAAC,MAAM,CAAC,cAAY,YAAc,CAAC,CAAC;IAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts new file mode 100644 index 00000000..3ff8522d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts.map new file mode 100644 index 00000000..39d10a81 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.d.ts","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAOlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAQzD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js new file mode 100644 index 00000000..2fe65883 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+export function getDefaultUserAgentKey() { + return "x-ms-command-name"; +} +export function getPlatformSpecificData() { + var navigator = self.navigator; + var osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + return [osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js.map new file mode 100644 index 00000000..d96a097c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.js","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAc/F,MAAM,UAAU,sBAAsB;IACpC,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,IAAM,SAAS,GAAG,IAAI,CAAC,SAAwB,CAAC;IAChD,IAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,CAAC,SAAS,CAAC,KAAK,IAAI,SAAS,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC;KAChE,CAAC;IAEF,OAAO,CAAC,MAAM,CAAC,CAAC;AAClB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts new file mode 100644 index 00000000..1fac8884 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts.map new file mode 100644 index 00000000..9ad41835 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAGlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAYzD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js new file mode 100644 index 00000000..40622431 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import * as os from "os"; +import { Constants } from "../util/constants"; +export function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +export function getPlatformSpecificData() { + var runtimeInfo = { + key: "Node", + value: process.version, + }; + var osInfo = { + key: "OS", + value: "(" + os.arch() + "-" + os.type() + "-" + os.release() + ")", + }; + return [runtimeInfo, osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js.map new file mode 100644 index 00000000..8db5ec53 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.js","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,MAAM,UAAU,sBAAsB;IACpC,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,IAAM,WAAW,GAAG;QAClB,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;IAEF,IAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,MAAI,EAAE,CAAC,IAAI,EAAE,SAAI,EAAE,CAAC,IAAI,EAAE,SAAI,EAAE,CAAC,OAAO,EAAE,MAAG;KACrD,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts new file mode 100644 index 00000000..5c2b5740 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts @@ -0,0 +1,11 @@ +import { ProxySettings } from "../serviceClient"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts.map new file mode 100644 index 00000000..ad17d1ce --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.browser.d.ts","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD,wBAAgB,uBAAuB,CAAC,SAAS,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAErF;AAED,wBAAgB,WAAW,CAAC,cAAc,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAMhF;AAED,qBAAa,WAAY,SAAQ,iBAAiB;gBACpC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB;IAKjE,WAAW,CAAC,QAAQ,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG9E"} \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js new file mode 100644 index 00000000..c8144dee --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +var proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); +export function getDefaultProxySettings(_proxyUrl) { + return undefined; +} +export function proxyPolicy(_proxySettings) { + return { + create: function (_nextPolicy, _options) { + throw proxyNotSupportedInBrowser; + }, + }; +} +var ProxyPolicy = /** @class */ (function (_super) { + __extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw proxyNotSupportedInBrowser; + return _this; + } + ProxyPolicy.prototype.sendRequest = function (_request) { + throw proxyNotSupportedInBrowser; + }; + return ProxyPolicy; +}(BaseRequestPolicy)); +export { ProxyPolicy }; +//# sourceMappingURL=proxyPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js.map new file mode 100644 index 00000000..7fe9c537 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.browser.js","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,MAAM,UAAU,uBAAuB,CAAC,SAAkB;IACxD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,cAA8B;IACxD,OAAO;QACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;YACrE,MAAM,0BAA0B,CAAC;QACnC,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAChD,qBAAY,UAAyB,EAAE,OAAiC;QAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,MAAM,0BAA0B,CAAC;;IACnC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,QAAyB;QAC1C,MAAM,0BAA0B,CAAC;IACnC,CAAC;IACH,kBAAC;AAAD,CAAC,AATD,CAAiC,iBAAiB,GASjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts new file mode 100644 index 00000000..565fce8f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts @@ -0,0 +1,24 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +/** + * @internal + */ +export declare const noProxyList: string[]; +/** + * @internal + */ +export declare function getEnvironmentValue(name: string): string | undefined; +/** + * @internal + */ +export declare function loadNoProxy(): string[]; +export declare function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + 
constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, proxySettings: ProxySettings); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts.map new file mode 100644 index 00000000..ab119736 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD;;GAEG;AACH,eAAO,MAAM,WAAW,EAAE,MAAM,EAAkB,CAAC;AAGnD;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAOpE;AA+CD;;GAEG;AACH,wBAAgB,WAAW,IAAI,MAAM,EAAE,CAUtC;AA4BD,wBAAgB,uBAAuB,CAAC,QAAQ,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAiBpF;AAED,wBAAgB,WAAW,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAS/E;AAED,qBAAa,WAAY,SAAQ,iBAAiB;IAChD,aAAa,EAAE,aAAa,CAAC;gBAG3B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,aAAa,EAAE,aAAa;IAMvB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js new file mode 100644 index 00000000..2f0ef862 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Constants } from "../util/constants"; +import { URLBuilder } from "../url"; +/** + * @internal + */ +export var noProxyList = loadNoProxy(); +var byPassedList = new Map(); +/** + * @internal + */ +export function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + var allProxy = getEnvironmentValue(Constants.ALL_PROXY); + var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. 
+// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri) { + if (noProxyList.length === 0) { + return false; + } + var host = URLBuilder.parse(uri).getHost(); + if (byPassedList.has(host)) { + return byPassedList.get(host); + } + var isBypassedFlag = false; + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var pattern = noProxyList_1[_i]; + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + byPassedList.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +export function loadNoProxy() { + var noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + return noProxy + .split(",") + .map(function (item) { return item.trim(); }) + .filter(function (item) { return item.length; }); + } + return []; +} +/** + * @internal + */ +function extractAuthFromUrl(url) { + var atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + var schemeIndex = url.indexOf("://"); + var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + var auth = url.substring(authStart, atIndex); + var colonIndex = auth.indexOf(":"); + var hasPassword = colonIndex !== -1; + var username = hasPassword ? auth.substring(0, colonIndex) : auth; + var password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username: username, + password: password, + urlWithoutAuth: urlWithoutAuth, + }; +} +export function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth; + var parsedUrl = URLBuilder.parse(urlWithoutAuth); + var schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username: username, + password: password, + }; +} +export function proxyPolicy(proxySettings) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + return { + create: function (nextPolicy, options) { + return new ProxyPolicy(nextPolicy, options, proxySettings); + }, + }; +} +var ProxyPolicy = /** @class */ (function (_super) { + __extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options, proxySettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.proxySettings = proxySettings; + return _this; + } + ProxyPolicy.prototype.sendRequest = function (request) { + if (!request.proxySettings && !isBypassed(request.url)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + }; + return ProxyPolicy; +}(BaseRequestPolicy)); +export { ProxyPolicy }; +//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js.map new file mode 100644 index 00000000..86f8ba63 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.js","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC;;GAEG;AACH,MAAM,CAAC,IAAM,WAAW,GAAa,WAAW,EAAE,CAAC;AACnD,IAAM,YAAY,GAAyB,IAAI,GAAG,EAAE,CAAC;AAErD;;GAEG;AACH,MAAM,UAAU,mBAAmB,CAAC,IAAY;IAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACrB,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC1B;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;KACxC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,yBAAyB;IAChC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;IAED,IAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,IAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,IAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;IAE5D,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED,iEAAiE;AACjE,2FAA2F;AAC3F,mMAAmM;AACnM,SAAS,UAAU,CAAC,GAAW;IAC7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAC;KACd;IACD,IAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QAC1B,OAAO,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC/B;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,KAAsB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;QAA9B,IAAM,OAAO,oBAAA;QAChB,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YACtB,mEAAmE;YACnE,mDAAmD;YACnD,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;aACvB;iBAAM;gBACL,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;iBACvB;aACF;SACF;aAAM;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;aACvB;SACF;KACF;IACD,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACvC,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,IAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,IAAI,OAAO,EAAE;QACX,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,IAAI,EAAE,EAAX,CAAW,CAAC;aAC1B,MAAM,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,MAAM,EAAX,CAAW,CAAC,CAAC;KAClC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;GAEG;AACH
,SAAS,kBAAkB,CACzB,GAAW;IAEX,IAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACjC,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;QAClB,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;KAChC;IAED,IAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACvC,IAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3D,IAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,IAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,IAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;IACtC,IAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACpE,IAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1E,IAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,cAAc,gBAAA;KACf,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,uBAAuB,CAAC,QAAiB;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,SAAS,CAAC;SAClB;KACF;IAEK,IAAA,KAAyC,kBAAkB,CAAC,QAAQ,CAAC,EAAnE,QAAQ,cAAA,EAAE,QAAQ,cAAA,EAAE,cAAc,oBAAiC,CAAC;IAC5E,IAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;IACnD,IAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC1E,OAAO;QACL,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ,UAAA;QACR,QAAQ,UAAA;KACT,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,aAA6B;IACvD,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;KAC3C;IACD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;IACrC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACtD,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IACH,kBAAC;AAAD,CAAC,AAlBD,CAAiC,iBAAiB,GAkBjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts new file mode 100644 index 00000000..308149b7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts @@ -0,0 +1,18 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +/** + * Options for how redirect responses are handled. 
+ */ +export interface RedirectOptions { + handleRedirects: boolean; + maxRetries?: number; +} +export declare const DefaultRedirectOptions: RedirectOptions; +export declare function redirectPolicy(maximumRetries?: number): RequestPolicyFactory; +export declare class RedirectPolicy extends BaseRequestPolicy { + readonly maxRetries: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, maxRetries?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=redirectPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts.map new file mode 100644 index 00000000..94f16bbc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"redirectPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/redirectPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB;;GAEG;AACH,MAAM,WAAW,eAAe;IAI9B,eAAe,EAAE,OAAO,CAAC;IAMzB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,eAAO,MAAM,sBAAsB,EAAE,eAGpC,CAAC;AAEF,wBAAgB,cAAc,CAAC,cAAc,SAAK,GAAG,oBAAoB,CAMxE;AAED,qBAAa,cAAe,SAAQ,iBAAiB;IAIjD,QAAQ,CAAC,UAAU;gBAFnB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACxB,UAAU,SAAK;IAKnB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js new file mode 100644 index 00000000..f6eda4bb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import { URLBuilder } from "../url"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export var DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +export function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } + return { + create: function (nextPolicy, options) { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +var RedirectPolicy = /** @class */ (function (_super) { + __extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; + } + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request) + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; +}(BaseRequestPolicy)); +export { RedirectPolicy }; +function handleRedirect(policy, response, currentRetries) { + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries))) { + var builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }) + .then(function (res) { return recordRedirect(res, request.url); }); + } + return Promise.resolve(response); +} +function recordRedirect(response, redirect) { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js.map new file mode 100644 index 00000000..af97d7c3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"redirectPolicy.js","sourceRoot":"","sources":["../../../lib/policies/redirectPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAkBzB,MAAM,CAAC,IAAM,sBAAsB,GAAoB;IACrD,eAAe,EAAE,IAAI;IACrB,UAAU,EAAE,EAAE;CACf,CAAC;AAEF,MAAM,UAAU,cAAc,CAAC,cAAmB;IAAnB,+BAAA,EAAA,mBAAmB;IAChD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QACjE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAoC,kCAAiB;IACnD,wBACE,UAAyB,EACzB,OAAiC,EACxB,UAAe;QAAf,2BAAA,EAAA,eAAe;QAH1B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,gBAAU,GAAV,UAAU,CAAK;;IAG1B,CAAC;IAEM,oCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,cAAc,CAAC,KAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,EAAjC,CAAiC,CAAC,CAAC;IAC3D,CAAC;IACH,qBAAC;AAAD,CAAC,AAdD,CAAoC,iBAAiB,GAcpD;;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;IAEd,IAAA,OAAO,GAAa,QAAQ,QAArB,EAAE,MAAM,GAAK,QAAQ,OAAb,CAAc;IACrC,IAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxD,IACE,cAAc;QACd,CAAC,MAAM,KAAK,GAAG;YACb,CAAC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC5D,CAAC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YACpE,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;QACjB,CAAC,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,OAAO,CAAC,aAAa,CAAC;YAC9E,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,EAC9E;QACA,IAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;QAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;QAEjC,uEAAuE;QACvE,+EAA+E;QAC/E,+GAA+G;QAC/G,IAAI,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,CAAC,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,EAAE;YACnE,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;SACrB;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,EAA/C,CAA+C,CAAC;aAC9D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,EAAhC,CAAgC,CAAC,CAAC;KACpD;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED,SAAS,cAAc,CAAC,QAA+B,EAAE,QAAgB;IACvE,oEAAoE;IACpE,wCAAwC;IACxC,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE;QACxB,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;QAC3B,QAAQ,CAAC,GAAG,GAAG,QAAQ,CAAC;KACzB;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts new file mode 100644 index 00000000..ff3e61d1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts @@ -0,0 +1,71 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. 
+ */ +export declare type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; +export interface RequestPolicy { + sendRequest(httpRequest: WebResourceLike): Promise; +} +export declare abstract class BaseRequestPolicy implements RequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptionsLike; + protected constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike); + abstract sendRequest(webResource: WebResourceLike): Promise; + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export declare class RequestPolicyOptions implements RequestPolicyOptionsLike { + private _logger?; + constructor(_logger?: HttpPipelineLogger | undefined); + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. 
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=requestPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts.map new file mode 100644 index 00000000..41e5568e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/requestPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAC/D,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,oBAAY,oBAAoB,GAAG;IACjC,MAAM,CAAC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB,GAAG,aAAa,CAAC;CACrF,CAAC;AAEF,MAAM,WAAW,aAAa;IAC5B,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC,CAAC;CAC3E;AAED,8BAAsB,iBAAkB,YAAW,aAAa;IAE5D,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC,QAAQ,CAAC,QAAQ,EAAE,wBAAwB;IAF7C,SAAS,aACE,WAAW,EAAE,aAAa,EAC1B,QAAQ,EAAE,wBAAwB;aAG7B,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAEzF;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAIzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAGlE;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;;OAIG;IACH,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO,CAAC;IAEnD;;;;;OAKG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,oBAAqB,YAAW,wBAAwB;IACvD,OAAO,CAAC,OAAO,CAAC;gBAAR,OAAO,CAAC,gCAAoB;IAEhD;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAQzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAKlE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js new file mode 100644 index 00000000..16fb9b30 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { + return this._options.shouldLog(logLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + BaseRequestPolicy.prototype.log = function (logLevel, message) { + this._options.log(logLevel, message); + }; + return BaseRequestPolicy; +}()); +export { BaseRequestPolicy }; +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. 
+ * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { + return (!!this._logger && + logLevel !== HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + RequestPolicyOptions.prototype.log = function (logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + }; + return RequestPolicyOptions; +}()); +export { RequestPolicyOptions }; +//# sourceMappingURL=requestPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js.map new file mode 100644 index 00000000..506b5bb7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.js","sourceRoot":"","sources":["../../../lib/policies/requestPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAI/F,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAc/D;IACE,2BACW,WAA0B,EAC1B,QAAkC;QADlC,gBAAW,GAAX,WAAW,CAAe;QAC1B,aAAQ,GAAR,QAAQ,CAA0B;IAC1C,CAAC;IAIJ;;;;OAIG;IACI,qCAAS,GAAhB,UAAiB,QAA8B;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACI,+BAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;IACH,wBAAC;AAAD,CAAC,AA1BD,IA0BC;;AAsBD;;GAEG;AACH;IACE,8BAAoB,OAA4B;QAA5B,YAAO,GAAP,OAAO,CAAqB;IAAG,CAAC;IAEpD;;;;OAIG;IACI,wCAAS,GAAhB,UAAiB,QAA8B;QAC7C,OAAO,CACL,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAK,oBAAoB,CAAC,GAAG;YACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,CACzC,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACI,kCAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACrC;IACH,CAAC;IACH,2BAAC;AAAD,CAAC,AA3BD,IA2BC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts new file mode 100644 index 00000000..305f6175 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts @@ -0,0 +1,10 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function rpRegistrationPolicy(retryTimeout?: number): RequestPolicyFactory; +export declare class RPRegistrationPolicy extends BaseRequestPolicy { + readonly _retryTimeout: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, _retryTimeout?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=rpRegistrationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts.map new file mode 100644 index 00000000..51bbc8ae --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rpRegistrationPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,oBAAoB,CAAC,YAAY,SAAK,GAAG,oBAAoB,CAM5E;AAED,qBAAa,oBAAqB,SAAQ,iBAAiB;IAIvD,QAAQ,CAAC,aAAa;gBAFtB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACxB,aAAa,SAAK;IAKtB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js new file mode 100644 index 00000000..ad7107ff --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js @@ -0,0 +1,163 @@ +import { __extends } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } + return { + create: function (nextPolicy, options) { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +var RPRegistrationPolicy = /** @class */ (function (_super) { + __extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; + } + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; +}(BaseRequestPolicy)); +export { RPRegistrationPolicy }; +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + var rpName = checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + var urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(function () { return false; }) + .then(function (registrationStatus) { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", utils.generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. 
+ */ +function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + var result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); + } + return result; +} +/** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ +function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. 
+ * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. + */ +function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return utils + .delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); +} +//# sourceMappingURL=rpRegistrationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js.map new file mode 100644 index 00000000..848ce2fa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rpRegistrationPolicy.js","sourceRoot":"","sources":["../../../lib/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":";AAGA,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,oBAAoB,CAAC,YAAiB;IAAjB,6BAAA,EAAA,iBAAiB;IACpD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACrE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAA0C,wCAAiB;IACzD,8BACE,UAAyB,EACzB,OAAiC,EACxB,aAAkB;QAAlB,8BAAA,EAAA,kBAAkB;QAH7B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,mBAAa,GAAb,aAAa,CAAK;;IAG7B,CAAC;IAEM,0CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,gBAAgB,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAzC,CAAyC,CAAC,CAAC;IACnE,CAAC;IACH,2BAAC;AAAD,CAAC,AAdD,CAA0C,iBAAiB,GAc1D;;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;IAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,IAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;QACxE,IAAI,MAAM,EAAE;YACV,IAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,OAAO,CACL,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;gBAC5C,wFAAwF;gBACxF,kFAAkF;gBAClF,uFAAuF;iBACtF,KAAK,CAAC,cAAM,OAAA,KAAK,EAAL,CAAK,CAAC;iBAClB,IAAI,CAAC,UAAC,kBAAkB;gBACvB,IAAI,kBAAkB,EAAE;oBACtB,2EAA2E;oBAC3E,0EAA0E;oBAC1E,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;iBACxD;gBACD,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC,CACL,CAAC;SACH;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;GAKG;AACH,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAmB;IAAnB,4BAAA,EAAA,mBAAmB;IAEnB,IAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;IAC5D,IAAI,WAAW,EAAE;QACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;KACtC;IAED,wEAAwE;IACxE,iDAAiD;IACjD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;IAEvE,uCAAuC;IACvC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;IAE1E,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;GAKG;AACH,SAAS,yBAAyB,CAAC,IAAY;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;IACzB,IAAI,IAAI,EAAE;QACR,IAAI;YACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;SACjC;QAAC,OAAO,GAAG,EAAE;YACZ,cAAc;SACf;QACD,IACE,YAAY;YACZ,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;YACvB,YAAY,CAAC,KAAK,CAAC,I
AAI,KAAK,iCAAiC,EAC7D;YACA,IAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC/D,IAAI,QAAQ,EAAE;gBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;aACzB;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,SAAS,sBAAsB,CAAC,GAAW;IACzC,IAAI,MAAM,CAAC;IACX,IAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;IAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,2DAAyD,GAAG,MAAG,CAAC,CAAC;KAClF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,UAAU,CACjB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;IAEhC,IAAM,OAAO,GAAM,SAAS,kBAAa,QAAQ,qCAAkC,CAAC;IACpF,IAAM,MAAM,GAAM,SAAS,kBAAa,QAAQ,4BAAyB,CAAC;IAC1E,IAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;IAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;QAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,yBAAuB,QAAQ,8CAA2C,CAAC,CAAC;SAC7F;QACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;IAChE,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,qBAAqB,CAC5B,MAA4B,EAC5B,GAAW,EACX,eAAgC;IAEhC,IAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;IAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;IACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;QACzD,IAAM,GAAG,GAAG,GAAG,CAAC,UAAiB,CAAC;QAClC,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;YACrF,OAAO,IAAI,CAAC;SACb;aAAM;YACL,OAAO,KAAK;iBACT,KAAK,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;iBAClC,IAAI,CAAC,cAAM,OAAA,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,EAAnD,CAAmD,CAAC,CAAC;SACpE;IACH,CAAC,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts new file mode 100644 index 00000000..b4b2f2df --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts @@ -0,0 +1,12 @@ +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicyFactory, RequestPolicy, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function signingPolicy(authenticationProvider: ServiceClientCredentials): RequestPolicyFactory; +export declare class SigningPolicy extends BaseRequestPolicy { + authenticationProvider: ServiceClientCredentials; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, authenticationProvider: ServiceClientCredentials); + signRequest(request: WebResourceLike): Promise; + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=signingPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts.map new file mode 100644 index 00000000..91ae8e81 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"signingPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/signingPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,wBAAwB,EAAE,MAAM,yCAAyC,CAAC;AACnF,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,aAAa,EACb,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,aAAa,CAC3B,sBAAsB,EAAE,wBAAwB,GAC/C,oBAAoB,CAMtB;AAED,qBAAa,aAAc,SAAQ,iBAAiB;IAIzC,sBAAsB,EAAE,wBAAwB;gBAFvD,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EAC1B,sBAAsB,EAAE,wBAAwB;IAKzD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAIxD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js new file mode 100644 index 00000000..6a3be13b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function signingPolicy(authenticationProvider) { + return { + create: function (nextPolicy, options) { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +var SigningPolicy = /** @class */ (function (_super) { + __extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; + } + SigningPolicy.prototype.signRequest = function (request) { + return this.authenticationProvider.signRequest(request); + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; +}(BaseRequestPolicy)); +export { SigningPolicy }; +//# sourceMappingURL=signingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js.map new file mode 100644 index 00000000..1cc0247b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"signingPolicy.js","sourceRoot":"","sources":["../../../lib/policies/signingPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAK/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,aAAa,CAC3B,sBAAgD;IAEhD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;QACxE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAmC,iCAAiB;IAClD,uBACE,UAAyB,EACzB,OAAiC,EAC1B,sBAAgD;QAHzD,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHQ,4BAAsB,GAAtB,sBAAsB,CAA0B;;IAGzD,CAAC;IAED,mCAAW,GAAX,UAAY,OAAwB;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC1D,CAAC;IAEM,mCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,WAAW;YAChD,OAAA,KAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC;QAAzC,CAAyC,CAC1C,CAAC;IACJ,CAAC;IACH,oBAAC;AAAD,CAAC,AAlBD,CAAmC,iBAAiB,GAkBnD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts 
b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts new file mode 100644 index 00000000..153e5604 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts @@ -0,0 +1,37 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +export declare function systemErrorRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. + * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +export declare class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + DEFAULT_CLIENT_RETRY_INTERVAL: number; + DEFAULT_CLIENT_RETRY_COUNT: number; + DEFAULT_CLIENT_MAX_RETRY_INTERVAL: number; + DEFAULT_CLIENT_MIN_RETRY_INTERVAL: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=systemErrorRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts.map new file mode 100644 index 00000000..c38e714f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"systemErrorRetryPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,UAAU,CAAC;CACpB;AAED,MAAM,WAAW,UAAW,SAAQ,KAAK;IACvC,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAatB;AAED;;;;;;;;;GASG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;IACzB,gBAAgB,EAAE,MAAM,CAAC;IACzB,6BAA6B,SAAa;IAC1C,0BAA0B,SAAK;IAC/B,iCAAiC,SAAa;IAC9C,iCAAiC,SAAY;gBAG3C,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM;IAgBpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js 
b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js new file mode 100644 index 00000000..06b7c64f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __awaiter, __extends, __generator } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. + * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +var SystemErrorRetryPolicy = /** @class */ (function (_super) { + __extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + _this.DEFAULT_CLIENT_RETRY_COUNT = 3; + _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + _this.retryCount = typeof retryCount === "number" ? retryCount : _this.DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = + typeof retryInterval === "number" ? retryInterval : _this.DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .catch(function (error) { return retry(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; +}(BaseRequestPolicy)); +export { SystemErrorRetryPolicy }; +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy, retryData) { + var currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ +function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry(policy, request, operationResponse, err, retryData) { + return __awaiter(this, void 0, void 0, function () { + var error_1; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData(policy, retryData, err); + if (!(err && + err.code && + shouldRetry(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT"))) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, utils.delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + error_1 = _a.sent(); + return [2 /*return*/, retry(policy, request, operationResponse, error_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js.map new file mode 100644 index 00000000..3225fab1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"systemErrorRetryPolicy.js","sourceRoot":"","sources":["../../../lib/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAczB,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;;;;;;;GASG;AACH;IAA4C,0CAAiB;IAU3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAzBD,mCAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;QAC1C,gCAA0B,GAAG,CAAC,CAAC;QAC/B,uCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;QAC9C,uCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;QAW3C,KAAI,CAAC,UAAU,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAI,CAAC,0BAA0B,CAAC;QAChG,KAAI,CAAC,aAAa;YAChB,OAAO,aAAa,KAAK,QAAQ,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,KAAI,CAAC,6BAA6B,CAAC;QACzF,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;gBAClC,CAAC,CAAC,gBAAgB;gBAClB,CAAC,CAAC,KAAI,CAAC,iCAAiC,CAAC;QAC7C,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;gBAClC,CAAC,CAAC,gBAAgB;gBAClB,CAAC,CAAC,KAAI,CAAC,iCAAiC,CAAC;;IAC/C,CAAC;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,EAA3C,CAA2C,CAAC,CAAC;IACnE,CAAC;IACH,6BAAC;AAAD,CAAC,AArCD,CAA4C,iBAAiB,GAqC5D;;AAED;;;;;;GAMG;AACH,SAAS,WAAW,CAAC,MAA8B,EAAE,SAAoB;IACvE,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IACD,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;GAKG;AACH,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;IAED,qBAAqB;IACrB,SAAS,CAAC,UAAU,EAAE,CAAC;IAEvB,wBAAwB;IACxB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAe,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;;;;;;oBAErB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;yBAElD,CAAA,GAAG;wBACH,GAAG,CAAC,IAAI;wBACR,WAAW,CAAC,MAAM,EAAE,SAAS,CAAC;wBAC9B,CAAC,GAAG,CAAC,IAAI,KAAK,WAAW;4BACvB,GAAG,CAAC,IAAI,KAAK,iBAAiB;4BAC9B,GAAG,CAAC,IAAI,KAAK,cAAc;4BAC3B,GAAG,CAAC,IAAI,KAAK,YAAY;4BACzB,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAA,EAPxB,wBAOwB;;;;oBAItB,qBAAM,KAAK,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,EAAA;;oBAA1C,SAA0C,CAAC;oBAC3C,sBAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAC;;;oBAEvD,sBAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,OAAK,EAAE,SAAS,CAAC,EAAC;;;oBAGrE,IAAI,GAAG,EAAE;wBACP,qFAAqF;wBACrF,sBAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAC;qBACxC;oBACD,sBAAO,iBAAiB,EAAC;;;;;CAE5B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts new file mode 100644 index 
00000000..41ea6409 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts @@ -0,0 +1,28 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyOptionsLike, RequestPolicyFactory } from "./requestPolicy"; +import { WebResourceLike } from "../webResource"; +import { HttpOperationResponse } from "../httpOperationResponse"; +/** + * Options that control how to retry on response status code 429. + */ +export interface ThrottlingRetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. + */ + maxRetries?: number; +} +export declare function throttlingRetryPolicy(maxRetries?: number): RequestPolicyFactory; +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export declare class ThrottlingRetryPolicy extends BaseRequestPolicy { + private retryLimit; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number); + sendRequest(httpRequest: WebResourceLike): Promise; + private retry; + static parseRetryAfterHeader(headerValue: string): number | undefined; + static parseDateRetryAfterHeader(headerValue: string): number | undefined; +} +//# sourceMappingURL=throttlingRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts.map new file mode 100644 index 00000000..124aa8ea --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,wBAAwB,EACxB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAOjE;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,wBAAgB,qBAAqB,CACnC,UAAU,GAAE,MAA4B,GACvC,oBAAoB,CAMtB;AAED;;;;;GAKG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,OAAO,CAAC,UAAU,CAAS;gBAEf,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB,EAAE,UAAU,EAAE,MAAM;IAK/E,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;YAMxE,KAAK;WA2BL,qBAAqB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;WAS9D,yBAAyB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;CAWjF"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js new file mode 100644 index 00000000..9768e050 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __awaiter, __extends, __generator } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Constants } from "../util/constants"; +import { delay } from "../util/utils"; +var StatusCodes = Constants.HttpConstants.StatusCodes; +var DEFAULT_RETRY_COUNT = 3; +export function throttlingRetryPolicy(maxRetries) { + if (maxRetries === void 0) { maxRetries = DEFAULT_RETRY_COUNT; } + return { + create: function (nextPolicy, options) { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; +} +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +var ThrottlingRetryPolicy = /** @class */ (function (_super) { + __extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, retryLimit) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryLimit = retryLimit; + return _this; + } + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + return _this.retry(httpRequest, response, 0); + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype.retry = function (httpRequest, httpResponse, retryCount) { + return __awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs, res; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return [2 /*return*/, httpResponse]; + } + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (!(retryAfterHeader && retryCount < this.retryLimit)) return [3 /*break*/, 3]; + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (!delayInMs) return [3 /*break*/, 3]; + return [4 /*yield*/, delay(delayInMs)]; + case 1: + _a.sent(); + return [4 /*yield*/, this._nextPolicy.sendRequest(httpRequest)]; + case 2: + res = _a.sent(); + return [2 /*return*/, this.retry(httpRequest, res, retryCount + 1)]; + case 3: return [2 /*return*/, httpResponse]; + } + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { + try { + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; + return Number.isNaN(diff) ? 
undefined : diff; + } + catch (error) { + return undefined; + } + }; + return ThrottlingRetryPolicy; +}(BaseRequestPolicy)); +export { ThrottlingRetryPolicy }; +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js.map new file mode 100644 index 00000000..91fa3691 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.js","sourceRoot":"","sources":["../../../lib/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC,IAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AACxD,IAAM,mBAAmB,GAAG,CAAC,CAAC;AAY9B,MAAM,UAAU,qBAAqB,CACnC,UAAwC;IAAxC,2BAAA,EAAA,gCAAwC;IAExC,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;QACpE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;;;GAKG;AACH;IAA2C,yCAAiB;IAG1D,+BAAY,UAAyB,EAAE,OAAiC,EAAE,UAAkB;QAA5F,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;;IAC/B,CAAC;IAEY,2CAAW,GAAxB,UAAyB,WAA4B;;;;gBACnD,sBAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;wBACrE,OAAO,KAAI,CAAC,KAAK,CAAC,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;oBAC9C,CAAC,CAAC,EAAC;;;KACJ;IAEa,qCAAK,GAAnB,UACE,WAA4B,EAC5B,YAAmC,EACnC,UAAkB;;;;;;wBAElB,IAAI,YAAY,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe,EAAE;4BACvD,sBAAO,YAAY,EAAC;yBACrB;wBAEK,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;6BAEE,CAAA,gBAAgB,IAAI,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA,EAAhD,wBAAgD;wBAC5C,SAAS,GAAuB,qBAAqB,CAAC,qBAAqB,CAC/E,gBAAgB,CACjB,CAAC;6BACE,SAAS,EAAT,wBAAS;wBACX,qBAAM,KAAK,CAAC,SAAS,CAAC,EAAA;;wBAAtB,SAAsB,CAAC;wBACX,qBAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,EAAA;;wBAArD,GAAG,GAAG,SAA+C;wBAC3D,sBAAO,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC,EAAC;4BAIxD,sBAAO,YAAY,EAAC;;;;KACrB;IAEa,2CAAqB,GAAnC,UAAoC,WAAmB;QACrD,IAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;QAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;YACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;SACrE;aAAM;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;SACnC;IACH,CAAC;IAEa,+CAAyB,GAAvC,UAAwC,WAAmB;QACzD,IAAI;YACF,IAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,IAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC7C,IAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;YAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;SAC9C;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,SAAS,CAAC;SAClB;IACH,CAAC;IACH,4BAAC;AAAD,CAAC,AA7DD,CAA2C,iBAAiB,GA6D3D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts new file mode 100644 index 00000000..2ba12d40 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts @@ -0,0 +1,21 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { getDefaultUserAgentKey } from "./msRestUserAgentPolicy"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare type TelemetryInfo = { + key?: string; + value?: string; +}; +export declare const getDefaultUserAgentHeaderName: typeof 
getDefaultUserAgentKey; +export declare function getDefaultUserAgentValue(): string; +export declare function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory; +export declare class UserAgentPolicy extends BaseRequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptionsLike; + protected headerKey: string; + protected headerValue: string; + constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike, headerKey: string, headerValue: string); + sendRequest(request: WebResourceLike): Promise; + addUserAgentHeader(request: WebResourceLike): void; +} +//# sourceMappingURL=userAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts.map new file mode 100644 index 00000000..4a3c4c1c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"userAgentPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,sBAAsB,EAA2B,MAAM,yBAAyB,CAAC;AAC1F,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,oBAAY,aAAa,GAAG;IAAE,GAAG,CAAC,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAwB7D,eAAO,MAAM,6BAA6B,+BAAyB,CAAC;AAEpE,wBAAgB,wBAAwB,IAAI,MAAM,CAKjD;AAED,wBAAgB,eAAe,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAanF;AAED,qBAAa,eAAgB,SAAQ,iBAAiB;IAElD,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC,QAAQ,CAAC,QAAQ,EAAE,wBAAwB;IAC3C,SAAS,CAAC,SAAS,EAAE,MAAM;IAC3B,SAAS,CAAC,WAAW,EAAE,MAAM;gBAHpB,WAAW,EAAE,aAAa,EAC1B,QAAQ,EAAE,wBAAwB,EACjC,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAKrE,kBAAkB,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI;CASnD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js new file mode 100644 index 00000000..21a43914 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { HttpHeaders } from "../httpHeaders"; +import { Constants } from "../util/constants"; +import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; +import { BaseRequestPolicy, } from "./requestPolicy"; +function getRuntimeInfo() { + var msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } + return telemetryInfo + .map(function (info) { + var value = info.value ? 
"" + valueSeparator + info.value : ""; + return "" + info.key + value; + }) + .join(keySeparator); +} +export var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +export function getDefaultUserAgentValue() { + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +export function userAgentPolicy(userAgentData) { + var key = !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + var value = !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: function (nextPolicy, options) { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +var UserAgentPolicy = /** @class */ (function (_super) { + __extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; + } + UserAgentPolicy.prototype.sendRequest = function (request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + }; + return UserAgentPolicy; +}(BaseRequestPolicy)); +export { UserAgentPolicy }; +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js.map new file mode 100644 index 00000000..3e2fb850 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"userAgentPolicy.js","sourceRoot":"","sources":["../../../lib/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC;AAC1F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,SAAS,cAAc;IACrB,IAAM,aAAa,GAAG;QACpB,GAAG,EAAE,YAAY;QACjB,KAAK,EAAE,SAAS,CAAC,aAAa;KAC/B,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAkB,EAClB,cAAoB;IADpB,6BAAA,EAAA,kBAAkB;IAClB,+BAAA,EAAA,oBAAoB;IAEpB,OAAO,aAAa;SACjB,GAAG,CAAC,UAAC,IAAI;QACR,IAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAG,cAAc,GAAG,IAAI,CAAC,KAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,OAAO,KAAG,IAAI,CAAC,GAAG,GAAG,KAAO,CAAC;IAC/B,CAAC,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAED,MAAM,CAAC,IAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE,MAAM,UAAU,wBAAwB;IACtC,IAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,IAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,IAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;IAC/E,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,eAAe,CAAC,aAA6B;IAC3D,IAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,IAAI,SAAS,CAAC,CAAC,CAAC,sBAAsB,EAAE,CAAC,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC;IAClG,IAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,IAAI,SAAS;QAChD,CAAC,CAAC,wBAAwB,EAAE;QAC5B,CAAC,CAAC,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAqC,mCAAiB;IACpD,yBACW,WAA0B,EAC1B,QAAkC,EACjC,SAAiB,EACjB,WAAmB;QAJ/B,YAME,kBAAM,WAAW,EAAE,QAAQ,CAAC,SAC7B;QANU,iBAAW,GAAX,WAAW,CAAe;QAC1B,cAAQ,GAAR,QAAQ,CAA0B;QACjC,eAAS,GAAT,SAAS,CAAQ;QACjB,iBAAW,GAAX,WAAW,CAAQ;;IAG/B,CAAC;IAED,qCAAW,GAAX,UAAY,OAAwB;QAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IAED,4CAAkB,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;SACrC;QAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;YAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;SACvD;IACH,CAAC;IACH,sBAAC;AAAD,CAAC,AAxBD,CAAqC,iBAAiB,GAwBrD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts new file mode 100644 index 00000000..3ac84057 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts @@ -0,0 +1,33 @@ +/// +import * as http from "http"; +import * as https from "https"; +import { ProxySettings } from "./serviceClient"; +import { HttpHeadersLike } from "./httpHeaders"; +export declare type ProxyAgent = { + isHttps: boolean; + agent: http.Agent | https.Agent; +}; +export declare function createProxyAgent(requestUrl: string, proxySettings: ProxySettings, headers?: HttpHeadersLike): ProxyAgent; +export interface HttpsProxyOptions { + host: string; + port: number; + localAddress?: string; + proxyAuth?: string; + headers?: { + [key: string]: any; + }; + ca?: Buffer[]; + servername?: string; + key?: Buffer; + cert?: Buffer; +} +interface HttpsOverHttpsOptions { + maxSockets?: number; + ca?: Buffer[]; + key?: Buffer; + cert?: Buffer; + proxy?: HttpsProxyOptions; +} +export declare function createTunnel(isRequestHttps: boolean, isProxyHttps: boolean, tunnelOptions: HttpsOverHttpsOptions): http.Agent | https.Agent; +export {}; +//# sourceMappingURL=proxyAgent.d.ts.map \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts.map new file mode 100644 index 00000000..13222d68 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.d.ts","sourceRoot":"","sources":["../../lib/proxyAgent.ts"],"names":[],"mappings":";AAGA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAG/B,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAEhD,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD,oBAAY,UAAU,GAAG;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAA;CAAE,CAAC;AAC/E,wBAAgB,gBAAgB,CAC9B,UAAU,EAAE,MAAM,EAClB,aAAa,EAAE,aAAa,EAC5B,OAAO,CAAC,EAAE,eAAe,GACxB,UAAU,CA0BZ;AAID,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACjC,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC;IACd,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,UAAU,qBAAqB;IAC7B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC;IACd,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,iBAAiB,CAAC;CAC3B;AAED,wBAAgB,YAAY,CAC1B,cAAc,EAAE,OAAO,EACvB,YAAY,EAAE,OAAO,EACrB,aAAa,EAAE,qBAAqB,GACnC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAU1B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js new file mode 100644 index 00000000..c8322344 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import * as tunnel from "tunnel"; +import { URLBuilder } from "./url"; +export function createProxyAgent(requestUrl, proxySettings, headers) { + var tunnelOptions = { + proxy: { + host: URLBuilder.parse(proxySettings.host).getHost(), + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = proxySettings.username + ":" + proxySettings.password; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = "" + proxySettings.username; + } + var requestScheme = URLBuilder.parse(requestUrl).getScheme() || ""; + var isRequestHttps = requestScheme.toLowerCase() === "https"; + var proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || ""; + var isProxyHttps = proxyScheme.toLowerCase() === "https"; + var proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +export function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } + else { + return tunnel.httpOverHttp(tunnelOptions); + } +} +//# sourceMappingURL=proxyAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js.map b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js.map new file mode 100644 index 00000000..fe48db5c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.js","sourceRoot":"","sources":["../../lib/proxyAgent.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAI/F,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAGjC,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AAInC,MAAM,UAAU,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB;IAEzB,IAAM,aAAa,GAAiC;QAClD,KAAK,EAAE;YACL,IAAI,EAAE,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY;YAC9D,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE;SACjD;KACF,CAAC;IAEF,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;QACpD,aAAa,CAAC,KAAM,CAAC,SAAS,GAAM,aAAa,CAAC,QAAQ,SAAI,aAAa,CAAC,QAAU,CAAC;KACxF;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,KAAG,aAAa,CAAC,QAAU,CAAC;KAC9D;IAED,IAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IACrE,IAAM,cAAc,GAAG,aAAa,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAC/D,IAAM,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IAC3E,IAAM,YAAY,GAAG,WAAW,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAE3D,IAAM,UAAU,GAAG;QACjB,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;IAEF,OAAO,UAAU,CAAC;AACpB,CAAC;AAwBD,MAAM,UAAU,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAAoC;IAEpC,IAAI,cAAc,IAAI,YAAY,EAAE;QAClC,OAAO,MAAM,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;KAC7C;SAAM,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM;QACL,OAAO,MAAM,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC;KAC3C;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts new file mode 100644 index 00000000..a235f856 --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts @@ -0,0 +1,11 @@ +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export declare enum QueryCollectionFormat { + Csv = ",", + Ssv = " ", + Tsv = "\t", + Pipes = "|", + Multi = "Multi" +} +//# sourceMappingURL=queryCollectionFormat.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts.map new file mode 100644 index 00000000..7ad8dc84 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.d.ts","sourceRoot":"","sources":["../../lib/queryCollectionFormat.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,qBAAqB;IAC/B,GAAG,MAAM;IACT,GAAG,MAAM;IACT,GAAG,OAAO;IACV,KAAK,MAAM;IACX,KAAK,UAAU;CAChB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js new file mode 100644 index 00000000..6128d013 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export var QueryCollectionFormat; +(function (QueryCollectionFormat) { + QueryCollectionFormat["Csv"] = ","; + QueryCollectionFormat["Ssv"] = " "; + QueryCollectionFormat["Tsv"] = "\t"; + QueryCollectionFormat["Pipes"] = "|"; + QueryCollectionFormat["Multi"] = "Multi"; +})(QueryCollectionFormat || (QueryCollectionFormat = {})); +//# sourceMappingURL=queryCollectionFormat.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js.map b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js.map new file mode 100644 index 00000000..29c806c1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.js","sourceRoot":"","sources":["../../lib/queryCollectionFormat.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;GAEG;AACH,MAAM,CAAN,IAAY,qBAMX;AAND,WAAY,qBAAqB;IAC/B,kCAAS,CAAA;IACT,kCAAS,CAAA;IACT,mCAAU,CAAA;IACV,oCAAW,CAAA;IACX,wCAAe,CAAA;AACjB,CAAC,EANW,qBAAqB,KAArB,qBAAqB,QAMhC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts new file mode 100644 index 00000000..7b889c4c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts @@ -0,0 +1,14 @@ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +export declare class RestError extends Error { + static readonly REQUEST_SEND_ERROR: string; + static readonly REQUEST_ABORTED_ERROR: string; + static readonly PARSE_ERROR: string; + code?: string; + statusCode?: number; + request?: WebResourceLike; + response?: HttpOperationResponse; + body?: any; + constructor(message: string, code?: string, statusCode?: number, request?: WebResourceLike, response?: HttpOperationResponse, body?: any); +} +//# sourceMappingURL=restError.d.ts.map \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts.map new file mode 100644 index 00000000..d058e5a5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.d.ts","sourceRoot":"","sources":["../../lib/restError.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD,qBAAa,SAAU,SAAQ,KAAK;IAClC,MAAM,CAAC,QAAQ,CAAC,kBAAkB,EAAE,MAAM,CAAwB;IAClE,MAAM,CAAC,QAAQ,CAAC,qBAAqB,EAAE,MAAM,CAA2B;IACxE,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAiB;IAEpD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,eAAe,CAAC;IAC1B,QAAQ,CAAC,EAAE,qBAAqB,CAAC;IACjC,IAAI,CAAC,EAAE,GAAG,CAAC;gBAET,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB,EAChC,IAAI,CAAC,EAAE,GAAG;CAWb"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.js b/node_modules/@azure/ms-rest-js/es/lib/restError.js new file mode 100644 index 00000000..90625203 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +var RestError = /** @class */ (function (_super) { + __extends(RestError, _super); + function RestError(message, code, statusCode, request, response, body) { + var _this = _super.call(this, message) || this; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + _this.body = body; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; + } + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.REQUEST_ABORTED_ERROR = "REQUEST_ABORTED_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; +}(Error)); +export { RestError }; +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.js.map b/node_modules/@azure/ms-rest-js/es/lib/restError.js.map new file mode 100644 index 00000000..5772499d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.js","sourceRoot":"","sources":["../../lib/restError.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAK/F;IAA+B,6BAAK;IAUlC,mBACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAChC,IAAU;QANZ,YAQE,kBAAM,OAAO,CAAC,SAQf;QAPC,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,KAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QAEjB,MAAM,CAAC,cAAc,CAAC,KAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;;IACnD,CAAC;IAzBe,4BAAkB,GAAW,oBAAoB,CAAC;IAClD,+BAAqB,GAAW,uBAAuB,CAAC;IACxD,qBAAW,GAAW,aAAa,CAAC;IAwBtD,gBAAC;CAAA,AA3BD,CAA+B,KAAK,GA2BnC;SA3BY,SAAS"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts new file mode 100644 index 00000000..2fb084f0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts @@ -0,0 +1,130 @@ +export declare class Serializer { + readonly modelMappers: { + [key: string]: any; + }; + readonly isXML?: boolean | undefined; + constructor(modelMappers?: { + [key: string]: any; + }, isXML?: boolean | undefined); + 
validateConstraints(mapper: Mapper, value: any, objectName: string): void; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + serialize(mapper: Mapper, object: any, objectName?: string): any; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + deserialize(mapper: Mapper, responseBody: any, objectName: string): any; +} +export interface MapperConstraints { + InclusiveMaximum?: number; + ExclusiveMaximum?: number; + InclusiveMinimum?: number; + ExclusiveMinimum?: number; + MaxLength?: number; + MinLength?: number; + Pattern?: RegExp; + MaxItems?: number; + MinItems?: number; + UniqueItems?: true; + MultipleOf?: number; +} +export declare type MapperType = SimpleMapperType | CompositeMapperType | SequenceMapperType | DictionaryMapperType | EnumMapperType; +export interface SimpleMapperType { + name: "Base64Url" | "Boolean" | "ByteArray" | "Date" | "DateTime" | "DateTimeRfc1123" | "Object" | "Stream" | "String" | "TimeSpan" | "UnixTime" | "Uuid" | "Number" | "any"; +} +export interface CompositeMapperType { + name: "Composite"; + className?: string; + modelProperties?: { + [propertyName: string]: Mapper; + }; + additionalProperties?: Mapper; + uberParent?: string; + polymorphicDiscriminator?: PolymorphicDiscriminator; +} +export interface SequenceMapperType { + name: "Sequence"; + element: Mapper; +} +export interface DictionaryMapperType { + name: "Dictionary"; + value: Mapper; +} +export interface EnumMapperType { + name: "Enum"; + allowedValues: any[]; +} +export interface BaseMapper { + xmlName?: string; + xmlIsAttribute?: boolean; + xmlElementName?: string; + xmlIsWrapped?: boolean; + readOnly?: boolean; + isConstant?: boolean; + required?: boolean; + nullable?: boolean; + serializedName?: string; + type: MapperType; + defaultValue?: any; + constraints?: MapperConstraints; +} +export declare type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; +export interface PolymorphicDiscriminator { + serializedName: string; + clientName: string; + [key: string]: string; +} +export interface CompositeMapper extends BaseMapper { + type: CompositeMapperType; +} +export interface SequenceMapper extends BaseMapper { + type: SequenceMapperType; +} +export interface DictionaryMapper extends BaseMapper { + type: DictionaryMapperType; + headerCollectionPrefix?: string; +} +export interface EnumMapper extends BaseMapper { + type: EnumMapperType; +} +export interface UrlParameterValue { + value: string; + skipUrlEncoding: boolean; +} +export declare function serializeObject(toSerialize: any): any; +export declare const MapperType: { + Base64Url: "Base64Url"; + Boolean: "Boolean"; + ByteArray: "ByteArray"; + Date: "Date"; + DateTime: "DateTime"; 
+ DateTimeRfc1123: "DateTimeRfc1123"; + Object: "Object"; + Stream: "Stream"; + String: "String"; + TimeSpan: "TimeSpan"; + UnixTime: "UnixTime"; + Number: "Number"; + Composite: "Composite"; + Sequence: "Sequence"; + Dictionary: "Dictionary"; + Enum: "Enum"; +}; +//# sourceMappingURL=serializer.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts.map new file mode 100644 index 00000000..2516371c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.d.ts","sourceRoot":"","sources":["../../lib/serializer.ts"],"names":[],"mappings":"AAMA,qBAAa,UAAU;aAEH,YAAY,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE;aACpC,KAAK,CAAC;gBADN,YAAY,GAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAO,EACzC,KAAK,CAAC,qBAAS;IAGjC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,EAAE,UAAU,EAAE,MAAM,GAAG,IAAI;IA8DzE;;;;;;;;;;OAUG;IACH,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,EAAE,UAAU,CAAC,EAAE,MAAM,GAAG,GAAG;IAmEhE;;;;;;;;;;OAUG;IACH,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,EAAE,UAAU,EAAE,MAAM,GAAG,GAAG;CA4ExE;AAipBD,MAAM,WAAW,iBAAiB;IAChC,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,IAAI,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,oBAAY,UAAU,GAClB,gBAAgB,GAChB,mBAAmB,GACnB,kBAAkB,GAClB,oBAAoB,GACpB,cAAc,CAAC;AAEnB,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EACA,WAAW,GACX,SAAS,GACT,WAAW,GACX,MAAM,GACN,UAAU,GACV,iBAAiB,GACjB,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,UAAU,GACV,UAAU,GACV,MAAM,GACN,QAAQ,GACR,KAAK,CAAC;CACX;AAED,MAAM,WAAW,mBAAmB;IAClC,IAAI,EAAE,WAAW,CAAC;IAKlB,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB,eAAe,CAAC,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IACrD,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAE9B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,wBAAwB,CAAC,EAAE,wBAAwB,CAAC;CACrD;AAED,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,UAAU,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,oBAAoB;IACnC,IAAI,EAAE,YAAY,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,aAAa,EAAE,GAAG,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,IAAI,EAAE,UAAU,CAAC;IACjB,YAAY,CAAC,EAAE,GAAG,CAAC;IACnB,WAAW,CAAC,EAAE,iBAAiB,CAAC;CACjC;AAED,oBAAY,MAAM,GAAG,UAAU,GAAG,eAAe,GAAG,cAAc,GAAG,gBAAgB,GAAG,UAAU,CAAC;AAEnG,MAAM,WAAW,wBAAwB;IACvC,cAAc,EAAE,MAAM,CAAC;IACvB,UAAU,EAAE,MAAM,CAAC;IACnB,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,eAAgB,SAAQ,UAAU;IACjD,IAAI,EAAE,mBAAmB,CAAC;CAC3B;AAED,MAAM,WAAW,cAAe,SAAQ,UAAU;IAChD,IAAI,EAAE,kBAAkB,CAAC;CAC1B;AAED,MAAM,WAAW,gBAAiB,SAAQ,UAAU;IAClD,IAAI,EAAE,oBAAoB,CAAC;IAC3B,sBAAsB,CAAC,EAAE,MAAM,CAAC;CACjC;AAED,MAAM,WAAW,UAAW,SAAQ,UAAU;IAC5C,IAAI,EAAE,cAAc,CAAC;CACtB;AAED,MAAM,WAAW,iBAAiB;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,eAAe,EAAE,OAAO,CAAC;CAC1B;AAGD,wBAAgB,eAAe,CAAC,WAAW,EAAE,GAAG,GAAG,GAAG,CAqBrD;AAaD,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;CAiBrB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.js b/node_modules/@azure/ms-rest-js/es/lib/serializer.js new 
file mode 100644 index 00000000..9e60d917 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.js @@ -0,0 +1,789 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import * as base64 from "./util/base64"; +import * as utils from "./util/utils"; +var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } + this.modelMappers = modelMappers; + this.isXML = isXML; + } + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); + }; + if (mapper.constraints && value != undefined) { + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + var pattern = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { + failValidation("UniqueItems", UniqueItems); + } + } + }; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + Serializer.prototype.serialize = function (mapper, object, objectName) { + var payload = {}; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". + // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + var required = mapper.required, nullable = mapper.nullable; + if (required && nullable && object === undefined) { + throw new Error(objectName + " cannot be undefined."); + } + if (required && !nullable && object == undefined) { + throw new Error(objectName + " cannot be null or undefined."); + } + if (!required && nullable === false && object === null) { + throw new Error(objectName + " cannot be null."); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/gi) !== null) { + var enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName); + } + } + return payload; + }; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} 
objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + Serializer.prototype.deserialize = function (mapper, responseBody, objectName) { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + var payload; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName); + } + else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + }; + return Serializer; +}()); +export { Serializer }; +function trimEnd(str, ch) { + var len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); + } + // Uint8Array to Base64. + var str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. 
+ str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. + return base64.decodeString(str); +} +function splitSerializeName(prop) { + var classes = []; + var partialclass = ""; + if (prop) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(objectName + " with value " + value + " must be of type number."); + } + } + else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); + } + } + else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); + } + } + else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(objectName + " with value " + value + " must be of type boolean."); + } + } + else if (typeName.match(/^Stream$/gi) !== null) { + var objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob)) { + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); + } + var isPresent = allowedValues.some(function (item) { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(value + " is not a valid value for " + objectName + ". 
The valid values are: " + JSON.stringify(allowedValues) + "."); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = base64.encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = + value instanceof Date + ? value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!utils.isDuration(value)) { + throw new Error(objectName + " must be a string in ISO 8601 format. 
Instead was \"" + value + "\"."); + } + value = value; + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName) { + if (!Array.isArray(object)) { + throw new Error(objectName + " must be of type Array."); + } + var elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempArray = []; + for (var i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName) { + if (typeof object !== "object") { + throw new Error(objectName + " must be of type object."); + } + var valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + var modelProps = mapper.type.modelProperties; + if (!modelProps) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + var modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + var propName = void 0; + var parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + var paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { + var pathName = paths_1[_c]; + var childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + var 
propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } + else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + } + else { + parentObject[propName] = serializedValue; + } + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]'); + } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); + } + } + return payload; + } + return object; +} +function isSpecialXmlProperty(propertyName) { + return ["$", "_"].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { + var key = _a[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." 
+ serializedName; + } + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + var dictionary = {}; + for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { + var headerKey = _c[_b]; + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize(propertyMapper, responseBody.$[xmlName], propertyObjectName); + } + else { + var propertyName = xmlElementName || xmlName || serializedName; + var unwrappedProperty = responseBody[propertyName]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; + var isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + var propertyInstance = void 0; + var res = responseBody; + // traversing the object step by step. + for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { + var item = paths_2[_d]; + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
+ if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + var serializedValue = void 0; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (var _e = 0, _f = Object.entries(instance); _e < _f.length; _e++) { + var _g = _f[_e], key_1 = _g[0], value = _g[1]; + if (!arrayInstance.hasOwnProperty(key_1)) { + arrayInstance[key_1] = value; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + instance[key] = serializedValue; + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (var responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]'); + } + } + } + else if (responseBody) { + for (var _h = 0, _j = Object.keys(responseBody); _h < _j.length; _h++) { + var key = _j[_h]; + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]"); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); 
+ if (polymorphicDiscriminator) { + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + var discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +// TODO: why is this here? +export function serializeObject(toSerialize) { + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = base64.encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + var dictionary = {}; + for (var property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; + result[key] = key; + } + return result; +} +export var MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.js.map b/node_modules/@azure/ms-rest-js/es/lib/serializer.js.map new file mode 100644 index 00000000..cfeb735e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../../lib/serializer.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,KAAK,MAAM,MAAM,eAAe,CAAC;AACxC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AAEtC;IACE,oBACkB,YAAyC,EACzC,KAAe;QADf,6BAAA,EAAA,iBAAyC;QAAzC,iBAAY,GAAZ,YAAY,CAA6B;QACzC,UAAK,GAAL,KAAK,CAAU;IAC9B,CAAC;IAEJ,wCAAmB,GAAnB,UAAoB,MAAc,EAAE,KAAU,EAAE,UAAkB;QAChE,IAAM,cAAc,GAAG,UAAC,cAAuC,EAAE,eAAoB;YACnF,MAAM,IAAI,KAAK,CACb,OAAI,UAAU,wBAAiB,KAAK,2CAAoC,cAAc,YAAM,eAAe,MAAG,CAC/G,CAAC;QACJ,CAAC,CAAC;QACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YACtC,IAAA,KAYF,MAAM,CAAC,WAAW,EAXpB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,UAAU,gBAAA,EACV,OAAO,aAAA,EACP,WAAW,iBACS,CAAC;YACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,KAAK,GAAG,UAAU,KAAK,CAAC,EAAE;gBACvD,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;aAC1C;YACD,IAAI,OAAO,EAAE;gBACX,IAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;gBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;oBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;iBACpC;aACF;YACD,IACE,WAAW;gBACX,KAAK,CAAC,IAAI,CAAC,UAAC,IAAS,EAAE,CAAS,EAAE,EAAc,IAAK,OAAA,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAtB,CAAsB,CAAC,EAC5E;gBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;aAC5C;SACF;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACH,8BAAS,GAAT,UAAU,MAAc,EAAE,MAAW,EAAE,UAAmB;QACxD,IAAI,OAAO,GAAQ,EAAE,CAAC;QACtB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAC7C,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;SAC9B;QAED,mDAAmD;QACnD,sDAAsD;QACtD,mDAAmD;QACnD,wBAAwB;QACxB,iCAAiC;QACjC,0CAA0C;QAC1C,0CAA0C;QAC1C,qCAAqC;QACrC,0CAA0C;QAElC,IAAA,QAAQ,GAAe,MAAM,SAArB,EAAE,QAAQ,GAAK,MAAM,SAAX,CAAY;QAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,0BAAuB,CAAC,CAAC;SACvD;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,kCAA+B,CAAC,CAAC;SAC/D;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;YACtD,MAAM,IAAI,KAAK,CAAI,UAAU,qBAAkB,CAAC,CAAC;SAClD;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;SAClB;aAAM;YACL,8BAA8B;YAC9B,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YACrD,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBACxC,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gDAAgD,CAAC,KAAK,IAAI,EAAE;gBACtF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;aAC/D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAM,UAAU,GAAe,MAA
oB,CAAC;gBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;aAChF;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,uDAAuD,CAAC,KAAK,IAAI,EAClF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aAC9D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,qBAAqB,CAAC,IAAI,EAAE,MAAwB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACrF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAA0B,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACzF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,IAAI,EAAE,MAAyB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACvF;SACF;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;;;;;;;;OAUG;IACH,gCAAW,GAAX,UAAY,MAAc,EAAE,YAAiB,EAAE,UAAkB;QAC/D,IAAI,YAAY,IAAI,SAAS,EAAE;YAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACzE,sEAAsE;gBACtE,qDAAqD;gBACrD,qEAAqE;gBACrE,YAAY,GAAG,EAAE,CAAC;aACnB;YACD,+FAA+F;YAC/F,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;gBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;aACpC;YACD,OAAO,YAAY,CAAC;SACrB;QAED,IAAI,OAAY,CAAC;QACjB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;YAC9C,OAAO,GAAG,wBAAwB,CAAC,IAAI,EAAE,MAAyB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;SAC/F;aAAM;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd;;;;mBAIG;gBACH,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;oBACpE,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;iBAClC;aACF;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAC3C,OAAO,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC;gBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;iBAChB;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;iBACjB;qBAAM;oBACL,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,mDAAmD,CAAC,KAAK,IAAI,EAAE;gBACzF,OAAO,GAAG,YAAY,CAAC;aACxB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,qCAAqC,CAAC,KAAK,IAAI,EAAE;gBAC3E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAY,CAAC,CAAC;aAClC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;aACxC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,YAAY,CAAC,CAAC;aAC7C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,oBAAoB,CAAC,YAAY,CAAC,CAAC;aAC9C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAAwB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;aAC7F;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,CACX,CAAC;aACH;SACF;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IACH,iBAAC;AAAD,CAAC,AAzOD,IAyOC;;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;IACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;IACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QAC1C,EAAE,GAAG,CAAC;KACP;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW;IACpC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,CAAC,CAAC,MAAM,YAAY,UAAU,CAAC,EAAE;QACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;KAC5F;IACD,wBAAwB;IACxB,IAAM,
GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;IAC3C,uBAAuB;IACvB,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW;IACvC,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;KACxF;IACD,uBAAuB;IACvB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;IAClD,wBAAwB;IACxB,OAAO,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB;IAClD,IAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;IACtB,IAAI,IAAI,EAAE;QACR,IAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAEjC,KAAmB,UAAQ,EAAR,qBAAQ,EAAR,sBAAQ,EAAR,IAAQ,EAAE;YAAxB,IAAM,IAAI,iBAAA;YACb,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;aACvD;iBAAM;gBACL,YAAY,IAAI,IAAI,CAAC;gBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;aACnB;SACF;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB;IACtC,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;KAC3B;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;IAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,6BAA0B,CAAC,CAAC;aAC9E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAI,UAAU,sBAAgB,KAAK,+BAA2B,CAAC,CAAC;aAChF;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YAC9C,IAAI,CAAC,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,KAAK,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACV,UAAU,sBAAgB,KAAK,gDAA4C,CAC/E,CAAC;aACH;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,8BAA2B,CAAC,CAAC;aAC/E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;gBACvB,UAAU,KAAK,UAAU;gBACzB,CAAC,CAAC,KAAK,YAAY,WAAW,CAAC;gBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC1B,CAAC,CAAC,OAAO,IAAI,KAAK,UAAU,IAAI,KAAK,YAAY,IAAI,CAAC,EACtD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,0GAAuG,CACrH,CAAC;aACH;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;IAClF,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CACb,uDAAqD,UAAU,sBAAmB,CACnF,CAAC;KACH;IACD,IAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,UAAC,IAAI;QACxC,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;SACnD;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;IACxB,CAAC,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACV,KAAK,kCAA6B,UAAU,gCAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,MAAG,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAG,MAAM,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC;KACvC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EA
AE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAClC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YACvC,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK;gBACH,KAAK,YAAY,IAAI;oBACnB,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;oBACtC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;SACtD;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,qBAAqB,CAAC,KAAK,IAAI,EAAE;YACzD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,gEAA6D,CAAC,CAAC;aAC7F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,wEAAqE;oBAChF,mDAAmD,CACtD,CAAC;aACH;YACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;SAC/B;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACV,UAAU,4DAAsD,KAAK,QAAI,CAC7E,CAAC;aACH;YACD,KAAK,GAAG,KAAK,CAAC;SACf;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB;IAElB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAI,UAAU,4BAAyB,CAAC,CAAC;KACzD;IACD,IAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,SAAS,GAAG,EAAE,CAAC;IACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;KACzE;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB;IAElB,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,6BAA0B,CAAC,CAAC;KAC1D;IACD,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IACpC,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAkB,UAAmB,EAAnB,KAAA,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAnB,cAAmB,EAAnB,IAAmB,EAAE;QAAlC,IAAM,GAAG,SAAA;QACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;KAC5F;IACD,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;GAIG;AACH,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;QACxC,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CACb,4BAAyB,U
AAU,2CAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,QAAI,CACN,CAAC;SACH;QAED,IAAM,WAAW,GAAG,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;QACvD,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,sDAAmD,SAAS,QAAI,CAAC,CAAC;SACnF;QACD,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,qDAAqD;iBACnD,cAAW,IAAI,CAAC,SAAS,CACvB,WAAW,CACZ,qBAAc,SAAS,wBAAiB,UAAU,QAAI,CAAA,CAC1D,CAAC;SACH;KACF;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB;;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;KACzE;IAED,IAAI,MAAM,IAAI,SAAS,EAAE;QACvB,IAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;YAAtC,IAAM,GAAG,SAAA;YACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;aACV;YAED,IAAI,QAAQ,SAAoB,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;iBACnC;qBAAM;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;iBACpE;aACF;iBAAM;gBACL,IAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;gBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;gBAEvB,KAAuB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;oBAAzB,IAAM,QAAQ,cAAA;oBACjB,IAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IAAI,WAAW,IAAI,SAAS,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;wBACxD,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;qBAC7B;oBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;iBACvC;aACF;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;oBAClC,CAAC,CAAC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;oBAClD,CAAC,CAAC,UAAU,CAAC;gBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;gBAC5F,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;oBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;iBACrC;gBAED,IAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,CACnB,CAAC;gBACF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;oBAC1D,IAAI,cAAc,CAAC,cAAc,EAAE;wBACjC,oDAAoD;wBACpD,2DAA2D;wBAC3D,gCAAgC;wBAChC,YAAY,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,CAAC;wBACtC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC5C;yBAAM,IAAI,cAAc,CAAC,YAAY,EAAE;wBACtC,YAAY,CAAC,QAAQ,CAAC,aAAK,GAAC,cAAc,CAAC,cAAe,IAAG,eAAe,KAAE,CAAC;qBAChF;yBAAM;wBACL,YAAY,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC1C;iBACF;aACF;SACF;QAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;QACpE,IAAI,0BAA0B,EAAE;YAC9B,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;oCAC/B,cAAc;gBACvB,IAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,UAAC,EAAE,IAAK,OAAA,EAAE,KAAK,cAAc,EAArB,CAAqB,CAAC,CAAC;gBAC5E,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,CAC1C,CAAC;iBACH;;YARH,KAAK,IAAM,cAAc,IAAI,MAAM;wBAAxB,cAAc;aASxB;SACF;QAED,OAAO,OAAO,CAAC;KAChB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,oBAAoB,CAAC,YAAoB;IAChD,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAC3C,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;KACnF;IAED,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,IAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;QAAtC,IA
AM,GAAG,SAAA;QACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5B,IAAA,cAAc,GAA8B,cAAc,eAA5C,EAAE,OAAO,GAAqB,cAAc,QAAnC,EAAE,cAAc,GAAK,cAAc,eAAnB,CAAoB;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;QACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;YACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;SACxD;QAED,IAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;QAC3F,IAAI,sBAAsB,EAAE;YAC1B,IAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAwB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAA9C,IAAM,SAAS,SAAA;gBAClB,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;oBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,CACnB,CAAC;iBACH;gBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;aACtC;YACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;SAC5B;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,CAAC,EAAE;gBACnD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,CAAC,CAAC,OAAQ,CAAC,EACxB,kBAAkB,CACnB,CAAC;aACH;iBAAM;gBACL,IAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,iBAAiB,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;gBACpD,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,iBAAiB,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;oBAC3C,iBAAiB,GAAG,iBAAiB,IAAI,iBAAiB,CAAC,cAAe,CAAC,CAAC;oBAE5E,IAAM,kBAAkB,GAAG,iBAAiB,KAAK,SAAS,CAAC;oBAC3D,IAAI,kBAAkB,EAAE;wBACtB,iBAAiB,GAAG,EAAE,CAAC;qBACxB;iBACF;gBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,iBAAiB,EACjB,kBAAkB,CACnB,CAAC;aACH;SACF;aAAM;YACL,kFAAkF;YAClF,IAAI,gBAAgB,SAAA,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;YACvB,sCAAsC;YACtC,KAAmB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;gBAArB,IAAM,IAAI,cAAA;gBACb,IAAI,CAAC,GAAG;oBAAE,MAAM;gBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;aACjB;YACD,gBAAgB,GAAG,GAAG,CAAC;YACvB,IAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;YACtE,sEAAsE;YACtE,yEAAyE;YACzE,kFAAkF;YAClF,kFAAkF;YAClF,gGAAgG;YAChG,8FAA8F;YAC9F,qFAAqF;YACrF,mFAAmF;YACnF,sFAAsF;YACtF,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;gBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;aAC1C;YAED,IAAI,eAAe,SAAA,CAAC;YACpB,SAAS;YACT,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;gBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;gBACrC,IAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;gBACF,yFAAyF;gBACzF,6CAA6C;gBAC7C,KAA2B,UAAwB,EAAxB,KAAA,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oBAA1C,IAAA,WAAY,EAAX,KAAG,QAAA,EAAE,KAAK,QAAA;oBACpB,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,KAAG,CAAC,EAAE;wBACtC,aAAa,CAAC,KAAG,CAAC,GAAG,KAAK,CAAC;qBAC5B;iBACF;gBACD,QAAQ,GAAG,aAAa,CAAC;aAC1B;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;gBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;gBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;aACjC;SACF;KACF;IAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IACpE,IAAI,0BAA0B,EAAE;QAC9B,IAAM,oBAAoB,GAAG,UAAC,gBAAwB;YACpD,KAAK,IAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;gBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;oBACjC,OAAO,KAAK,CAAC;iBACd;aACF;YACD,OAAO,IAAI,CAAC;QACd,CAAC,CAAC;QAEF,KAAK,IAAM,gBAAgB,IAAI,YAAY,EAAE;YAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,CAC5C,CAAC;aACH;SACF;KACF;SAAM,IAAI,YAAY,EAAE;QACvB,KAAkB,UAAyB,E
AAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;gBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;gBACnC,CAAC,oBAAoB,CAAC,GAAG,CAAC,EAC1B;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;aACnC;SACF;KACF;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB;IAElB,2BAA2B;IAC3B,IAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;SACpF;QACD,OAAO,cAAc,CAAC;KACvB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB;IAElB,2BAA2B;IAC3B,IAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;YAChC,+FAA+F;YAC/F,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,IAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,EAAK,UAAU,SAAI,CAAC,MAAG,CAAC,CAAC;SACxF;QACD,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;IAExD,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC5F,IAAI,wBAAwB,EAAE;QAC5B,IAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;YAClC,IAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;gBACnC,IAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;gBACjE,IAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;oBAC7B,CAAC,CAAC,kBAAkB;oBACpB,CAAC,CAAC,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,IAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;gBACrF,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;iBAC5B;aACF;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;IAEvB,OAAO,CACL,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CACrE,CAAC;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;IAClF,OAAO,CACL,QAAQ;QACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,CAChE,CAAC;AACJ,CAAC;AAoHD,0BAA0B;AAC1B,MAAM,UAAU,eAAe,CAAC,WAAgB;IAC9C,IAAI,WAAW,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;QACrC,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAClD,OAAO,WAAW,CAAC;KACpB;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;QACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;KAClC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,IAAM,KAAK,GAAG,EAAE,CAAC;QACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QACD,OAAO,KAAK,CAAC;KACd;SAAM,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,IAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,KAAK,IAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;SAC/D;QACD,OAAO,UAAU,CAAC;KACnB;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,SAAS,OAAO,CAAmB,CAAW;IAC5C,IAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAkB,UAAC,EAAD,OAAC,EAAD,eAAC,EAAD,IAAC,EAAE;QAAhB,IAAM,GAAG,UAAA;Q
ACZ,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,CAAC,IAAM,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;CACX,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts new file mode 100644 index 00000000..0a54a863 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts @@ -0,0 +1,168 @@ +/// +import { TokenCredential } from "@azure/core-auth"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { OperationArguments } from "./operationArguments"; +import { ParameterPath } from "./operationParameter"; +import { OperationSpec } from "./operationSpec"; +import { DeserializationContentTypes } from "./policies/deserializationPolicy"; +import { RedirectOptions } from "./policies/redirectPolicy"; +import { RequestPolicyFactory } from "./policies/requestPolicy"; +import { Mapper, Serializer } from "./serializer"; +import { RequestPrepareOptions, WebResourceLike } from "./webResource"; +import { OperationResponse } from "./operationResponse"; +import { ServiceCallback } from "./util/utils"; +import { Agent } from "http"; +/** + * HTTP proxy settings (Node.js only) + */ +export interface ProxySettings { + host: string; + port: number; + username?: string; + password?: string; +} +/** + * HTTP and HTTPS agents (Node.js only) + */ +export interface AgentSettings { + http: Agent; + https: Agent; +} +/** + * Options to be provided while creating the client. + */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: RequestPolicyFactory[] | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. + */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. 
+ */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-command-name" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only). + */ + agentSettings?: AgentSettings; + /** + * If specified: + * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient. + * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. Otherwise, the scope would default to "https://management.azure.com/.default". + * + * If it is not specified: + * - All OperationSpecs must contain a baseUrl property. + * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be "https://management.azure.com/.default". + */ + baseUri?: string; +} +/** + * @class + * Initializes a new instance of the ServiceClient. + */ +export declare class ServiceClient { + /** + * The base URI against which requests will be made when using this ServiceClient instance. + * + * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient. + * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default "https://management.azure.com/.default" + * + * If it is not specified, all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + protected requestContentType?: string; + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient; + private readonly _requestPolicyOptions; + private readonly _requestPolicyFactories; + private readonly _withCredentials; + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + constructor(credentials?: ServiceClientCredentials | TokenCredential, options?: ServiceClientOptions); + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise; + /** + * Send an HTTP request that is populated using the provided OperationSpec. 
+ * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. + */ + sendOperationRequest(operationArguments: OperationArguments, operationSpec: OperationSpec, callback?: ServiceCallback): Promise; +} +export declare function serializeRequestBody(serviceClient: ServiceClient, httpRequest: WebResourceLike, operationArguments: OperationArguments, operationSpec: OperationSpec): void; +export declare type PropertyParent = { + [propertyName: string]: any; +}; +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export declare function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent; +export declare function getOperationArgumentValueFromParameterPath(serviceClient: ServiceClient, operationArguments: OperationArguments, parameterPath: ParameterPath, parameterMapper: Mapper, serializer: Serializer): any; +export declare function flattenResponse(_response: HttpOperationResponse, responseSpec: OperationResponse | undefined): RestResponse; +//# sourceMappingURL=serviceClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts.map new file mode 100644 index 00000000..26684286 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.d.ts","sourceRoot":"","sources":["../../lib/serviceClient.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,eAAe,EAAqB,MAAM,kBAAkB,CAAC;AACtE,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAElF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAC9E,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAIL,aAAa,EACd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAqB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AACnE,OAAO,EAEL,2BAA2B,EAC5B,MAAM,kCAAkC,CAAC;AAQ1C,OAAO,EAA0B,eAAe,EAAkB,MAAM,2BAA2B,CAAC;AACpG,OAAO,EAEL,oBAAoB,EAGrB,MAAM,0BAA0B,CAAC;AAKlC,OAAO,EAAqC,MAAM,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AAIjG,OAAO,EAEL,qBAAqB,EACrB,eAAe,EAGhB,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,eAAe,EAAE,MAAM,cAAc,CAAC;AAI/C,OAAO,EAAE,KAAK,EAAE,MAAM,MAAM,CAAC;AAM7B;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,KAAK,CAAC;IACZ,KAAK,EAAE,KAAK,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,sBAAsB,CAAC,EACnB,oBAAoB,EAAE,GACtB,CAAC,CAAC,6BAA6B,EAAE,oBAAoB,EAAE,KAAK,IAAI,GAAG,oBAAoB,EAAE,CAAC,CAAC;IAC/F;;OAEG;IACH,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB;;OAEG;IACH,kBAAkB,CAAC,EAAE,kBAAkB,CAAC;IACxC;;OAEG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB;;OAEG;IACH,0BAA0B,CAAC,EAAE,MAAM,CAAC;IACpC;;OAEG;IACH,6BAA6B,CAAC,EAAE,OAAO,CAAC;IACxC;;;OAGG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B;;;OAGG;IACH,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC;;OAEG;IACH,2BAA2B,CAAC,EAAE,2BAA2B,CAAC;IAC1D;;;;OAIG;IACH,mBAAmB,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,0BAA0B,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAChF;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,gBAAgB,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAC5D;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,CAAC,EAAE,eAAe,CAAC;IAClC;;OAEG;IACH,aAAa,CAAC,EA
AE,aAAa,CAAC;IAC9B;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;;GAGG;AACH,qBAAa,aAAa;IACxB;;;;;;;OAOG;IACH,SAAS,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE3B;;;OAGG;IACH,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAEtC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAAC,QAAQ,CAAC,qBAAqB,CAA2B;IAEjE,OAAO,CAAC,QAAQ,CAAC,uBAAuB,CAAyB;IACjE,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAU;IAE3C;;;;;OAKG;gBAED,WAAW,CAAC,EAAE,wBAAwB,GAAG,eAAe,EACxD,OAAO,CAAC,EAAE,oBAAoB;IAiDhC;;OAEG;IACH,WAAW,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IA8B7F;;;;;OAKG;IACH,oBAAoB,CAClB,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,QAAQ,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,GAC9B,OAAO,CAAC,YAAY,CAAC;CAkMzB;AAED,wBAAgB,oBAAoB,CAClC,aAAa,EAAE,aAAa,EAC5B,WAAW,EAAE,eAAe,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,GAC3B,IAAI,CAsEN;AAkFD,oBAAY,cAAc,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,CAAC;AAE7D;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,cAAc,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,cAAc,CAYhG;AAiBD,wBAAgB,0CAA0C,CACxD,aAAa,EAAE,aAAa,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,eAAe,EAAE,MAAM,EACvB,UAAU,EAAE,UAAU,GACrB,GAAG,CAkEL;AA6BD,wBAAgB,eAAe,CAC7B,SAAS,EAAE,qBAAqB,EAChC,YAAY,EAAE,iBAAiB,GAAG,SAAS,GAC1C,YAAY,CAqEd"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js new file mode 100644 index 00000000..89b329bb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js @@ -0,0 +1,488 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __assign, __spreadArrays } from "tslib"; +import { isTokenCredential } from "@azure/core-auth"; +import { DefaultHttpClient } from "./defaultHttpClient"; +import { getPathStringFromParameter, getPathStringFromParameterPath, } from "./operationParameter"; +import { isStreamOperation } from "./operationSpec"; +import { deserializationPolicy, } from "./policies/deserializationPolicy"; +import { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +import { userAgentPolicy, getDefaultUserAgentHeaderName, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +import { DefaultRedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +import { RequestPolicyOptions, } from "./policies/requestPolicy"; +import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; +import { signingPolicy } from "./policies/signingPolicy"; +import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { MapperType } from "./serializer"; +import { URLBuilder } from "./url"; +import * as utils from "./util/utils"; +import { stringifyXML } from "./util/xml"; +import { isWebResourceLike, WebResource, } from "./webResource"; +import { agentPolicy } from "./policies/agentPolicy"; +import { proxyPolicy, getDefaultProxySettings } from "./policies/proxyPolicy"; +import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +import { AzureIdentityCredentialAdapter, azureResourceManagerEndpoints, } from "./credentials/azureIdentityTokenCredentialAdapter"; +/** + * @class + * Initializes a new instance of the ServiceClient. 
+ */ +var ServiceClient = /** @class */ (function () { + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + function ServiceClient(credentials, options) { + if (!options) { + options = {}; + } + if (options.baseUri) { + this.baseUri = options.baseUri; + } + var serviceClientCredentials; + if (isTokenCredential(credentials)) { + var scope = undefined; + if ((options === null || options === void 0 ? void 0 : options.baseUri) && azureResourceManagerEndpoints.includes(options === null || options === void 0 ? void 0 : options.baseUri)) { + scope = options.baseUri + "/.default"; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } + else { + serviceClientCredentials = credentials; + } + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new DefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + var requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } + else { + requestPolicyFactories = createDefaultRequestPolicyFactories(serviceClientCredentials, options); + if (options.requestPolicyFactories) { + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + ServiceClient.prototype.sendRequest = function (options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + var httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + var httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + }; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. 
+ */ + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + var httpRequest = new WebResource(); + var result; + try { + var baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + var requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (var _i = 0, _a = operationSpec.urlParameters; _i < _a.length; _i++) { + var urlParameter = _a[_i]; + var urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter)); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (var _b = 0, _c = operationSpec.queryParameters; _b < _c.length; _b++) { + var queryParameter = _c[_b]; + var queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } + else { + for (var index in queryParameterValue) { + var item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (var index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + var contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (var _d = 0, _e = operationSpec.headerParameters; _d < _e.length; _d++) { + var headerParameter = _e[_d]; + var headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + var headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (var _f = 0, _g = Object.keys(headerValue); _f < _g.length; _f++) { + var key = _g[_f]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + var options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (var customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + result = this.sendRequest(httpRequest).then(function (res) { + return flattenResponse(res, operationSpec.responses[res.status]); + }); + } + catch (error) { + result = Promise.reject(error); + } + var cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) { return cb(err); }); + } + return 
result; + }; + return ServiceClient; +}()); +export { ServiceClient }; +export function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); + var isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(utils.prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (var _i = 0, _a = operationSpec.formDataParameters; _i < _a.length; _i++) { + var formDataParameter = _a[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue != undefined) { + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter)); + } + } + } +} +function isRequestPolicyFactory(instance) { + return typeof instance.create === "function"; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + var result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(credentials, options) { + var factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } + else { + factories.push(signingPolicy(credentials)); + } + } + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + var redirectOptions = __assign(__assign({}, 
DefaultRedirectOptions), options.redirectOptions); + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + var proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy(proxySettings)); + } + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + return factories; +} +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export function getPropertyParent(parent, propertyPath) { + if (parent && propertyPath) { + var propertyPathLength = propertyPath.length; + for (var i = 0; i < propertyPathLength - 1; ++i) { + var propertyName = propertyPath[i]; + if (!parent[propertyName]) { + parent[propertyName] = {}; + } + parent = parent[propertyName]; + } + } + return parent; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +export function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + var useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. 
+ var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + var result = { propertyFound: false }; + var i = 0; + for (; i < parameterPath.length; ++i) { + var parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +export function flattenResponse(_response, responseSpec) { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + var typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + var parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : []; + var arrayResponse = __spreadArrays(parsedBody); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + utils.isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(__assign(__assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js.map new file mode 100644 index 00000000..fd4afe62 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.js","sourceRoot":"","sources":["../../lib/serviceClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAmB,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AAEtE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAKxD,OAAO,EACL,0BAA0B,EAC1B,8BAA8B,GAG/B,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAiB,MAAM,iBAAiB,CAAC;AACnE,OAAO,EACL,qBAAqB,GAEtB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EACL,eAAe,EACf,6BAA6B,EAC7B,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,sBAAsB,EAAmB,cAAc,EAAE,MAAM,2BAA2B,CAAC;AACpG,OAAO,EAGL,oBAAoB,GAErB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAA6C,UAAU,EAAc,MAAM,cAAc,CAAC;AACjG,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AACnC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AAC1C,OAAO,EAIL,iBAAiB,EACjB,WAAW,GACZ,MAAM,eAAe,CAAC;AAGvB,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,WAAW,EAAE,uBAAuB,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AAEzE,OAAO,EACL,8BAA8B,EAC9B,6BAA6B,GAC9B,MAAM,mDAAmD,CAAC;AAqG3D;;;GAGG;AACH;IA0BE;;;;;OAKG;IACH,uBACE,WAAwD,EACxD,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;SAChC;QAED,IAAI,wBAA8D,CAAC;QACnE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;YAClC,IAAI,KAAK,GAAuB,SAAS,CAAC;YAC1C,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,6BAA6B,CAAC,QAAQ,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC,EAAE;gBAChF,KAAK,GAAM,OAAO,CAAC,OAAO,cAAW,CAAC;aACvC;YACD,wBAAwB,GAAG,IAAI,8BAA8B,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;SACnF;aAAM;YACL,wBAAwB,GAAG,WAAW,CAAC;SACxC;QAED,IAAI,wBAAwB,IAAI,CAAC,wBAAwB,CAAC,WAAW,EAAE;YACrE,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,IAAI,iBAAiB,EAAE,CAAC;QACjE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElF,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAA
sB,CAAC,EAAE;YACjD,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;SACzD;aAAM;YACL,sBAAsB,GAAG,mCAAmC,CAC1D,wBAAwB,EACxB,OAAO,CACR,CAAC;YACF,IAAI,OAAO,CAAC,sBAAsB,EAAE;gBAClC,IAAM,yBAAyB,GAEF,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;gBACpF,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;iBACpD;aACF;SACF;QACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;IACxD,CAAC;IAED;;OAEG;IACH,mCAAW,GAAX,UAAY,OAAgD;QAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,IAAI,WAA4B,CAAC;QACjC,IAAI;YACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;aACvB;iBAAM;gBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;gBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC5C;SACF;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC9B;QAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;gBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;aACH;SACF;QACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,4CAAoB,GAApB,UACE,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;QAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;YACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;YACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;SACxC;QAED,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;QAEtC,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,IAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;aACH;YAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;YAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,IAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;gBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;aAC3C;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBACzE,KAA2B,UAA2B,EAA3B,KAAA,aAAa,CAAC,aAAa,EAA3B,cAA2B,EAA3B,IAA2B,EAAE;oBAAnD,IAAM,YAAY,SAAA;oBACrB,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,CACzC,CAAC;oBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;wBAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;qBAC3D;oBACD,UAAU,CAAC,UAAU,CACnB,OAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,OAAG,EACrF,iBAAiB,CAClB,CAAC;iBACH;aACF;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC7E,KAA6B,UAA6B,EAA7B,KAAA,aAAa,CAAC,eAAe,EAA7B,cAA6B,EAA7B,IAA6B,EAAE;oBAAvD,IAAM,cAAc,SAAA;oBACvB,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,mBAAmB,IAAI,SAAS,EAAE;wBACpC,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,CAC3C,CAAC;wBACF,IAAI,cAAc,CAAC,gBAAgB,IAAI,SAAS,EAAE;4BAChD,IAAI,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK,EAAE;gCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;oCACpC,mBAAmB,GAAG,EAAE,CAAC;iCAC1B;qCAAM;oCACL,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,IAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;qCACvE;iCACF;6BACF;iCAAM,IACL,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;yBACF;wBACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;4BAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;gCACtC,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;oCACvC,IA
CE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;wCACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;qCAC7E;iCACF;6BACF;iCAAM;gCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;6BAC/D;yBACF;wBACD,IACE,cAAc,CAAC,gBAAgB,IAAI,SAAS;4BAC5C,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK;4BAC/D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;4BAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;yBACjF;wBACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;qBACH;iBACF;aACF;YACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,IAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;YACzE,IAAI,WAAW,EAAE;gBACf,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;aACtD;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;gBAClC,KAA8B,UAA8B,EAA9B,KAAA,aAAa,CAAC,gBAAgB,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;oBAAzD,IAAM,eAAe,SAAA;oBACxB,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,WAAW,IAAI,SAAS,EAAE;wBAC5B,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,CAC5C,CAAC;wBACF,IAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;6BACxE,sBAAsB,CAAC;wBAC1B,IAAI,sBAAsB,EAAE;4BAC1B,KAAkB,UAAwB,EAAxB,KAAA,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;gCAAvC,IAAM,GAAG,SAAA;gCACZ,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;6BACzE;yBACF;6BAAM;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;gCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;yBACH;qBACF;iBACF;aACF;YAED,IAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;YAC3E,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;oBACzB,KAAK,IAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;wBACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;qBACpF;iBACF;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBAC/C;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;oBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;iBACvC;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;oBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;iBACzD;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;oBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;iBAC7D;aACF;YAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;YAE3E,IAAI,WAAW,CAAC,kBAAkB,IAAI,SAAS,EAAE;gBAC/C,WAAW,CAAC,kBAAkB,GAAG,iBAAiB,CAAC,aAAa,CAAC,CAAC;aACnE;YAED,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;gBAC9C,OAAA,eAAe,CAAC,GAAG,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAAzD,CAAyD,CAC1D,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAChC;QAED,IAAM,EAAE,GAAG,QAAQ,CAAC;QACpB,IAAI,EAAE,EAAE;YACN,MAAM;gBACJ,2CAA2C;iBAC1C,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,EAAxE,CAAwE,CAAC;iBACvF,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,GAAG,CAAC,EAAP,CAAO,CAAC,CAAC;SAC5B;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,oBAAC;AAAD,CAAC,AAhUD,IAgUC;;AAED,MAAM,UAAU,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;IAE5B,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;QACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;QAEF,IAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;QAC5C,IAAA,QAAQ,GAA8C,UAAU,SAAxD,EAAE,OAAO,GAAqC,UAAU,QAA/C,EAAE,cAAc,GAAqB,UAAU,eAA/B,EAAE,cAAc,GAAK,UAAU,eAAf,CAAgB;QACzE,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI;YACF,IAAI,WA
AW,CAAC,IAAI,IAAI,SAAS,IAAI,QAAQ,EAAE;gBAC7C,IAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;gBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,CAC/B,CAAC;gBACF,IAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAChD,IAAI,aAAa,CAAC,KAAK,EAAE;oBACvB,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7B,KAAK,CAAC,kBAAkB,CACtB,WAAW,CAAC,IAAI,EAChB,cAAc,IAAI,OAAO,IAAI,cAAe,CAC7C,EACD,EAAE,QAAQ,EAAE,OAAO,IAAI,cAAc,EAAE,CACxC,CAAC;qBACH;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE;4BAChD,QAAQ,EAAE,OAAO,IAAI,cAAc;yBACpC,CAAC,CAAC;qBACJ;iBACF;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;iBACrD;aACF;SACF;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,IAAI,KAAK,CACb,aAAU,KAAK,CAAC,OAAO,iDAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,MAAG,CACL,CAAC;SACH;KACF;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;QAC1B,KAAgC,UAAgC,EAAhC,KAAA,aAAa,CAAC,kBAAkB,EAAhC,cAAgC,EAAhC,IAAgC,EAAE;YAA7D,IAAM,iBAAiB,SAAA;YAC1B,IAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;YACF,IAAI,sBAAsB,IAAI,SAAS,EAAE;gBACvC,IAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,CAC9C,CAAC;aACH;SACF;KACF;AACH,CAAC;AAED,SAAS,sBAAsB,CAAC,QAAa;IAC3C,OAAO,OAAO,QAAQ,CAAC,MAAM,KAAK,UAAU,CAAC;AAC/C,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;IAEjC,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,GAAG,KAAK,CAAC;KAChB;SAAM;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;QAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;YAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;SACxB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,WAAwE,EACxE,OAA6B;IAE7B,IAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;KAClF;IAED,IAAI,WAAW,EAAE;QACf,IAAI,sBAAsB,CAAC,WAAW,CAAC,EAAE;YACvC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC7B;aAAM;YACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC;SAC5C;KACF;IAED,IAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,IAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;QAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;KAC5F;IAED,IAAM,eAAe,yBAChB,sBAAsB,GACtB,OAAO,CAAC,eAAe,CAC3B,CAAC;IACF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,SAAS,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;KAC5D;IAED,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;QAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;KACzC;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;IAE3E,IAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,uBAAuB,EAAE,CAAC;IACzE,IAAI,aAAa,EAAE;QACjB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC,CAAC;KAC5C;IAED,IAAI,OAAO,CAAC,aAAa,EAAE;QACzB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;KACpD;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAID;;;GAGG;AACH,MAAM,UAAU,iBAAiB,CAAC,MAAsB,EAAE,YAAsB;IAC9E,IAAI,MAAM,IAAI,YAAY,EAAE;QAC1B,IAAM,kBAAkB,GAAW,YAAY,CAAC,MAAM,CAAC;QACvD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE;YAC/C,IAAM,YAAY,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;YAC7C,IAAI,CA
AC,MAAM,CAAC,YAAY,CAAC,EAAE;gBACzB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;aAC3B;YACD,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC,CAAC;SAC/B;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;IAEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;IAEtB,IAAI,KAAU,CAAC;IACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;KACjC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;gBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;aACtC;iBAAM;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;gBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;iBACnF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;gBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;wBACb,eAAe,CAAC,QAAQ;4BACxB,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;iBAClE;gBACD,KAAK,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC;aAC7F;YAED,0CAA0C;YAC1C,IAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,CAAC,CAAC;SACnE;KACF;SAAM;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;SACZ;QAED,KAAK,IAAM,YAAY,IAAI,aAAa,EAAE;YACxC,IAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;YACF,IAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;YAChE,IAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;YACF,0CAA0C;YAC1C,IAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;YACxE,IAAI,aAAa,KAAK,SAAS,EAAE;gBAC/B,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;iBACZ;gBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;aACrC;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;IAEvB,IAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACpC,IAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;QACnD,8EAA8E;QAC9E,IAAI,MAAM,IAAI,SAAS,IAAI,iBAAiB,IAAI,MAAM,EAAE;YACtD,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;SACpC;aAAM;YACL,MAAM;SACP;KACF;IACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;QAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;QAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC7B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,eAAe,CAC7B,SAAgC,EAChC,YAA2C;IAE3C,IAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;IAC9C,IAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;IAE3D,IAAM,oBAAoB,GAAG,UAAC,GAAO;QACnC,OAAA,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;YACtC,KAAK,EAAE,SAAS;SACjB,CAAC;IAFF,CAEE,CAAC;IAEL,IAAI,UAAU,EAAE;QACd,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,OAAO,oBAAoB,uBACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;SACJ;QAED,IAAM,iBAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC;QAC3F,IAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,CAAC,IAAI,CAC1D,UAAC,CAAC,IAAK,OAAA,iBAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,EAAxC,CAAwC,CAChD,CAAC;QACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;YACjD,8EAA8E;YAC9E,mDAAmD;YACnD,IAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC;YACnF,IAAM,aAAa,GAAG,eAAI,UAAU,CAAyB,CAAC;YAE9D,KAAkB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAA3C,IAAM,GAAG,SAAA;gBACZ,IAAI,iBAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CA
AC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;iBAChD;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,KAAkB,UAA0B,EAA1B,KAAA,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAA1B,cAA0B,EAA1B,IAA0B,EAAE;oBAAzC,IAAM,GAAG,SAAA;oBACZ,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;iBACzC;aACF;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;YACpC,OAAO,aAAa,CAAC;SACtB;QAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;SACJ;KACF;IAED,IACE,UAAU;QACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;QACnC,KAAK,CAAC,eAAe,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;QACA,yCAAyC;QACzC,OAAO,oBAAoB,uBACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;KACJ;IAED,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.d.ts b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts new file mode 100644 index 00000000..710361f8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts @@ -0,0 +1,149 @@ +/** + * A class that handles the query portion of a URLBuilder. + */ +export declare class URLQuery { + private readonly _rawQuery; + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any(): boolean; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName: string, parameterValue: any): void; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName: string): string | string[] | undefined; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString(): string; + /** + * Parse a URLQuery from the provided text. + */ + static parse(text: string): URLQuery; +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export declare class URLBuilder { + private _scheme; + private _host; + private _port; + private _path; + private _query; + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme: string | undefined): void; + /** + * Get the scheme that has been set in this URL. + */ + getScheme(): string | undefined; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host: string | undefined): void; + /** + * Get the host that has been set in this URL. + */ + getHost(): string | undefined; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port: number | string | undefined): void; + /** + * Get the port that has been set in this URL. + */ + getPort(): string | undefined; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path: string | undefined): void; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path: string | undefined): void; + /** + * Get the path that has been set in this URL. 
+ */ + getPath(): string | undefined; + /** + * Set the query in this URL. + */ + setQuery(query: string | undefined): void; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName: string, queryParameterValue: any): void; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName: string): string | string[] | undefined; + /** + * Get the query in this URL. + */ + getQuery(): string | undefined; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set; + toString(): string; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue: string, replaceValue: string): void; + static parse(text: string): URLBuilder; +} +declare type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; +declare type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; +export declare class URLToken { + readonly text: string; + readonly type: URLTokenType; + constructor(text: string, type: URLTokenType); + static scheme(text: string): URLToken; + static host(text: string): URLToken; + static port(text: string): URLToken; + static path(text: string): URLToken; + static query(text: string): URLToken; +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export declare function isAlphaNumericCharacter(character: string): boolean; +/** + * A class that tokenizes URL strings. + */ +export declare class URLTokenizer { + readonly _text: string; + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + constructor(_text: string, state?: URLTokenizerState); + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current(): URLToken | undefined; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. 
+ */ + next(): boolean; +} +export {}; +//# sourceMappingURL=url.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts.map new file mode 100644 index 00000000..0c8174a5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"url.d.ts","sourceRoot":"","sources":["../../lib/url.ts"],"names":[],"mappings":"AAOA;;GAEG;AACH,qBAAa,QAAQ;IACnB,OAAO,CAAC,QAAQ,CAAC,SAAS,CAA2D;IAErF;;OAEG;IACI,GAAG,IAAI,OAAO;IAIrB;;;;OAIG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,EAAE,cAAc,EAAE,GAAG,GAAG,IAAI;IAW5D;;;OAGG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIhE;;OAEG;IACI,QAAQ,IAAI,MAAM;IAoBzB;;OAEG;WACW,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CA0D5C;AAED;;GAEG;AACH,qBAAa,UAAU;IACrB,OAAO,CAAC,OAAO,CAAqB;IACpC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,MAAM,CAAuB;IAErC;;;OAGG;IACI,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQlD;;OAEG;IACI,SAAS,IAAI,MAAM,GAAG,SAAS;IAItC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQ9C;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,IAAI;IAQvD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAgB9C;;;OAGG;IACI,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAkBjD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;OAEG;IACI,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQhD;;;;OAIG;IACI,iBAAiB,CAAC,kBAAkB,EAAE,MAAM,EAAE,mBAAmB,EAAE,GAAG,GAAG,IAAI;IASpF;;;OAGG;IACI,sBAAsB,CAAC,kBAAkB,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIxF;;OAEG;IACI,QAAQ,IAAI,MAAM,GAAG,SAAS;IAIrC;;OAEG;IACH,OAAO,CAAC,GAAG;IAqCJ,QAAQ,IAAI,MAAM;IA6BzB;;;OAGG;IACI,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,IAAI;WAUpD,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,UAAU;CAK9C;AAED,aAAK,iBAAiB,GAAG,QAAQ,GAAG,gBAAgB,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,CAAC;AAEnG,aAAK,YAAY,GAAG,QAAQ,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;AAElE,qBAAa,QAAQ;aACgB,IAAI,EAAE,MAAM;aAAkB,IAAI,EAAE,YAAY;gBAAhD,IAAI,EAAE,MAAM,EAAkB,IAAI,EAAE,YAAY;WAErE,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI9B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CAG5C;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAOlE;AAED;;GAEG;AACH,qBAAa,YAAY;IAMJ,QAAQ,CAAC,KAAK,EAAE,MAAM;IALzC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,aAAa,EAAE,iBAAiB,CAAC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,QAAQ,GAAG,SAAS,CAAC;gBAER,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,iBAAiB;IAMpE;;;OAGG;IACI,OAAO,IAAI,QAAQ,GAAG,SAAS;IAItC;;OAEG;IACI,IAAI,IAAI,OAAO;CAmCvB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.js b/node_modules/@azure/ms-rest-js/es/lib/url.js new file mode 100644 index 00000000..0de4c160 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.js @@ -0,0 +1,595 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { replaceAll } from "./util/utils"; +/** + * A class that handles the query portion of a URLBuilder. + */ +var URLQuery = /** @class */ (function () { + function URLQuery() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. 
+ */ + URLQuery.prototype.any = function () { + return Object.keys(this._rawQuery).length > 0; + }; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + URLQuery.prototype.set = function (parameterName, parameterValue) { + if (parameterName) { + if (parameterValue != undefined) { + var newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + }; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + URLQuery.prototype.get = function (parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + }; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + var parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); + } + result += parameterStrings.join("&"); + } + else { + result += parameterName + "=" + parameterValue; + } + } + return result; + }; + /** + * Parse a URLQuery from the provided text. + */ + URLQuery.parse = function (text) { + var result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + }; + return URLQuery; +}()); +export { URLQuery }; +/** + * A class that handles creating, modifying, and parsing URLs. + */ +var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setScheme = function (scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + }; + /** + * Get the scheme that has been set in this URL. + */ + URLBuilder.prototype.getScheme = function () { + return this._scheme; + }; + /** + * Set the host for this URL. 
If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setHost = function (host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + }; + /** + * Get the host that has been set in this URL. + */ + URLBuilder.prototype.getHost = function () { + return this._host; + }; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setPort = function (port) { + if (port == undefined || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + }; + /** + * Get the port that has been set in this URL. + */ + URLBuilder.prototype.getPort = function () { + return this._port; + }; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + URLBuilder.prototype.setPath = function (path) { + if (!path) { + this._path = undefined; + } + else { + var schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + var schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + }; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + URLBuilder.prototype.appendPath = function (path) { + if (path) { + var currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + }; + /** + * Get the path that has been set in this URL. + */ + URLBuilder.prototype.getPath = function () { + return this._path; + }; + /** + * Set the query in this URL. + */ + URLBuilder.prototype.setQuery = function (query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + }; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + }; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + }; + /** + * Get the query in this URL. + */ + URLBuilder.prototype.getQuery = function () { + return this._query ? this._query.toString() : undefined; + }; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. 
+ */ + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + var token = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + var tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error("Unrecognized URLTokenType: " + token.type); + } + } + } + }; + URLBuilder.prototype.toString = function () { + var result = ""; + if (this._scheme) { + result += this._scheme + "://"; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += ":" + this._port; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += "?" + this._query.toString(); + } + return result; + }; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + }; + return URLBuilder; +}()); +export { URLBuilder }; +var URLToken = /** @class */ (function () { + function URLToken(text, type) { + this.text = text; + this.type = type; + } + URLToken.scheme = function (text) { + return new URLToken(text, "SCHEME"); + }; + URLToken.host = function (text) { + return new URLToken(text, "HOST"); + }; + URLToken.port = function (text) { + return new URLToken(text, "PORT"); + }; + URLToken.path = function (text) { + return new URLToken(text, "PATH"); + }; + URLToken.query = function (text) { + return new URLToken(text, "QUERY"); + }; + return URLToken; +}()); +export { URLToken }; +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export function isAlphaNumericCharacter(character) { + var characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. 
+ */ + URLTokenizer.prototype.current = function () { + return this._currentToken; + }; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + URLTokenizer.prototype.next = function () { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); + } + } + return !!this._currentToken; + }; + return URLTokenizer; +}()); +export { URLTokenizer }; +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + var result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ +function peekCharacters(tokenizer, charactersToPeek) { + var endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + var result = ""; + while (hasCurrentCharacter(tokenizer)) { + var currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. 
+ */ +function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); +} +function nextScheme(tokenizer) { + var scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + var host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + var port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + var path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + var query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} +//# sourceMappingURL=url.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.js.map b/node_modules/@azure/ms-rest-js/es/lib/url.js.map new file mode 100644 index 00000000..68d9e430 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"url.js","sourceRoot":"","sources":["../../lib/url.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAI1C;;GAEG;AACH;IAAA;QACmB,cAAS,GAAwD,EAAE,CAAC;IAqHvF,CAAC;IAnHC;;OAEG;IACI,sBAAG,GAAV;QACE,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;IAChD,CAAC;IAED;;;;OAIG;IACI,sBAAG,GAAV,UAAW,aAAqB,EAAE,cAAmB;QACnD,IAAI,aAAa,EAAE;YACjB,IAAI,cAAc,IAAI,SAAS,EAAE;gBAC/B,IAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,cAAc,CAAC,QAAQ,EAAE,CAAC;gBAC5F,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;aAC1C;iBAAM;gBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;aACtC;SACF;IACH,CAAC;IAED;;;OAGG;IACI,sBAAG,GAAV,UAAW,aAAqB;QAC9B,OAAO,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACnE,CAAC;IAED;;OAEG;IACI,2BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,KAAK,IAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;YAC1C,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;aACf;YACD,IAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,IAAM,gBAAgB,GAAG,EAAE,CAAC;gBAC5B,KAAoC,UAAc,EAAd,iCAAc,EAAd,4BAAc,EAAd,IAAc,EAAE;oBAA/C,IAAM,qBAAqB,uBAAA;oBAC9B,gBAAgB,CAAC,IAAI,CAAI,aAAa,SAAI,qBAAuB,CAAC,CAAC;iBACpE;gBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACtC;iBAAM;gBACL,MAAM,IAAO,aAAa,SAAI,cAAgB,CAAC;aAChD;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACW,cAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAE9B,IAAI,IAAI,EAAE;YACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC1B;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACpC,IAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;gBACzC,QAAQ,YAAY,EAAE;oBACpB,KAAK,eAAe;wBAClB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;4BAER,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;4BAER;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;yBACT;wBACD,MAAM;oBAER,KAAK,gBAAgB;wBACnB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;4BAER;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;yBACT;wBACD,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;iBACzE;aACF;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;gBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aAC3C;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,eAAC;AAAD,CAAC,AAtHD,IAsHC;;AAED;;GAEG;AACH;IAAA;IAiPA,CAAC;IA1OC;;;OAGG;IACI,8BAAS,GAAhB,UAAiB,MAA0B;QACzC,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;SAC1B;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SAC5B;IACH,CAAC;IAED;;OAEG;IACI,8BAAS,GAAhB;QACE,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAClC;IACH,CAAC;IAED;;OAEG;IACI,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,4BAAO,GAAd,UAAe,IAAiC;QAC9C,IAAI,IAAI,IAAI,SAAS,IAAI,IAAI,KAAK,EAAE,EAAE;YACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;SACnC;IACH,CAAC;IAED;;OAEG;IACI,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK
,CAAC,CAAC;YACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,IAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;gBACvD,0FAA0F;gBAC1F,2FAA2F;gBAC3F,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;aAC9E;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;IACH,CAAC;IAED;;;OAGG;IACI,+BAAU,GAAjB,UAAkB,IAAwB;QACxC,IAAI,IAAI,EAAE;YACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;YACrD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,GAAG,CAAC;iBACpB;gBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;aAC3B;YACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACxB;IACH,CAAC;IAED;;OAEG;IACI,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,6BAAQ,GAAf,UAAgB,KAAyB;QACvC,IAAI,CAAC,KAAK,EAAE;YACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;SACzB;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrC;IACH,CAAC;IAED;;;;OAIG;IACI,sCAAiB,GAAxB,UAAyB,kBAA0B,EAAE,mBAAwB;QAC3E,IAAI,kBAAkB,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;aAC9B;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;SAC1D;IACH,CAAC;IAED;;;OAGG;IACI,2CAAsB,GAA7B,UAA8B,kBAA0B;QACtD,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,6BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1D,CAAC;IAED;;OAEG;IACK,wBAAG,GAAX,UAAY,IAAY,EAAE,UAA6B;QACrD,IAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;YACvB,IAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;YACxD,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI,EAAE;oBAClB,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAM,SAAS,GAAuB,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBAC9D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;4BAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;yBACxB;wBACD,MAAM;oBAER,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,gCAA8B,KAAK,CAAC,IAAM,CAAC,CAAC;iBAC/D;aACF;SACF;IACH,CAAC;IAEM,6BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,MAAM,IAAO,IAAI,CAAC,OAAO,QAAK,CAAC;SAChC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,MAAI,IAAI,CAAC,KAAO,CAAC;SAC5B;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,MAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAI,CAAC;SACxC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;OAGG;IACI,+BAAU,GAAjB,UAAkB,WAAmB,EAAE,YAAoB;QACzD,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,I
AAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;SACvE;IACH,CAAC;IAEa,gBAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;QAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;QACnC,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,iBAAC;AAAD,CAAC,AAjPD,IAiPC;;AAMD;IACE,kBAAmC,IAAY,EAAkB,IAAkB;QAAhD,SAAI,GAAJ,IAAI,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAc;IAAG,CAAC;IAEzE,eAAM,GAApB,UAAqB,IAAY;QAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IACtC,CAAC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEa,cAAK,GAAnB,UAAoB,IAAY;QAC9B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IACrC,CAAC;IACH,eAAC;AAAD,CAAC,AAtBD,IAsBC;;AAED;;;GAGG;AACH,MAAM,UAAU,uBAAuB,CAAC,SAAiB;IACvD,IAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACtD,OAAO,CACL,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,CAAC,SAAS,CAClE,CAAC;AACJ,CAAC;AAED;;GAEG;AACH;IAME,sBAA4B,KAAa,EAAE,KAAyB;QAAxC,UAAK,GAAL,KAAK,CAAQ;QACvC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,gBAAgB,CAAC;QACnE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;IACzB,CAAC;IAED;;;OAGG;IACI,8BAAO,GAAd;QACE,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;OAEG;IACI,2BAAI,GAAX;QACE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;YAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;SAChC;aAAM;YACL,QAAQ,IAAI,CAAC,aAAa,EAAE;gBAC1B,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;gBAER,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBAER;oBACE,MAAM,IAAI,KAAK,CAAC,qCAAmC,IAAI,CAAC,aAAe,CAAC,CAAC;aAC5E;SACF;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;IACH,mBAAC;AAAD,CAAC,AA1DD,IA0DC;;AAED;;GAEG;AACH,SAAS,aAAa,CAAC,SAAuB;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;QAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;KACjD;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;GAGG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;IAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;SACV;QACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;KACjC;AACH,CAAC;AAED;;;GAGG;AACH,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;IACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;IAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;QACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;KAClC;IACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACrC,IAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;SACP;aAAM;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;GAGG;AACH,SAAS,sBAAsB,CAAC,SAAuB;IACrD,OAAO,SAAS,CAAC,SAAS,EAAE,UAAC,S
AAiB,IAAK,OAAA,uBAAuB,CAAC,SAAS,CAAC,EAAlC,CAAkC,CAAC,CAAC;AACzF,CAAC;AAED;;;GAGG;AACH,SAAS,kBAAkB,CAAC,SAAuB;IAAE,+BAAkC;SAAlC,UAAkC,EAAlC,qBAAkC,EAAlC,IAAkC;QAAlC,8CAAkC;;IACrF,OAAO,SAAS,CACd,SAAS,EACT,UAAC,SAAiB,IAAK,OAAA,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAA/C,CAA+C,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB;IACzC,IAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB;IAC/C,IAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;YACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;KACF;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;QAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;KAC7B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB;IACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts new file mode 100644 index 00000000..5c40c44b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. 
+ * @param value the Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts.map new file mode 100644 index 00000000..89074b25 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.d.ts","sourceRoot":"","sources":["../../../lib/util/base64.browser.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAMzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAOtD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js new file mode 100644 index 00000000..1c76f7b2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export function encodeString(value) { + return btoa(value); +} +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value) { + var str = ""; + for (var i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} +/** + * Decodes a base64 string into a byte array. 
+ * @param value the base64 string to decode + */ +export function decodeString(value) { + var byteString = atob(value); + var arr = new Uint8Array(byteString.length); + for (var i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} +//# sourceMappingURL=base64.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js.map new file mode 100644 index 00000000..47aea45f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.js","sourceRoot":"","sources":["../../../lib/util/base64.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC;AACrB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,IAAI,GAAG,GAAG,EAAE,CAAC;IACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACtC;IACD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AACnB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;IAC/B,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;KACnC;IACD,OAAO,GAAG,CAAC;AACb,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts new file mode 100644 index 00000000..1dfd806b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts.map new file mode 100644 index 00000000..3af7f47b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.d.ts","sourceRoot":"","sources":["../../../lib/util/base64.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAKzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAEtD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.js b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js new file mode 100644 index 00000000..6626510c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Encodes a string in base64 format. 
+ * @param value the string to encode + */ +export function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + var bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export function decodeString(value) { + return Buffer.from(value, "base64"); +} +//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js.map new file mode 100644 index 00000000..0ca9f545 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.js","sourceRoot":"","sources":["../../../lib/util/base64.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,kGAAkG;IAClG,mGAAmG;IACnG,IAAM,WAAW,GAAG,KAAK,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;IAC/F,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts new file mode 100644 index 00000000..aee2743d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts @@ -0,0 +1,94 @@ +export declare const Constants: { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: string; + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: string; + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: string; + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: string; + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: string; + /** + * Specifies NO Proxy. + */ + NO_PROXY: string; + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: string; + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: string; + GET: string; + DELETE: string; + POST: string; + MERGE: string; + HEAD: string; + PATCH: string; + }; + StatusCodes: { + TooManyRequests: number; + }; + }; + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: string; + AUTHORIZATION_SCHEME: string; + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: string; + /** + * The UserAgent header. 
+ * + * @const + * @type {string} + */ + USER_AGENT: string; + }; +}; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts.map new file mode 100644 index 00000000..4184fb19 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../lib/util/constants.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,SAAS;IACpB;;;;OAIG;;IAGH;;;;;OAKG;;IAGH;;;;;OAKG;;IAGH;;;;;OAKG;;IAGH;;;;;OAKG;;IAGH;;OAEG;;IAGH;;OAEG;;;QAID;;;;;WAKG;;;;;;;;;;;;;;IAgBL;;OAEG;;QAED;;;;;WAKG;;;QAKH;;;;;;;WAOG;;QAGH;;;;;WAKG;;;CAGN,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.js b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js new file mode 100644 index 00000000..8fd4942d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +export var Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.6.1", + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. 
+ * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js.map new file mode 100644 index 00000000..3118dae4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../lib/util/constants.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,MAAM,CAAC,IAAM,SAAS,GAAG;IACvB;;;;OAIG;IACH,aAAa,EAAE,OAAO;IAEtB;;;;;OAKG;IACH,IAAI,EAAE,OAAO;IAEb;;;;;OAKG;IACH,KAAK,EAAE,QAAQ;IAEf;;;;;OAKG;IACH,UAAU,EAAE,YAAY;IAExB;;;;;OAKG;IACH,WAAW,EAAE,aAAa;IAE1B;;OAEG;IACH,QAAQ,EAAE,UAAU;IAEpB;;OAEG;IACH,SAAS,EAAE,WAAW;IAEtB,aAAa,EAAE;QACb;;;;;WAKG;QACH,SAAS,EAAE;YACT,GAAG,EAAE,KAAK;YACV,GAAG,EAAE,KAAK;YACV,MAAM,EAAE,QAAQ;YAChB,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;SACf;QAED,WAAW,EAAE;YACX,eAAe,EAAE,GAAG;SACrB;KACF;IAED;;OAEG;IACH,eAAe,EAAE;QACf;;;;;WAKG;QACH,aAAa,EAAE,eAAe;QAE9B,oBAAoB,EAAE,QAAQ;QAE9B;;;;;;;WAOG;QACH,WAAW,EAAE,aAAa;QAE1B;;;;;WAKG;QACH,UAAU,EAAE,YAAY;KACzB;CACF,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts new file mode 100644 index 00000000..56eabdc7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts @@ -0,0 +1,157 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export declare const isNode: boolean; +/** + * Checks if a parsed URL is HTTPS + * + * @param {object} urlToCheck The url to check + * @return {boolean} True if the URL is HTTPS; false otherwise. + */ +export declare function urlIsHTTPS(urlToCheck: { + protocol: string; +}): boolean; +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +export declare function encodeUri(uri: string): string; +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +export declare function stripResponse(response: HttpOperationResponse): any; +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +export declare function stripRequest(request: WebResourceLike): WebResourceLike; +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +export declare function isValidUuid(uuid: string): boolean; +/** + * Provides an array of values of an object. For example + * for a given object { "a": "foo", "b": "bar" }, the method returns ["foo", "bar"]. + * + * @param {object} obj An object whose properties need to be enumerated so that it"s values can be provided as an array + * + * @return {any[]} An array of values of the given object. 
+ */ +export declare function objectValues(obj: { + [key: string]: any; +}): any[]; +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +export declare function generateUuid(): string; +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +export declare function executePromisesSequentially(promiseFactories: Array, kickstart: any): Promise; +/** + * Merges source object into the target object + * @param {object} source The object that needs to be merged + * + * @param {object} target The object to be merged into + * + * @returns {object} Returns the merged target object. + */ +export declare function mergeObjects(source: { + [key: string]: any; +}, target: { + [key: string]: any; +}): { + [key: string]: any; +}; +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +export declare function delay(t: number, value?: T): Promise; +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. + * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null. + * @param {TResult} [result] The deserialized response body if an error did not occur. + * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur. + * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur. + */ + (err: Error | RestError | null, result?: TResult, request?: WebResourceLike, response?: HttpOperationResponse): void; +} +/** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +export declare function promiseToCallback(promise: Promise): Function; +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +export declare function promiseToServiceCallback(promise: Promise): Function; +export declare function prepareXMLRootList(obj: any, elementName: string): { + [x: string]: any; +}; +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ +export declare function applyMixins(targetCtor: any, sourceCtors: any[]): void; +/** + * Indicates whether the given string is in ISO 8601 format. 
+ * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +export declare function isDuration(value: string): boolean; +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +export declare function replaceAll(value: string | undefined, searchValue: string, replaceValue: string): string | undefined; +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ +export declare function isPrimitiveType(value: any): boolean; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts.map new file mode 100644 index 00000000..ebb7618d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../lib/util/utils.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;GAEG;AACH,eAAO,MAAM,MAAM,SAIM,CAAC;AAE1B;;;;;GAKG;AACH,wBAAgB,UAAU,CAAC,UAAU,EAAE;IAAE,QAAQ,EAAE,MAAM,CAAA;CAAE,GAAG,OAAO,CAEpE;AAED;;;;;GAKG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAO7C;AAED;;;;;;;GAOG;AACH,wBAAgB,aAAa,CAAC,QAAQ,EAAE,qBAAqB,GAAG,GAAG,CAMlE;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,eAAe,GAAG,eAAe,CAMtE;AAED;;;;;;GAMG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAMjD;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,GAAG,GAAG,EAAE,CAkB/D;AAED;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAErC;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,2BAA2B,CAAC,gBAAgB,EAAE,KAAK,CAAC,GAAG,CAAC,EAAE,SAAS,EAAE,GAAG,gBAMvF;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,MAAM,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,EAAE,MAAM,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE;;EAK1F;AAED;;;;;GAKG;AACH,wBAAgB,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAEzD;AAED;;GAEG;AACH,MAAM,WAAW,eAAe,CAAC,OAAO;IACtC;;;;;;OAMG;IACH,CACE,GAAG,EAAE,KAAK,GAAG,SAAS,GAAG,IAAI,EAC7B,MAAM,CAAC,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB,GAC/B,IAAI,CAAC;CACT;AAED;;;;;GAKG;AACH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,OAAO,CAAC,GAAG,CAAC,GAAG,QAAQ,CAcjE;AAED;;;;GAIG;AACH,wBAAgB,wBAAwB,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,CAAC,qBAAqB,CAAC,GAAG,QAAQ,CAc7F;AAED,wBAAgB,kBAAkB,CAAC,GAAG,EAAE,GAAG,EAAE,WAAW,EAAE,MAAM;;EAK/D;AAED;;;;GAIG;AACH,wBAAgB,WAAW,CAAC,UAAU,EAAE,GAAG,EAAE,WAAW,EAAE,GAAG,EAAE,GAAG,IAAI,CAMrE;AAID;;;;GAIG;AACH,wBAAgB,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAEjD;AAED;;;;;;GAMG;AACH,wBAAgB,UAAU,CACxB,KAAK,EAAE,MAAM,GAAG,SAAS,EACzB,WAAW,EAAE,MAAM,EACnB,YAAY,EAAE,MAAM,GACnB,MAAM,GAAG,SAAS,CAEpB;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,GAAG,GAAG,OAAO,CAEnD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.js b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js 
new file mode 100644 index 00000000..24877684 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js @@ -0,0 +1,229 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { v4 as uuidv4 } from "uuid"; +import { Constants } from "./constants"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export var isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Checks if a parsed URL is HTTPS + * + * @param {object} urlToCheck The url to check + * @return {boolean} True if the URL is HTTPS; false otherwise. + */ +export function urlIsHTTPS(urlToCheck) { + return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; +} +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +export function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +export function stripResponse(response) { + var strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +export function stripRequest(request) { + var strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +export function isValidUuid(uuid) { + var validUuidRegex = new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", "ig"); + return validUuidRegex.test(uuid); +} +/** + * Provides an array of values of an object. For example + * for a given object { "a": "foo", "b": "bar" }, the method returns ["foo", "bar"]. + * + * @param {object} obj An object whose properties need to be enumerated so that it"s values can be provided as an array + * + * @return {any[]} An array of values of the given object. + */ +export function objectValues(obj) { + var result = []; + if (obj && obj instanceof Object) { + for (var key in obj) { + if (obj.hasOwnProperty(key)) { + result.push(obj[key]); + } + } + } + else { + throw new Error("The provided object " + JSON.stringify(obj, undefined, 2) + " is not a valid object that can be " + "enumerated to provide its values as an array."); + } + return result; +} +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +export function generateUuid() { + return uuidv4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +export function executePromisesSequentially(promiseFactories, kickstart) { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; +} +/** + * Merges source object into the target object + * @param {object} source The object that needs to be merged + * + * @param {object} target The object to be merged into + * + * @returns {object} Returns the merged target object. + */ +export function mergeObjects(source, target) { + Object.keys(source).forEach(function (key) { + target[key] = source[key]; + }); + return target; +} +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +export function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); +} +/** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +export function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + cb(undefined, data); + }, function (err) { + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +export function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }, function (err) { + process.nextTick(cb, err); + }); + }; +} +export function prepareXMLRootList(obj, elementName) { + var _a; + if (!Array.isArray(obj)) { + obj = [obj]; + } + return _a = {}, _a[elementName] = obj, _a; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ +export function applyMixins(targetCtor, sourceCtors) { + sourceCtors.forEach(function (sourceCtors) { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach(function (name) { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); +} +var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. 
+ * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +export function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +export function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ +export function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js.map new file mode 100644 index 00000000..a59b7020 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../lib/util/utils.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,EAAE,IAAI,MAAM,EAAE,MAAM,MAAM,CAAC;AAIpC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH,MAAM,CAAC,IAAM,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;IAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;IACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,UAAgC;IACzD,OAAO,UAAU,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,SAAS,CAAC,KAAK,CAAC;AAC/D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CAAC,GAAW;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;SAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,aAAa,CAAC,QAA+B;IAC3D,IAAM,gBAAgB,GAAQ,EAAE,CAAC;IACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;IAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1C,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,YAAY,CAAC,OAAwB;IACnD,IAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;QAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;KACjD;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CAAC,IAAY;IACtC,IAAM,cAAc,GAAG,IAAI,MAAM,CAC/B,+EAA+E,EAC/E,IAAI,CACL,CAAC;IACF,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,YAAY,CAAC,GAA2B;IACtD,IAAM,MAAM,GAAU,EAAE,CAAC;IACzB,IAAI,GAAG,IAAI,GAAG,YAAY,MAAM,EAAE;QAChC,KAAK,IAAM,GAAG,IAAI,GAAG,EAAE;YACrB,IAAI,GAAG,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;gBAC3B,MAAM,CAAC,IAAI,CAAO,GAAI,CAAC,GAAG,CAAC,CAAC,CAAC;aAC9B;SACF;KACF;SAAM;QACL,MAAM,IAAI,KAAK,CACb,yBAAuB,IAAI,CAAC,SAAS,CACnC,GAAG,EACH,SAAS,EACT,CAAC,CACF,wCAAqC,GAAG,+CAA+C,CACzF,CAAC;KACH;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,OAAO,MAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,UAAU,2BAA2B,CAAC,gBAA4B,EAAE,SAAc;IACtF,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAA
C,SAAS,CAAC,CAAC;IACxC,gBAAgB,CAAC,OAAO,CAAC,UAAC,cAAc;QACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IACvC,CAAC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,YAAY,CAAC,MAA8B,EAAE,MAA8B;IACzF,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,UAAC,GAAG;QAC9B,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;IAC5B,CAAC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,KAAK,CAAI,CAAS,EAAE,KAAS;IAC3C,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,IAAK,OAAA,UAAU,CAAC,cAAM,OAAA,OAAO,CAAC,KAAK,CAAC,EAAd,CAAc,EAAE,CAAC,CAAC,EAAnC,CAAmC,CAAC,CAAC;AACvE,CAAC;AAqBD;;;;;GAKG;AACH,MAAM,UAAU,iBAAiB,CAAC,OAAqB;IACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAY;QAClB,OAAO,CAAC,IAAI,CACV,UAAC,IAAS;YACR,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;QACtB,CAAC,EACD,UAAC,GAAU;YACT,EAAE,CAAC,GAAG,CAAC,CAAC;QACV,CAAC,CACF,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,wBAAwB,CAAI,OAAuC;IACjF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAsB;QAC5B,OAAO,CAAC,IAAI,CACV,UAAC,IAA2B;YAC1B,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAC5E,CAAC,EACD,UAAC,GAAU;YACT,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;QAC5B,CAAC,CACF,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,kBAAkB,CAAC,GAAQ,EAAE,WAAmB;;IAC9D,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;KACb;IACD,gBAAS,GAAC,WAAW,IAAG,GAAG,KAAG;AAChC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,WAAW,CAAC,UAAe,EAAE,WAAkB;IAC7D,WAAW,CAAC,OAAO,CAAC,UAAC,WAAW;QAC9B,MAAM,CAAC,mBAAmB,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAC,IAAI;YAC7D,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QAC3D,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,IAAM,mBAAmB,GAAG,qKAAqK,CAAC;AAElM;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,KAAa;IACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,KAAU;IACxC,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,CAAC,IAAI,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts new file mode 100644 index 00000000..a9d4674c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts @@ -0,0 +1,5 @@ +export declare function parseXML(str: string): Promise; +export declare function stringifyXML(obj: any, opts?: { + rootName?: string; +}): string; +//# sourceMappingURL=xml.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map new file mode 100644 index 00000000..466a7b1c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.browser.d.ts","sourceRoot":"","sources":["../../../lib/util/xml.browser.ts"],"names":[],"mappings":"AAIA,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAUlD;AAsFD,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,UAMlE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js 
b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js new file mode 100644 index 00000000..d792befa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var parser = new DOMParser(); +export function parseXML(str) { + try { + var dom = parser.parseFromString(str, "application/xml"); + throwIfError(dom); + var obj = domToObject(dom.childNodes[0]); + return Promise.resolve(obj); + } + catch (err) { + return Promise.reject(err); + } +} +var errorNS = ""; +try { + errorNS = parser.parseFromString("INVALID", "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI; +} +catch (ignored) { + // Most browsers will return a document containing , but IE will throw. +} +function throwIfError(dom) { + if (errorNS) { + var parserErrors = dom.getElementsByTagNameNS(errorNS, "parsererror"); + if (parserErrors.length) { + throw new Error(parserErrors.item(0).innerHTML); + } + } +} +function isElement(node) { + return !!node.attributes; +} +/** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ +function asElementWithAttributes(node) { + return isElement(node) && node.hasAttributes() ? node : undefined; +} +function domToObject(node) { + var result = {}; + var childNodeCount = node.childNodes.length; + var firstChildNode = node.childNodes[0]; + var onlyChildTextValue = (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + var elementWithAttributes = asElementWithAttributes(node); + if (elementWithAttributes) { + result["$"] = {}; + for (var i = 0; i < elementWithAttributes.attributes.length; i++) { + var attr = elementWithAttributes.attributes[i]; + result["$"][attr.nodeName] = attr.nodeValue; + } + if (onlyChildTextValue) { + result["_"] = onlyChildTextValue; + } + } + else if (childNodeCount === 0) { + result = ""; + } + else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + if (!onlyChildTextValue) { + for (var i = 0; i < childNodeCount; i++) { + var child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + var childObject = domToObject(child); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } + else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } + else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + return result; +} +// tslint:disable-next-line:no-null-keyword +var doc = document.implementation.createDocument(null, null, null); +var serializer = new XMLSerializer(); +export function stringifyXML(obj, opts) { + var rootName = (opts && opts.rootName) || "root"; + var dom = buildNode(obj, rootName)[0]; + return ('' + serializer.serializeToString(dom)); +} +function buildAttributes(attrs) { + var result = []; + for (var _i = 0, _a = Object.keys(attrs); _i < _a.length; _i++) { + var key = _a[_i]; + var attr = doc.createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; +} +function buildNode(obj, elementName) { + if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") { + var elem = doc.createElement(elementName); + elem.textContent = obj.toString(); + 
return [elem]; + } + else if (Array.isArray(obj)) { + var result = []; + for (var _i = 0, obj_1 = obj; _i < obj_1.length; _i++) { + var arrayElem = obj_1[_i]; + for (var _a = 0, _b = buildNode(arrayElem, elementName); _a < _b.length; _a++) { + var child = _b[_a]; + result.push(child); + } + } + return result; + } + else if (typeof obj === "object") { + var elem = doc.createElement(elementName); + for (var _c = 0, _d = Object.keys(obj); _c < _d.length; _c++) { + var key = _d[_c]; + if (key === "$") { + for (var _e = 0, _f = buildAttributes(obj[key]); _e < _f.length; _e++) { + var attr = _f[_e]; + elem.attributes.setNamedItem(attr); + } + } + else { + for (var _g = 0, _h = buildNode(obj[key], key); _g < _h.length; _g++) { + var child = _h[_g]; + elem.appendChild(child); + } + } + } + return [elem]; + } + else { + throw new Error("Illegal value passed to buildObject: " + obj); + } +} +//# sourceMappingURL=xml.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map new file mode 100644 index 00000000..285dec34 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.browser.js","sourceRoot":"","sources":["../../../lib/util/xml.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,IAAM,MAAM,GAAG,IAAI,SAAS,EAAE,CAAC;AAC/B,MAAM,UAAU,QAAQ,CAAC,GAAW;IAClC,IAAI;QACF,IAAM,GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;QAC3D,YAAY,CAAC,GAAG,CAAC,CAAC;QAElB,IAAM,GAAG,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3C,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;KAC7B;IAAC,OAAO,GAAG,EAAE;QACZ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;AACH,CAAC;AAED,IAAI,OAAO,GAAG,EAAE,CAAC;AACjB,IAAI;IACF,OAAO,GAAG,MAAM,CAAC,eAAe,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;SAC3F,YAAa,CAAC;CAClB;AAAC,OAAO,OAAO,EAAE;IAChB,oFAAoF;CACrF;AAED,SAAS,YAAY,CAAC,GAAa;IACjC,IAAI,OAAO,EAAE;QACX,IAAM,YAAY,GAAG,GAAG,CAAC,sBAAsB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QACxE,IAAI,YAAY,CAAC,MAAM,EAAE;YACvB,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAE,CAAC,SAAS,CAAC,CAAC;SAClD;KACF;AACH,CAAC;AAED,SAAS,SAAS,CAAC,IAAU;IAC3B,OAAO,CAAC,CAAE,IAAgB,CAAC,UAAU,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,SAAS,uBAAuB,CAAC,IAAU;IACzC,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;AACpE,CAAC;AAED,SAAS,WAAW,CAAC,IAAU;IAC7B,IAAI,MAAM,GAAQ,EAAE,CAAC;IAErB,IAAM,cAAc,GAAW,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAEtD,IAAM,cAAc,GAAS,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAChD,IAAM,kBAAkB,GACtB,CAAC,cAAc;QACb,cAAc,KAAK,CAAC;QACpB,cAAc,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS;QAC1C,cAAc,CAAC,SAAS,CAAC;QAC3B,SAAS,CAAC;IAEZ,IAAM,qBAAqB,GAAwB,uBAAuB,CAAC,IAAI,CAAC,CAAC;IACjF,IAAI,qBAAqB,EAAE;QACzB,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,qBAAqB,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,IAAM,IAAI,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjD,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;SAC7C;QAED,IAAI,kBAAkB,EAAE;YACtB,MAAM,CAAC,GAAG,CAAC,GAAG,kBAAkB,CAAC;SAClC;KACF;SAAM,IAAI,cAAc,KAAK,CAAC,EAAE;QAC/B,MAAM,GAAG,EAAE,CAAC;KACb;SAAM,IAAI,kBAAkB,EAAE;QAC7B,MAAM,GAAG,kBAAkB,CAAC;KAC7B;IAED,IAAI,CAAC,kBAAkB,EAAE;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;YACvC,IAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjC,2CAA2C;YAC3C,IAAI,KAAK,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS,EAAE;gBACrC,IAAM,WAAW,GAAQ,WAAW,CAAC,KAAK,CAAC,CAAC;gBA
C5C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;oBAC3B,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,WAAW,CAAC;iBACtC;qBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;oBAChD,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;iBAC1C;qBAAM;oBACL,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,CAAC;iBAChE;aACF;SACF;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,2CAA2C;AAC3C,IAAM,GAAG,GAAG,QAAQ,CAAC,cAAc,CAAC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AACrE,IAAM,UAAU,GAAG,IAAI,aAAa,EAAE,CAAC;AAEvC,MAAM,UAAU,YAAY,CAAC,GAAQ,EAAE,IAA4B;IACjE,IAAM,QAAQ,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,MAAM,CAAC;IACnD,IAAM,GAAG,GAAG,SAAS,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;IACxC,OAAO,CACL,yDAAyD,GAAG,UAAU,CAAC,iBAAiB,CAAC,GAAG,CAAC,CAC9F,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CAAC,KAAgD;IACvE,IAAM,MAAM,GAAG,EAAE,CAAC;IAClB,KAAkB,UAAkB,EAAlB,KAAA,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAlB,cAAkB,EAAlB,IAAkB,EAAE;QAAjC,IAAM,GAAG,SAAA;QACZ,IAAM,IAAI,GAAG,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;QACtC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,SAAS,CAAC,GAAQ,EAAE,WAAmB;IAC9C,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,SAAS,EAAE;QAClF,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QAC5C,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,QAAQ,EAAE,CAAC;QAClC,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QAC7B,IAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAwB,UAAG,EAAH,WAAG,EAAH,iBAAG,EAAH,IAAG,EAAE;YAAxB,IAAM,SAAS,YAAA;YAClB,KAAoB,UAAiC,EAAjC,KAAA,SAAS,CAAC,SAAS,EAAE,WAAW,CAAC,EAAjC,cAAiC,EAAjC,IAAiC,EAAE;gBAAlD,IAAM,KAAK,SAAA;gBACd,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACpB;SACF;QACD,OAAO,MAAM,CAAC;KACf;SAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAClC,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QAC5C,KAAkB,UAAgB,EAAhB,KAAA,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,EAAhB,cAAgB,EAAhB,IAAgB,EAAE;YAA/B,IAAM,GAAG,SAAA;YACZ,IAAI,GAAG,KAAK,GAAG,EAAE;gBACf,KAAmB,UAAyB,EAAzB,KAAA,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;oBAAzC,IAAM,IAAI,SAAA;oBACb,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;iBACpC;aACF;iBAAM;gBACL,KAAoB,UAAwB,EAAxB,KAAA,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oBAAzC,IAAM,KAAK,SAAA;oBACd,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;iBACzB;aACF;SACF;QACD,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,0CAAwC,GAAK,CAAC,CAAC;KAChE;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts new file mode 100644 index 00000000..3290810e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts @@ -0,0 +1,5 @@ +export declare function stringifyXML(obj: any, opts?: { + rootName?: string; +}): string; +export declare function parseXML(str: string): Promise; +//# sourceMappingURL=xml.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts.map new file mode 100644 index 00000000..e8c53499 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.d.ts","sourceRoot":"","sources":["../../../lib/util/xml.ts"],"names":[],"mappings":"AAKA,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,UAQlE;AAED,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAmBlD"} \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/util/xml.js b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js new file mode 100644 index 00000000..3e3bd25d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import * as xml2js from "xml2js"; +export function stringifyXML(obj, opts) { + var builder = new xml2js.Builder({ + rootName: (opts || {}).rootName, + renderOpts: { + pretty: false, + }, + }); + return builder.buildObject(obj); +} +export function parseXML(str) { + var xmlParser = new xml2js.Parser({ + explicitArray: false, + explicitCharkey: false, + explicitRoot: false, + }); + return new Promise(function (resolve, reject) { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, function (err, res) { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} +//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js.map new file mode 100644 index 00000000..0b09e4fd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.js","sourceRoot":"","sources":["../../../lib/util/xml.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC,MAAM,UAAU,YAAY,CAAC,GAAQ,EAAE,IAA4B;IACjE,IAAM,OAAO,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC;QACjC,QAAQ,EAAE,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC,QAAQ;QAC/B,UAAU,EAAE;YACV,MAAM,EAAE,KAAK;SACd;KACF,CAAC,CAAC;IACH,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,MAAM,UAAU,QAAQ,CAAC,GAAW;IAClC,IAAM,SAAS,GAAG,IAAI,MAAM,CAAC,MAAM,CAAC;QAClC,aAAa,EAAE,KAAK;QACpB,eAAe,EAAE,KAAK;QACtB,YAAY,EAAE,KAAK;KACpB,CAAC,CAAC;IACH,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;SACxC;aAAM;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAW,EAAE,GAAS;gBAChD,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;qBAAM;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;iBACd;YACH,CAAC,CAAC,CAAC;SACJ;IACH,CAAC,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts new file mode 100644 index 00000000..7eb39af1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts @@ -0,0 +1,337 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { OperationSpec } from "./operationSpec"; +import { Mapper } from "./serializer"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { AgentSettings, ProxySettings } from "./serviceClient"; +export declare type HttpMethods = "GET" | "PUT" | "POST" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS" | "TRACE"; +export declare type HttpRequestBody = Blob | string | ArrayBuffer | ArrayBufferView | (() => NodeJS.ReadableStream); +/** + * Fired in response to upload or download progress. + */ +export declare type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. 
+ */ +export interface AbortSignalLike { + readonly aborted: boolean; + dispatchEvent: (event: Event) => boolean; + onabort: ((this: AbortSignalLike, ev: Event) => any) | null; + addEventListener: (type: "abort", listener: (this: AbortSignalLike, ev: Event) => any, options?: any) => void; + removeEventListener: (type: "abort", listener: (this: AbortSignalLike, ev: Event) => any, options?: any) => void; +} +/** + * An abstraction over a REST call. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + formData?: any; + /** + * A query string represented as an object. + */ + query?: { + [key: string]: any; + }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * HTTP(S) agent configuration. + */ + agentSettings?: AgentSettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + /** + * Used to abort the request later. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} +export declare function isWebResourceLike(object: any): object is WebResourceLike; +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. 
+ * + * @constructor + */ +export declare class WebResource { + url: string; + method: HttpMethods; + body?: any; + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + formData?: any; + query?: { + [key: string]: any; + }; + operationSpec?: OperationSpec; + withCredentials: boolean; + timeout: number; + proxySettings?: ProxySettings; + keepAlive?: boolean; + agentSettings?: AgentSettings; + redirectLimit?: number; + abortSignal?: AbortSignalLike; + /** Callback which fires upon upload progress. */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + constructor(url?: string, method?: HttpMethods, body?: any, query?: { + [key: string]: any; + }, headers?: { + [key: string]: any; + } | HttpHeadersLike, streamResponseBody?: boolean, withCredentials?: boolean, abortSignal?: AbortSignalLike, timeout?: number, onUploadProgress?: (progress: TransferProgressEvent) => void, onDownloadProgress?: (progress: TransferProgressEvent) => void, proxySettings?: ProxySettings, keepAlive?: boolean, agentSettings?: AgentSettings, redirectLimit?: number); + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. + */ + clone(): WebResource; +} +export interface RequestPrepareOptions { + /** + * The HTTP request method. Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". 
+ * Example: + * - query-parameter-value in "object" format: { "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } } + * - query-parameter-value in "string" format: { "query-parameter-name": "query-parameter-value"}. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}" + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: { "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } } + * - path-parameter-value in "string" format: { "path-parameter-name": "path-parameter-value" }. + */ + pathParameters?: { + [key: string]: any | ParameterValue; + }; + formData?: { + [key: string]: any; + }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { + [key: string]: any; + }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { + [x: string]: any; + }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: object; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). 
+ */ + bodyIsStream?: boolean; + abortSignal?: AbortSignalLike; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + onUploadProgress?: (progress: TransferProgressEvent) => void; + onDownloadProgress?: (progress: TransferProgressEvent) => void; + streamResponseBody?: boolean; +} +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + value: any; + skipUrlEncoding: boolean; + [key: string]: any; +} +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * @property {object} [customHeaders] User defined custom request headers that + * will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + [key: string]: any; +} +//# sourceMappingURL=webResource.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts.map new file mode 100644 index 00000000..3943c0b7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"webResource.d.ts","sourceRoot":"","sources":["../../lib/webResource.ts"],"names":[],"mappings":";AAGA,OAAO,EAAe,eAAe,EAAqB,MAAM,eAAe,CAAC;AAChF,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,MAAM,EAAc,MAAM,cAAc,CAAC;AAElD,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAE/D,oBAAY,WAAW,GACnB,KAAK,GACL,KAAK,GACL,MAAM,GACN,QAAQ,GACR,OAAO,GACP,MAAM,GACN,SAAS,GACT,OAAO,CAAC;AACZ,oBAAY,eAAe,GACvB,IAAI,GACJ,MAAM,GACN,WAAW,GACX,eAAe,GACf,CAAC,MAAM,MAAM,CAAC,cAAc,CAAC,CAAC;AAElC;;GAEG;AACH,oBAAY,qBAAqB,GAAG;IAClC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;CACrB,CAAC;AAEF;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,aAAa,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IACzC,OAAO,EAAE,CAAC,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,CAAC;IAC5D,gBAAgB,EAAE,CAChB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,EACnD,OAAO,CAAC,EAAE,GAAG,KACV,IAAI,CAAC;IACV,mBAAmB,EAAE,CACnB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,EACnD,OAAO,CAAC,EAAE,GAAG,KACV,IAAI,CAAC;CACX;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IACzB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf;;OAEG;IACH,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;;OAGG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;;;OAIG;IACH,yBAAyB,IAAI,IAAI,CAAC;IAElC;;OAEG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,CAAC;IACzD;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;CAC1B;AAED,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,GAAG,GAAG,MAAM,IAAI,eAAe,CAgBxE;AAED;;;;;;;GAOG;AACH,qBAAa,WAAW;IACtB,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,EAAE,WAAW,CAAC;IACpB,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,OAAO,EAAE,eAAe,CAAC;IACzB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,eAAe,EAAE,OAAO,CAAC;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B,iDAAiD;IACjD,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;gBAG7D,GAAG,CAAC,EAAE,MAAM,EACZ,MAAM,CAAC,EAAE,WAAW,EACpB,IAAI,CAAC,EAAE,GAAG,EACV,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,EAC9B,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,GAAG,eAAe,EAClD,kBAAkB,CAAC,EAAE,OAAO,EAC5B,eAAe,CAAC,EAAE,OAAO,EACzB,WAAW,CAAC,EAAE,
eAAe,EAC7B,OAAO,CAAC,EAAE,MAAM,EAChB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC5D,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC9D,aAAa,CAAC,EAAE,aAAa,EAC7B,SAAS,CAAC,EAAE,OAAO,EACnB,aAAa,CAAC,EAAE,aAAa,EAC7B,aAAa,CAAC,EAAE,MAAM;IAoBxB;;;;OAIG;IACH,yBAAyB,IAAI,IAAI;IASjC;;;;OAIG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,WAAW;IA6MpD;;;OAGG;IACH,KAAK,IAAI,WAAW;CAqCrB;AAED,MAAM,WAAW,qBAAqB;IACpC;;;OAGG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;IACb;;;;;;;;;;OAUG;IACH,eAAe,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IAC1D;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;;OAIG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;;;;;;;;OASG;IACH,cAAc,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IACzD,QAAQ,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAClC;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACjC;;OAEG;IACH,sBAAsB,CAAC,EAAE,OAAO,CAAC;IACjC;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B;;OAEG;IACH,0BAA0B,CAAC,EAAE,OAAO,CAAC;IACrC;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC7D,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC/D,kBAAkB,CAAC,EAAE,OAAO,CAAC;CAC9B;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,KAAK,EAAE,GAAG,CAAC;IACX,eAAe,EAAE,OAAO,CAAC;IACzB,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAE1C;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.js b/node_modules/@azure/ms-rest-js/es/lib/webResource.js new file mode 100644 index 00000000..8fd1460f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.js @@ -0,0 +1,257 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders, isHttpHeadersLike } from "./httpHeaders"; +import { Serializer } from "./serializer"; +import { generateUuid } from "./util/utils"; +export function isWebResourceLike(object) { + if (typeof object !== "object") { + return false; + } + if (typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function") { + return true; + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. 
+ * + * @constructor + */ +var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, agentSettings, redirectLimit) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + WebResource.prototype.validateRequestProperties = function () { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + }; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + WebResource.prototype.prepare = function (options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + var baseUrl = options.baseUrl; + var url_1 = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate_1.startsWith("/") ? 
pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); + } + segments.forEach(function (item) { + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in " + pathParameters_1 + " - " + JSON.stringify(pathParameters_1, undefined, 2) + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); + } + if (typeof pathParam.valueOf() === "string") { + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (pathParam.skipUrlEncoding) { + url_1 = url_1.replace(item, pathParam.value); + } + else { + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url_1; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + var queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + var queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + return this; + }; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.agentSettings, this.redirectLimit); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + }; + return WebResource; +}()); +export { WebResource }; +//# sourceMappingURL=webResource.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.js.map b/node_modules/@azure/ms-rest-js/es/lib/webResource.js.map new file mode 100644 index 00000000..8cd8e4d6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.js","sourceRoot":"","sources":["../../lib/webResource.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAmB,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAEhF,OAAO,EAAU,UAAU,EAAE,MAAM,cAAc,CAAC;AAClD,OAAO,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AA2J5C,MAAM,UAAU,iBAAiB,CAAC,MAAW;IAC3C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,OAAO,KAAK,CAAC;KACd;IACD,IACE,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ;QAC9B,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ;QACjC,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ;QAClC,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;QACjC,OAAO,MAAM,CAAC,yBAAyB,KAAK,UAAU;QACtD,OAAO,MAAM,CAAC,OAAO,KAAK,UAAU;QACpC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU,EAClC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;GAOG;AACH;IAyCE,qBACE,GAAY,EACZ,MAAoB,EACpB,IAAU,EACV,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,aAA6B,EAC7B,aAAsB;QAEtB,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;QAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;QAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;QAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,+CAAyB,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;SACjD;IACH,CAAC;IAED;;;;OAIG;IACH,6BAAO,GAAP,UAAQ,OAA8B;QACpC,IAAI,CAAC,OAAO,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QAED,IAAI,OAAO,CAAC,MAAM,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YAC/E,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;SACH;QAED,IACE,CAAC,OAAO,CAAC,YAAY,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC;YACzF,CAAC,OAAO,CAAC,GAAG,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EACvE;YACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,iCAAiC;QACjC,IAAI,
OAAO,CAAC,GAAG,EAAE;YACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;SACxB;QAED,iBAAiB;QACjB,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;oBACrB,OAAO,CAAC,MAAM;oBACd,4CAA4C;oBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;aACH;SACF;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;QAE1D,iDAAiD;QACjD,IAAI,OAAO,CAAC,YAAY,EAAE;YAChB,IAAA,cAAY,GAAqB,OAAO,aAA5B,EAAE,gBAAc,GAAK,OAAO,eAAZ,CAAa;YACjD,IAAI,OAAO,cAAY,KAAK,QAAQ,EAAE;gBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;aACnE;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;aAClD;YACD,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,KAAG,GACL,OAAO;gBACP,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;gBAClC,CAAC,cAAY,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,cAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAY,CAAC,CAAC;YACxE,IAAM,QAAQ,GAAG,KAAG,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;YAC9C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,gBAAc,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,6EAA0E,CACxG,CAAC;iBACH;gBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;oBAC7B,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;oBACxC,IAAM,SAAS,GAAI,gBAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;wBAClB,SAAS,KAAK,SAAS;wBACvB,CAAC,CAAC,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;wBACA,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,qCAAgC,aAAe;6BAC1E,oCAAkC,gBAAc,WAAM,IAAI,CAAC,SAAS,CAClE,gBAAc,EACd,SAAS,EACT,CAAC,CACF,MAAG,CAAA;6BACJ,8EAA0E,aAAa,kCAA6B,CAAA;6BACpH,6CAAwC,aAAa,mEAA6D,CAAA,CACrH,CAAC;qBACH;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;qBACxD;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;4BACpB,MAAM,IAAI,KAAK,CACb,4BAA0B,aAAa,0EAAmE,CAC3G,CAAC;yBACH;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;yBAC1C;6BAAM;4BACL,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;yBAC9D;qBACF;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,IAAI,CAAC,GAAG,GAAG,KAAG,CAAC;SAChB;QAED,iHAAiH;QACjH,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,IAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;oBAC3E,yFAAqF;oBACrF,mJAA2I,CAC9I,CAAC;aACH;YACD,uDAAuD;YACvD,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;aACjB;YACD,wBAAwB;YACxB,IAAM,WAAW,GAAG,EAAE,CAAC;YACvB,4GAA4G;YAC5G,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAChB,KAAK,IAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,UAAU,EAAE;oBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;qBAC7D;yBAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;4BACrB,MAAM,IAAI,KAAK,CACb,6BAA2B,cAAc,0EAAmE,CAC7G,CAAC;yBACH;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;yBAC/C;6BAAM;4BACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;4BAC9E,IAAI,CAAC,KAAK,CAAC,c
AAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;yBACnE;qBACF;iBACF;aACF,CAAC,aAAa;YACf,yBAAyB;YACzB,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnC;QAED,kDAAkD;QAClD,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAyB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAAlD,IAAM,UAAU,SAAA;gBACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;aACnD;SACF;QACD,0CAA0C;QAC1C,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;SAC9C;QACD,yCAAyC;QACzC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,YAAY,EAAE,CAAC,CAAC;SAC5D;QAED,UAAU;QACV,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;SACrE;QAED,0HAA0H;QAC1H,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,IAAI,SAAS,EAAE;YAC7B,6HAA6H;YAC7H,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;iBAClD;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;iBAC9D;aACF;iBAAM;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;iBACH;gBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC1C;aACF;SACF;QAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;QACjD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;QAC3C,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QAErD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACH,2BAAK,GAAL;QACE,IAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,CACnB,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;SACjC;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC3C;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;SACnD;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;YAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;SAC/D;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,kBAAC;AAAD,CAAC,AArVD,IAqVC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts new file mode 100644 index 00000000..8a823d69 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts @@ -0,0 +1,12 @@ +import { HttpClient } from "./httpClient"; +import { HttpHeaders } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ +export declare class XhrHttpClient implements HttpClient { + sendRequest(request: WebResourceLike): Promise; +} +export declare function parseHeaders(xhr: XMLHttpRequest): HttpHeaders; +//# sourceMappingURL=xhrHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts.map new file mode 100644 index 00000000..b1aa40f7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xhrHttpClient.d.ts","sourceRoot":"","sources":["../../lib/xhrHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,eAAe,EAAyB,MAAM,eAAe,CAAC;AACvE,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAGhE;;GAEG;AACH,qBAAa,aAAc,YAAW,UAAU;IACvC,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAuG7E;AAgBD,wBAAgB,YAAY,CAAC,GAAG,EAAE,cAAc,eAa/C"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js new file mode 100644 index 00000000..4fa6828c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "./httpHeaders"; +import { RestError } from "./restError"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +var XhrHttpClient = /** @class */ (function () { + function XhrHttpClient() { + } + XhrHttpClient.prototype.sendRequest = function (request) { + var xhr = new XMLHttpRequest(); + if (request.agentSettings) { + throw new Error("HTTP agent settings not supported in browser environment"); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + var abortSignal = request.abortSignal; + if (abortSignal) { + var listener_1 = function () { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener_1); + xhr.addEventListener("readystatechange", function () { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener_1); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + if (request.formData) { + var formData = request.formData; + var requestForm_1 = new FormData(); + var appendFormValue = function (key, value) { + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (var _i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + var formKey = _a[_i]; + var formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (var j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + request.body = requestForm_1; + request.formData = undefined; + var contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = 
request.withCredentials; + for (var _b = 0, _c = request.headers.headersArray(); _b < _c.length; _b++) { + var header = _c[_b]; + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = request.streamResponseBody ? "blob" : "text"; + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? null : request.body); + if (request.streamResponseBody) { + return new Promise(function (resolve, reject) { + xhr.addEventListener("readystatechange", function () { + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + var blobBody = new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody: blobBody, + }); + } + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + return resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + }); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + }; + return XhrHttpClient; +}()); +export { XhrHttpClient }; +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", function (rawEvent) { + return listener({ + loadedBytes: rawEvent.loaded, + }); + }); + } +} +// exported locally for testing +export function parseHeaders(xhr) { + var responseHeaders = new HttpHeaders(); + var headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (var _i = 0, headerLines_1 = headerLines; _i < headerLines_1.length; _i++) { + var line = headerLines_1[_i]; + var index = line.indexOf(":"); + var headerName = line.slice(0, index); + var headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", function () { + return reject(new RestError("Failed to send request to " + request.url, RestError.REQUEST_SEND_ERROR, undefined, request)); + }); + xhr.addEventListener("abort", function () { + return reject(new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, request)); + }); + xhr.addEventListener("timeout", function () { + return reject(new RestError("timeout of " + xhr.timeout + "ms exceeded", RestError.REQUEST_SEND_ERROR, undefined, request)); + }); +} +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js.map new file mode 100644 index 00000000..1c00438a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"xhrHttpClient.js","sourceRoot":"","sources":["../../lib/xhrHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAG/F,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAG5C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH;IAAA;IAwGA,CAAC;IAvGQ,mCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;QAEjC,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;SAC7E;QAED,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACxC,IAAI,WAAW,EAAE;YACf,IAAM,UAAQ,GAAG;gBACf,GAAG,CAAC,KAAK,EAAE,CAAC;YACd,CAAC,CAAC;YACF,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;YAChD,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;gBACvC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,IAAI,EAAE;oBAC1C,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;iBACpD;YACH,CAAC,CAAC,CAAC;SACJ;QAED,mBAAmB,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,gBAAgB,CAAC,CAAC;QAC1D,mBAAmB,CAAC,GAAG,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAErD,IAAI,OAAO,CAAC,QAAQ,EAAE;YACpB,IAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;YAClC,IAAM,aAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;YACnC,IAAM,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;gBAC9C,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;oBAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iBACrD;qBAAM;oBACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBAChC;YACH,CAAC,CAAC;YACF,KAAsB,UAAqB,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;gBAAxC,IAAM,OAAO,SAAA;gBAChB,IAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qBACxC;iBACF;qBAAM;oBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iBACrC;aACF;YAED,OAAO,CAAC,IAAI,GAAG,aAAW,CAAC;YAC3B,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC7B,IAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YACxD,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACpE,kEAAkE;gBAClE,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;aACxC;SACF;QAED,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;QAC9B,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;QAC9C,KAAqB,UAA8B,EAA9B,KAAA,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;YAAhD,IAAM,MAAM,SAAA;YACf,GAAG,CAAC,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;SACjD;QACD,GAAG,CAAC,YAAY,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;QAEhE,2CAA2C;QAC3C,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAE3D,IAAI,OAAO,CAAC,kBAAkB,EAAE;YAC9B,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACjC,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;oBACvC,wCAAwC;oBACxC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,gBAAgB,EAAE;wBACtD,IAAM,QAAQ,GAAG,IAAI,OAAO,CAAO,UAAC,OAAO,EAAE,MAAM;4BACjD,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;gCAC3B,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;4BACxB,CAAC,CAAC,CAAC;4BACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;wBAC9C,CAAC,CAAC,CAAC;wBACH,OAAO,CAAC;4BACN,OAAO,SAAA;4BACP,MAAM,EAAE,GAAG,CAAC,MAAM;4BAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;4BAC1B,QAAQ,UAAA;yBACT,CAAC,CAAC;qBACJ;gBACH,CAAC,CAAC,CAAC;gBACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;aAAM;YACL,OAAO,IAAI,OAAO,CAAC,UAAU,OAAO,EAAE,MAAM;gBAC1C,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;oBAC3B,OAAA,OAAO,CAAC;wBACN,OAAO,SAAA;wBACP,MAAM,EAAE,GAAG,CAAC,MAAM;wBAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;wBAC1B,UAAU,EAAE,GAAG,CAAC,YAAY;qBAC7B,CAAC;gBALF,CAKE,CA
CH,CAAC;gBACF,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;IACH,oBAAC;AAAD,CAAC,AAxGD,IAwGC;;AAED,SAAS,mBAAmB,CAC1B,GAA8B,EAC9B,QAAoD;IAEpD,IAAI,QAAQ,EAAE;QACZ,GAAG,CAAC,gBAAgB,CAAC,UAAU,EAAE,UAAC,QAAQ;YACxC,OAAA,QAAQ,CAAC;gBACP,WAAW,EAAE,QAAQ,CAAC,MAAM;aAC7B,CAAC;QAFF,CAEE,CACH,CAAC;KACH;AACH,CAAC;AAED,+BAA+B;AAC/B,MAAM,UAAU,YAAY,CAAC,GAAmB;IAC9C,IAAM,eAAe,GAAG,IAAI,WAAW,EAAE,CAAC;IAC1C,IAAM,WAAW,GAAG,GAAG;SACpB,qBAAqB,EAAE;SACvB,IAAI,EAAE;SACN,KAAK,CAAC,SAAS,CAAC,CAAC;IACpB,KAAmB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;QAA3B,IAAM,IAAI,oBAAA;QACb,IAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;QACxC,IAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC1C,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,CAAC;KAC9C;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,qBAAqB,CAC5B,OAAwB,EACxB,GAAmB,EACnB,MAA0B;IAE1B,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;QAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,+BAA6B,OAAO,CAAC,GAAK,EAC1C,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;IAPD,CAOC,CACF,CAAC;IACF,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;QAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CAAC,yBAAyB,EAAE,SAAS,CAAC,qBAAqB,EAAE,SAAS,EAAE,OAAO,CAAC,CAC9F;IAFD,CAEC,CACF,CAAC;IACF,GAAG,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC9B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,gBAAc,GAAG,CAAC,OAAO,gBAAa,EACtC,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;IAPD,CAOC,CACF,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/lib/browserFetchHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/browserFetchHttpClient.ts new file mode 100644 index 00000000..2368e054 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/browserFetchHttpClient.ts @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { + CommonRequestInfo, + CommonRequestInit, + CommonResponse, + FetchHttpClient, +} from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; + +export class BrowserFetchHttpClient extends FetchHttpClient { + prepareRequest(_httpRequest: WebResourceLike): Promise> { + return Promise.resolve({}); + } + + processRequest(_operationResponse: HttpOperationResponse): Promise { + return Promise.resolve(); + } + + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise { + return fetch(input, init); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/apiKeyCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/apiKeyCredentials.ts new file mode 100644 index 00000000..cface5af --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/apiKeyCredentials.ts @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpHeaders } from "../httpHeaders"; +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; + +/** + * @interface ApiKeyCredentialOptions + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { [x: string]: any }; + /** + * A key value pair of the query parameters that need to be applied to the request. 
+ */ + inQuery?: { [x: string]: any }; +} + +/** + * Authenticates to a service using an API key. + */ +export class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?: { [x: string]: any }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?: { [x: string]: any }; + + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error( + `options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.` + ); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. + */ + signRequest(webResource: WebResourceLike): Promise { + if (!webResource) { + return Promise.reject( + new Error(`webResource cannot be null or undefined and must be of type "object".`) + ); + } + + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (const headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error(`url cannot be null in the request object.`)); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (const key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += `${key}=${this.inQuery[key]}`; + } + } + + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/azureIdentityTokenCredentialAdapter.ts b/node_modules/@azure/ms-rest-js/lib/credentials/azureIdentityTokenCredentialAdapter.ts new file mode 100644 index 00000000..33992d8b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/azureIdentityTokenCredentialAdapter.ts @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { Constants as MSRestConstants } from "../util/constants"; +import { WebResource } from "../webResource"; + +import { TokenCredential } from "@azure/core-auth"; +import { TokenResponse } from "./tokenResponse"; + +const DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; + +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ +export const azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", +]; + +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. 
+ */ +export class AzureIdentityCredentialAdapter implements ServiceClientCredentials { + private azureTokenCredential: TokenCredential; + private scopes: string | string[]; + constructor( + azureTokenCredential: TokenCredential, + scopes: string | string[] = "https://management.azure.com/.default" + ) { + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + + public async getToken(): Promise { + const accessToken = await this.azureTokenCredential.getToken(this.scopes); + if (accessToken !== null) { + const result: TokenResponse = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return result; + } else { + throw new Error("Could find token for scope"); + } + } + + public async signRequest(webResource: WebResource) { + const tokenResponse = await this.getToken(); + webResource.headers.set( + MSRestConstants.HeaderConstants.AUTHORIZATION, + `${tokenResponse.tokenType} ${tokenResponse.accessToken}` + ); + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/basicAuthenticationCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/basicAuthenticationCredentials.ts new file mode 100644 index 00000000..e92242d3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/basicAuthenticationCredentials.ts @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpHeaders } from "../httpHeaders"; +import * as base64 from "../util/base64"; +import { Constants } from "../util/constants"; +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; + +export class BasicAuthenticationCredentials implements ServiceClientCredentials { + userName: string; + password: string; + authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME; + + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor( + userName: string, + password: string, + authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME + ) { + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. 
+ */ + signRequest(webResource: WebResourceLike) { + const credentials = `${this.userName}:${this.password}`; + const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`; + if (!webResource.headers) webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/credentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/credentials.ts new file mode 100644 index 00000000..7e9096bb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/credentials.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export type Authenticator = (challenge: object) => Promise; diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/domainCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/domainCredentials.ts new file mode 100644 index 00000000..bf6615ba --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/domainCredentials.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ApiKeyCredentials, ApiKeyCredentialOptions } from "./apiKeyCredentials"; + +export class DomainCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid DomainCredentials object. + * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + constructor(domainKey: string) { + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + const options: ApiKeyCredentialOptions = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + super(options); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/serviceClientCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/serviceClientCredentials.ts new file mode 100644 index 00000000..9a0247da --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/serviceClientCredentials.ts @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { WebResourceLike } from "../webResource"; + +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike/request to be signed. + * @returns {Promise} The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/tokenCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/tokenCredentials.ts new file mode 100644 index 00000000..01516302 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/tokenCredentials.ts @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { HttpHeaders } from "../httpHeaders"; +import { Constants } from "../util/constants"; +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; + +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; + +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +export class TokenCredentials implements ServiceClientCredentials { + token: string; + authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME; + + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) { + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. + */ + signRequest(webResource: WebResourceLike) { + if (!webResource.headers) webResource.headers = new HttpHeaders(); + webResource.headers.set( + HeaderConstants.AUTHORIZATION, + `${this.authorizationScheme} ${this.token}` + ); + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/tokenResponse.ts b/node_modules/@azure/ms-rest-js/lib/credentials/tokenResponse.ts new file mode 100644 index 00000000..aef8969e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/tokenResponse.ts @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * TokenResponse is defined in `@azure/ms-rest-nodeauth` and is copied here to not + * add an unnecessary dependency. + */ +export interface TokenResponse { + readonly tokenType: string; + readonly accessToken: string; + readonly [x: string]: any; +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/topicCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/topicCredentials.ts new file mode 100644 index 00000000..55d435a7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/topicCredentials.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ApiKeyCredentials, ApiKeyCredentialOptions } from "./apiKeyCredentials"; + +export class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + constructor(topicKey: string) { + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + const options: ApiKeyCredentialOptions = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + super(options); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.browser.ts b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.browser.ts new file mode 100644 index 00000000..5ac7a79d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.browser.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; diff --git a/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.ts new file mode 100644 index 00000000..2f1c29dc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; diff --git a/node_modules/@azure/ms-rest-js/lib/fetchHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/fetchHttpClient.ts new file mode 100644 index 00000000..d28b0118 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/fetchHttpClient.ts @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import AbortController from "abort-controller"; +import FormData from "form-data"; + +import { HttpClient } from "./httpClient"; +import { WebResourceLike } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { HttpHeaders, HttpHeadersLike } from "./httpHeaders"; +import { RestError } from "./restError"; +import { Readable, Transform } from "stream"; + +interface FetchError extends Error { + code?: string; + errno?: string; + type?: string; +} + +export type CommonRequestInfo = string; // we only call fetch() on string urls. + +export type CommonRequestInit = Omit & { + body?: any; + headers?: any; + signal?: any; +}; + +export type CommonResponse = Omit & { + body: any; + trailer: any; + formData: any; +}; + +export abstract class FetchHttpClient implements HttpClient { + async sendRequest(httpRequest: WebResourceLike): Promise { + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error( + "'httpRequest' (WebResource) cannot be null or undefined and must be of type object." 
+ ); + } + + const abortController = new AbortController(); + let abortListener: ((event: any) => void) | undefined; + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new RestError( + "The request was aborted", + RestError.REQUEST_ABORTED_ERROR, + undefined, + httpRequest + ); + } + + abortListener = (event: Event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + + if (httpRequest.timeout) { + setTimeout(() => { + abortController.abort(); + }, httpRequest.timeout); + } + + if (httpRequest.formData) { + const formData: any = httpRequest.formData; + const requestForm = new FormData(); + const appendFormValue = (key: string, value: any) => { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm.append(key, value.value, value.options); + } else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } else { + appendFormValue(formKey, formValue); + } + } + + httpRequest.body = requestForm; + httpRequest.formData = undefined; + const contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm.getBoundary === "function") { + httpRequest.headers.set( + "Content-Type", + `multipart/form-data; boundary=${requestForm.getBoundary()}` + ); + } else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + + let body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + let loadedBytes = 0; + const uploadReportStream = new Transform({ + transform: (chunk: string | Buffer, _encoding, callback) => { + loadedBytes += chunk.length; + httpRequest.onUploadProgress!({ loadedBytes }); + callback(undefined, chunk); + }, + }); + + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } else { + uploadReportStream.end(body); + } + + body = uploadReportStream; + } + + const platformSpecificRequestInit: Partial = await this.prepareRequest( + httpRequest + ); + + const requestInit: RequestInit = { + body: body, + headers: httpRequest.headers.rawHeaders(), + method: httpRequest.method, + signal: abortController.signal, + redirect: "manual", + ...platformSpecificRequestInit, + }; + + let operationResponse: HttpOperationResponse | undefined; + try { + const response: CommonResponse = await this.fetch(httpRequest.url, requestInit); + + const headers = parseHeaders(response.headers); + operationResponse = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: httpRequest.streamResponseBody + ? ((response.body as unknown) as NodeJS.ReadableStream) + : undefined, + bodyAsText: !httpRequest.streamResponseBody ? 
await response.text() : undefined, + redirected: response.redirected, + url: response.url, + }; + + const onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + const responseBody: ReadableStream | undefined = response.body || undefined; + + if (isReadableStream(responseBody)) { + let loadedBytes = 0; + const downloadReportStream = new Transform({ + transform: (chunk: string | Buffer, _encoding, callback) => { + loadedBytes += chunk.length; + onDownloadProgress({ loadedBytes }); + callback(undefined, chunk); + }, + }); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } else { + const length = parseInt(headers.get("Content-Length")!) || undefined; + if (length) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length }); + } + } + } + + await this.processRequest(operationResponse); + + return operationResponse; + } catch (error) { + const fetchError: FetchError = error; + if (fetchError.code === "ENOTFOUND") { + throw new RestError( + fetchError.message, + RestError.REQUEST_SEND_ERROR, + undefined, + httpRequest + ); + } else if (fetchError.type === "aborted") { + throw new RestError( + "The request was aborted", + RestError.REQUEST_ABORTED_ERROR, + undefined, + httpRequest + ); + } + + throw fetchError; + } finally { + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse?.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse!.readableStreamBody); + } + + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + httpRequest.abortSignal?.removeEventListener("abort", abortListener!); + return; + }) + .catch((_e) => {}); + } + } + } + + abstract async prepareRequest(httpRequest: WebResourceLike): Promise>; + abstract async processRequest(operationResponse: HttpOperationResponse): Promise; + abstract async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; +} + +function isReadableStream(body: any): body is Readable { + return body && typeof body.pipe === "function"; +} + +function isStreamComplete(stream: Readable): Promise { + return new Promise((resolve) => { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} + +export function parseHeaders(headers: Headers): HttpHeadersLike { + const httpHeaders = new HttpHeaders(); + + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + + return httpHeaders; +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpClient.ts b/node_modules/@azure/ms-rest-js/lib/httpClient.ts new file mode 100644 index 00000000..0d01984c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpClient.ts @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { RequestPolicy } from "./policies/requestPolicy"; + +/** + * An interface that can send HttpRequests and receive promised HttpResponses. 
+ */ +export interface HttpClient extends RequestPolicy {} diff --git a/node_modules/@azure/ms-rest-js/lib/httpHeaders.ts b/node_modules/@azure/ms-rest-js/lib/httpHeaders.ts new file mode 100644 index 00000000..698279a4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpHeaders.ts @@ -0,0 +1,230 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName: string) { + return headerName.toLowerCase(); +} + +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + + /** + * The value of the header. + */ + value: string; +} + +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export type RawHttpHeaders = { [headerName: string]: string }; + +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(): RawHttpHeaders; +} + +export function isHttpHeadersLike(object?: any): object is HttpHeadersLike { + if (!object || typeof object !== "object") { + return false; + } + + if ( + typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function" + ) { + return true; + } + return false; +} + +/** + * A collection of HTTP header key/value pairs. 
+ */ +export class HttpHeaders { + private readonly _headersMap: { [headerKey: string]: HttpHeader }; + + constructor(rawHeaders?: RawHttpHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + public set(headerName: string, headerValue: string | number): void { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + public get(headerName: string): string | undefined { + const header: HttpHeader = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + public contains(headerName: string): boolean { + return !!this._headersMap[getHeaderKey(headerName)]; + } + + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + public remove(headerName: string): boolean { + const result: boolean = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + + /** + * Get the headers that are contained this collection as an object. + */ + public rawHeaders(): RawHttpHeaders { + const result: RawHttpHeaders = {}; + for (const headerKey in this._headersMap) { + const header: HttpHeader = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + } + + /** + * Get the headers that are contained in this collection as an array. + */ + public headersArray(): HttpHeader[] { + const headers: HttpHeader[] = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + + /** + * Get the header names that are contained in this collection. + */ + public headerNames(): string[] { + const headerNames: string[] = []; + const headers: HttpHeader[] = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + + /** + * Get the header names that are contained in this collection. + */ + public headerValues(): string[] { + const headerValues: string[] = []; + const headers: HttpHeader[] = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + + /** + * Get the JSON object representation of this HTTP header collection. + */ + public toJson(): RawHttpHeaders { + return this.rawHeaders(); + } + + /** + * Get the string representation of this HTTP header collection. + */ + public toString(): string { + return JSON.stringify(this.toJson()); + } + + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + public clone(): HttpHeaders { + return new HttpHeaders(this.rawHeaders()); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpOperationResponse.ts b/node_modules/@azure/ms-rest-js/lib/httpOperationResponse.ts new file mode 100644 index 00000000..c61dfd53 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpOperationResponse.ts @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { WebResourceLike } from "./webResource"; +import { HttpHeadersLike } from "./httpHeaders"; + +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + + /** + * The HTTP response status (e.g. 200) + */ + status: number; + + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} + +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob {} +} + +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { [key: string]: any }; + + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + + /** + * The redirected property indicates whether the response is the result of a request which was redirected. + */ + redirected?: boolean; + + /** + * The url property contains the URL of the response. The value will be the final URL obtained after any redirects. + */ + url?: string; +} + +/** + * The flattened response to a REST call. + * Contains the underlying HttpOperationResponse as well as + * the merged properties of the parsedBody, parsedHeaders, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. + */ + _response: HttpOperationResponse; + + [key: string]: any; +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpPipelineLogLevel.ts b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogLevel.ts new file mode 100644 index 00000000..a8cf3a53 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogLevel.ts @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF, + + /** + * An error log. + */ + ERROR, + + /** + * A warning log. + */ + WARNING, + + /** + * An information log. 
+ */ + INFO, +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpPipelineLogger.ts b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogger.ts new file mode 100644 index 00000000..47929c43 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogger.ts @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; + +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} + +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel The log level threshold for what logs will be logged. + */ + constructor(public minimumLogLevel: HttpPipelineLogLevel) {} + + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void { + const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`; + switch (logLevel) { + case HttpPipelineLogLevel.ERROR: + console.error(logMessage); + break; + + case HttpPipelineLogLevel.WARNING: + console.warn(logMessage); + break; + + case HttpPipelineLogLevel.INFO: + console.log(logMessage); + break; + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/msRest.ts b/node_modules/@azure/ms-rest-js/lib/msRest.ts new file mode 100644 index 00000000..0b0ea24c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/msRest.ts @@ -0,0 +1,114 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +/// + +export { + WebResource, + WebResourceLike, + HttpRequestBody, + RequestPrepareOptions, + HttpMethods, + ParameterValue, + RequestOptionsBase, + TransferProgressEvent, + AbortSignalLike, +} from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { CommonRequestInfo, CommonRequestInit, CommonResponse } from "./fetchHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { + OperationParameter, + OperationQueryParameter, + OperationURLParameter, + ParameterPath, +} from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { + AgentSettings, + ProxySettings, + ServiceClient, + ServiceClientOptions, + flattenResponse, +} from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { logPolicy } from "./policies/logPolicy"; +export { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptions, + RequestPolicyOptionsLike, +} from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { agentPolicy } from "./policies/agentPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { RedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { + TelemetryInfo, + userAgentPolicy, + getDefaultUserAgentValue, +} from "./policies/userAgentPolicy"; +export { + DeserializationContentTypes, + deserializationPolicy, + deserializeResponseBody, +} from "./policies/deserializationPolicy"; +export { + MapperType, + SimpleMapperType, + CompositeMapperType, + DictionaryMapperType, + SequenceMapperType, + EnumMapperType, + Mapper, + BaseMapper, + CompositeMapper, + SequenceMapper, + DictionaryMapper, + EnumMapper, + MapperConstraints, + PolymorphicDiscriminator, + Serializer, + UrlParameterValue, + serializeObject, +} from "./serializer"; +export { + stripRequest, + stripResponse, + delay, + executePromisesSequentially, + generateUuid, + encodeUri, + ServiceCallback, + promiseToCallback, + promiseToServiceCallback, + isValidUuid, + applyMixins, + isNode, + isDuration, +} from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; + +// Credentials +export { TokenCredentials } from "./credentials/tokenCredentials"; +export { TokenResponse } from "./credentials/tokenResponse"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { DomainCredentials } from 
"./credentials/domainCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { AzureIdentityCredentialAdapter } from "./credentials/azureIdentityTokenCredentialAdapter"; diff --git a/node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts new file mode 100644 index 00000000..d40b4efc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as tough from "tough-cookie"; +import * as http from "http"; +import * as https from "https"; +import node_fetch from "node-fetch"; + +import { + CommonRequestInfo, + CommonRequestInit, + CommonResponse, + FetchHttpClient, +} from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +import { createProxyAgent, ProxyAgent } from "./proxyAgent"; + +export class NodeFetchHttpClient extends FetchHttpClient { + private readonly cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + + async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise { + return (node_fetch(input, init) as unknown) as Promise; + } + + async prepareRequest(httpRequest: WebResourceLike): Promise> { + const requestInit: Partial = {}; + + if (this.cookieJar && !httpRequest.headers.get("Cookie")) { + const cookieString = await new Promise((resolve, reject) => { + this.cookieJar!.getCookieString(httpRequest.url, (err, cookie) => { + if (err) { + reject(err); + } else { + resolve(cookie); + } + }); + }); + + httpRequest.headers.set("Cookie", cookieString); + } + + if (httpRequest.agentSettings) { + const { http: httpAgent, https: httpsAgent } = httpRequest.agentSettings; + if (httpsAgent && httpRequest.url.startsWith("https")) { + requestInit.agent = httpsAgent; + } else if (httpAgent) { + requestInit.agent = httpAgent; + } + } else if (httpRequest.proxySettings) { + const tunnel: ProxyAgent = createProxyAgent( + httpRequest.url, + httpRequest.proxySettings, + httpRequest.headers + ); + requestInit.agent = tunnel.agent; + } + + if (httpRequest.keepAlive === true) { + if (requestInit.agent) { + requestInit.agent.keepAlive = true; + } else { + const options: http.AgentOptions | https.AgentOptions = { keepAlive: true }; + const agent = httpRequest.url.startsWith("https") + ? new https.Agent(options) + : new http.Agent(options); + requestInit.agent = agent; + } + } + + return requestInit; + } + + async processRequest(operationResponse: HttpOperationResponse): Promise { + if (this.cookieJar) { + const setCookieHeader = operationResponse.headers.get("Set-Cookie"); + if (setCookieHeader != undefined) { + await new Promise((resolve, reject) => { + this.cookieJar!.setCookie( + setCookieHeader, + operationResponse.request.url, + { ignoreError: true }, + (err) => { + if (err) { + reject(err); + } else { + resolve(); + } + } + ); + }); + } + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationArguments.ts b/node_modules/@azure/ms-rest-js/lib/operationArguments.ts new file mode 100644 index 00000000..55090db9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationArguments.ts @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { RequestOptionsBase } from "./webResource"; + +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationParameter.ts b/node_modules/@azure/ms-rest-js/lib/operationParameter.ts new file mode 100644 index 00000000..150b18ae --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationParameter.ts @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { Mapper } from "./serializer"; + +export type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath }; + +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} + +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} + +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; + + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} + +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export function getPathStringFromParameter(parameter: OperationParameter): string { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} + +export function getPathStringFromParameterPath( + parameterPath: ParameterPath, + mapper: Mapper +): string { + let result: string; + if (typeof parameterPath === "string") { + result = parameterPath; + } else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } else { + result = mapper.serializedName!; + } + return result; +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationResponse.ts b/node_modules/@azure/ms-rest-js/lib/operationResponse.ts new file mode 100644 index 00000000..0fbea88c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationResponse.ts @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { Mapper } from "./serializer"; + +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + + /** + * The mapper that will be used to deserialize the response body. + */ + bodyMapper?: Mapper; +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationSpec.ts b/node_modules/@azure/ms-rest-js/lib/operationSpec.ts new file mode 100644 index 00000000..fe8b84a0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationSpec.ts @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { + OperationParameter, + OperationQueryParameter, + OperationURLParameter, +} from "./operationParameter"; +import { OperationResponse } from "./operationResponse"; +import { MapperType, Serializer } from "./serializer"; +import { HttpMethods } from "./webResource"; + +/** + * A specification that defines an operation. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. + */ + readonly headerParameters?: ReadonlyArray; + + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + + /** + * The different types of responses that this operation can return based on what status code is + * returned. 
+ */ + readonly responses: { [responseCode: string]: OperationResponse }; +} + +export function isStreamOperation(operationSpec: OperationSpec): boolean { + let result = false; + for (const statusCode in operationSpec.responses) { + const operationResponse: OperationResponse = operationSpec.responses[statusCode]; + if ( + operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream + ) { + result = true; + break; + } + } + return result; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.browser.ts b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.browser.ts new file mode 100644 index 00000000..cca71847 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.browser.ts @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { AgentSettings } from "../serviceClient"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; + +const agentNotSupportedInBrowser = new Error("AgentPolicy is not supported in browser environment"); + +export function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory { + return { + create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => { + throw agentNotSupportedInBrowser; + }, + }; +} + +export class AgentPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) { + super(nextPolicy, options); + throw agentNotSupportedInBrowser; + } + + public sendRequest(_request: WebResourceLike): Promise { + throw agentNotSupportedInBrowser; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.ts new file mode 100644 index 00000000..624a14be --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.ts @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { AgentSettings } from "../serviceClient"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; + +export function agentPolicy(agentSettings?: AgentSettings): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new AgentPolicy(nextPolicy, options, agentSettings!); + }, + }; +} + +export class AgentPolicy extends BaseRequestPolicy { + agentSettings: AgentSettings; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + agentSettings: AgentSettings + ) { + super(nextPolicy, options); + this.agentSettings = agentSettings; + } + + public sendRequest(request: WebResourceLike): Promise { + if (!request.agentSettings) { + request.agentSettings = this.agentSettings; + } + return this._nextPolicy.sendRequest(request); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/deserializationPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/deserializationPolicy.ts new file mode 100644 index 00000000..f14f78cc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/deserializationPolicy.ts @@ -0,0 +1,294 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { OperationResponse } from "../operationResponse"; +import { OperationSpec, isStreamOperation } from "../operationSpec"; +import { RestError } from "../restError"; +import { Mapper, MapperType } from "../serializer"; +import * as utils from "../util/utils"; +import { parseXML } from "../util/xml"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} + +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export function deserializationPolicy( + deserializationContentTypes?: DeserializationContentTypes +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; +} + +export const defaultJsonContentTypes = ["application/json", "text/json"]; +export const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; + +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. 
+ */ +export class DeserializationPolicy extends BaseRequestPolicy { + public readonly jsonContentTypes: string[]; + public readonly xmlContentTypes: string[]; + + constructor( + nextPolicy: RequestPolicy, + deserializationContentTypes: DeserializationContentTypes | undefined, + options: RequestPolicyOptionsLike + ) { + super(nextPolicy, options); + + this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + } + + public async sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request) + .then((response: HttpOperationResponse) => + deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response) + ); + } +} + +function getOperationResponse( + parsedResponse: HttpOperationResponse +): undefined | OperationResponse { + let result: OperationResponse | undefined; + const request: WebResourceLike = parsedResponse.request; + const operationSpec: OperationSpec | undefined = request.operationSpec; + if (operationSpec) { + const operationResponseGetter: + | undefined + | (( + operationSpec: OperationSpec, + response: HttpOperationResponse + ) => undefined | OperationResponse) = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} + +function shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean { + const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) = + parsedResponse.request.shouldDeserialize; + let result: boolean; + if (shouldDeserialize === undefined) { + result = true; + } else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } else { + result = shouldDeserialize(parsedResponse); + } + return result; +} + +export function deserializeResponseBody( + jsonContentTypes: string[], + xmlContentTypes: string[], + response: HttpOperationResponse +): Promise { + return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => { + const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + const statusCode: number = parsedResponse.status; + + const expectedStatusCodes: string[] = Object.keys(operationSpec.responses); + + const hasNoExpectedStatusCodes: boolean = + expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + + const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse); + + const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + const defaultResponseSpec: OperationResponse = operationSpec.responses.default; + if (defaultResponseSpec) { + const initialErrorMessage: string = isStreamOperation(operationSpec) + ? 
`Unexpected status code: ${statusCode}` + : (parsedResponse.bodyAsText as string); + + const error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = utils.stripRequest(parsedResponse.request); + error.response = utils.stripResponse(parsedResponse); + + let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + const defaultResponseBodyMapper: Mapper | undefined = + defaultResponseSpec.bodyMapper; + if ( + defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError" + ) { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } else { + let internalError: any = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + + if (defaultResponseBodyMapper) { + let valueToDeserialize: any = parsedErrorResponse; + if ( + operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence + ) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!] + : []; + } + error.body = operationSpec.serializer.deserialize( + defaultResponseBodyMapper, + valueToDeserialize, + "error.body" + ); + } + } + } catch (defaultError) { + error.message = `Error \"${defaultError.message}\" occurred in deserializing the responseBody - \"${parsedResponse.bodyAsText}\" for the default response.`; + } + return Promise.reject(error); + } + } else if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize: any = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!] 
+ : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize( + responseSpec.bodyMapper, + valueToDeserialize, + "operationRes.parsedBody" + ); + } catch (error) { + const restError = new RestError( + `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}` + ); + restError.request = utils.stripRequest(parsedResponse.request); + restError.response = utils.stripResponse(parsedResponse); + return Promise.reject(restError); + } + } else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize( + responseSpec.headersMapper, + parsedResponse.headers.rawHeaders(), + "operationRes.parsedHeaders" + ); + } + } + } + } + return Promise.resolve(parsedResponse); + }); +} + +function parse( + jsonContentTypes: string[], + xmlContentTypes: string[], + operationResponse: HttpOperationResponse +): Promise { + const errorHandler = (err: Error & { code: string }) => { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError( + msg, + errCode, + operationResponse.status, + operationResponse.request, + operationResponse, + operationResponse.bodyAsText + ); + return Promise.reject(e); + }; + + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType: string = operationResponse.headers.get("Content-Type") || ""; + const contentComponents: string[] = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + if ( + contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1) + ) { + return new Promise((resolve) => { + operationResponse.parsedBody = JSON.parse(text); + resolve(operationResponse); + }).catch(errorHandler); + } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + return parseXML(text) + .then((body) => { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + + return Promise.resolve(operationResponse); +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/exponentialRetryPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/exponentialRetryPolicy.ts new file mode 100644 index 00000000..74aeb60d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/exponentialRetryPolicy.ts @@ -0,0 +1,220 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { RestError } from "../restError"; + +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} + +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} + +export function exponentialRetryPolicy( + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new ExponentialRetryPolicy( + nextPolicy, + options, + retryCount, + retryInterval, + minRetryInterval, + maxRetryInterval + ); + }, + }; +} + +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +const DEFAULT_CLIENT_RETRY_COUNT = 3; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. + */ + retryInterval: number; + /** + * The minimum retry interval in milliseconds. + */ + minRetryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. + * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number + ) { + super(nextPolicy, options); + function isNumber(n: any): n is number { + return typeof n === "number"; + } + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => retry(this, request, response)) + .catch((error) => retry(this, request, error.response, undefined, error)); + } +} + +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. 
+ */ +function shouldRetry( + policy: ExponentialRetryPolicy, + statusCode: number | undefined, + retryData: RetryData +): boolean { + if ( + statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505 + ) { + return false; + } + + let currentCount: number; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } else { + currentCount = retryData && retryData.retryCount; + } + + return currentCount < policy.retryCount; +} + +/** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. + */ +function updateRetryData( + policy: ExponentialRetryPolicy, + retryData?: RetryData, + err?: RetryError +): RetryData { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + + retryData.error = err; + } + + // Adjust retry count + retryData.retryCount++; + + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount) - 1; + const boundedRandDelta = + policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + + retryData.retryInterval = Math.min( + policy.minRetryInterval + incrementDelta, + policy.maxRetryInterval + ); + + return retryData; +} + +function retry( + policy: ExponentialRetryPolicy, + request: WebResourceLike, + response?: HttpOperationResponse, + retryData?: RetryData, + requestError?: RetryError +): Promise { + retryData = updateRetryData(policy, retryData, requestError); + const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return utils + .delay(retryData.retryInterval) + .then(() => policy._nextPolicy.sendRequest(request.clone())) + .then((res) => retry(policy, request, res, retryData, undefined)) + .catch((err) => retry(policy, request, response, retryData, err)); + } else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + const err = + retryData.error || + new RestError( + "Failed to send the request.", + RestError.REQUEST_SEND_ERROR, + response && response.status, + response && response.request, + response + ); + return Promise.reject(err); + } else { + return Promise.resolve(response); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/generateClientRequestIdPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/generateClientRequestIdPolicy.ts new file mode 100644 index 00000000..aa17a318 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/generateClientRequestIdPolicy.ts @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function generateClientRequestIdPolicy( + requestIdHeaderName = "x-ms-client-request-id" +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} + +export class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + private _requestIdHeaderName: string + ) { + super(nextPolicy, options); + } + + public sendRequest(request: WebResourceLike): Promise { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, utils.generateUuid()); + } + return this._nextPolicy.sendRequest(request); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/logPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/logPolicy.ts new file mode 100644 index 00000000..90d44ca8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/logPolicy.ts @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function logPolicy(logger: any = console.log): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new LogPolicy(nextPolicy, options, logger); + }, + }; +} + +export class LogPolicy extends BaseRequestPolicy { + logger?: any; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + logger: any = console.log + ) { + super(nextPolicy, options); + this.logger = logger; + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response)); + } +} + +function logResponse( + policy: LogPolicy, + response: HttpOperationResponse +): Promise { + policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`); + policy.logger(`>> Response status code: ${response.status}`); + const responseBody = response.bodyAsText; + policy.logger(`>> Body: ${responseBody}`); + return Promise.resolve(response); +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.browser.ts b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.browser.ts new file mode 100644 index 00000000..9b688a4a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.browser.ts @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/* + * NOTE: When moving this file, please update "browser" section in package.json + * and "plugins" section in webpack.testconfig.ts. 
+ */ + +import { TelemetryInfo } from "./userAgentPolicy"; + +interface NavigatorEx extends Navigator { + // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2 + readonly oscpu: string; +} + +export function getDefaultUserAgentKey(): string { + return "x-ms-command-name"; +} + +export function getPlatformSpecificData(): TelemetryInfo[] { + const navigator = self.navigator as NavigatorEx; + const osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + + return [osInfo]; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.ts new file mode 100644 index 00000000..ea523dd1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as os from "os"; +import { TelemetryInfo } from "./userAgentPolicy"; +import { Constants } from "../util/constants"; + +export function getDefaultUserAgentKey(): string { + return Constants.HeaderConstants.USER_AGENT; +} + +export function getPlatformSpecificData(): TelemetryInfo[] { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + + const osInfo = { + key: "OS", + value: `(${os.arch()}-${os.type()}-${os.release()})`, + }; + + return [runtimeInfo, osInfo]; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.browser.ts b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.browser.ts new file mode 100644 index 00000000..a1c194d3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.browser.ts @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ProxySettings } from "../serviceClient"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; + +const proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); + +export function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined { + return undefined; +} + +export function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory { + return { + create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => { + throw proxyNotSupportedInBrowser; + }, + }; +} + +export class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) { + super(nextPolicy, options); + throw proxyNotSupportedInBrowser; + } + + public sendRequest(_request: WebResourceLike): Promise { + throw proxyNotSupportedInBrowser; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.ts new file mode 100644 index 00000000..bda59608 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.ts @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +import { Constants } from "../util/constants"; +import { URLBuilder } from "../url"; + +/** + * @internal + */ +export const noProxyList: string[] = loadNoProxy(); +const byPassedList: Map = new Map(); + +/** + * @internal + */ +export function getEnvironmentValue(name: string): string | undefined { + if (process.env[name]) { + return process.env[name]; + } else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} + +function loadEnvironmentProxyValue(): string | undefined { + if (!process) { + return undefined; + } + + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + + return httpsProxy || allProxy || httpProxy; +} + +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. +// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri: string): boolean | undefined { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost()!; + if (byPassedList.has(host)) { + return byPassedList.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + byPassedList.set(host, isBypassedFlag); + return isBypassedFlag; +} + +/** + * @internal + */ +export function loadNoProxy(): string[] { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + + return []; +} + +/** + * @internal + */ +function extractAuthFromUrl( + url: string +): { username?: string; password?: string; urlWithoutAuth: string } { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? 
auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} + +export function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, + }; +} + +export function proxyPolicy(proxySettings?: ProxySettings): RequestPolicyFactory { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new ProxyPolicy(nextPolicy, options, proxySettings!); + }, + }; +} + +export class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + proxySettings: ProxySettings + ) { + super(nextPolicy, options); + this.proxySettings = proxySettings; + } + + public sendRequest(request: WebResourceLike): Promise { + if (!request.proxySettings && !isBypassed(request.url)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/redirectPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/redirectPolicy.ts new file mode 100644 index 00000000..fd158d10 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/redirectPolicy.ts @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { URLBuilder } from "../url"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +/** + * Options for how redirect responses are handled. + */ +export interface RedirectOptions { + /* + * When true, redirect responses are followed. Defaults to true. + */ + handleRedirects: boolean; + + /* + * The maximum number of times the redirect URL will be tried before + * failing. Defaults to 20. 
+ */ + maxRetries?: number; +} + +export const DefaultRedirectOptions: RedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; + +export function redirectPolicy(maximumRetries = 20): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} + +export class RedirectPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + readonly maxRetries = 20 + ) { + super(nextPolicy, options); + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} + +function handleRedirect( + policy: RedirectPolicy, + response: HttpOperationResponse, + currentRetries: number +): Promise { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if ( + locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries)) + ) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)) + .then((res) => recordRedirect(res, request.url)); + } + + return Promise.resolve(response); +} + +function recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/requestPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/requestPolicy.ts new file mode 100644 index 00000000..d25d3146 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/requestPolicy.ts @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { WebResourceLike } from "../webResource"; + +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. 
+ */ +export type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; + +export interface RequestPolicy { + sendRequest(httpRequest: WebResourceLike): Promise; +} + +export abstract class BaseRequestPolicy implements RequestPolicy { + protected constructor( + readonly _nextPolicy: RequestPolicy, + readonly _options: RequestPolicyOptionsLike + ) {} + + public abstract sendRequest(webResource: WebResourceLike): Promise; + + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + public shouldLog(logLevel: HttpPipelineLogLevel): boolean { + return this._options.shouldLog(logLevel); + } + + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + public log(logLevel: HttpPipelineLogLevel, message: string): void { + this._options.log(logLevel, message); + } +} + +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} + +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export class RequestPolicyOptions implements RequestPolicyOptionsLike { + constructor(private _logger?: HttpPipelineLogger) {} + + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + public shouldLog(logLevel: HttpPipelineLogLevel): boolean { + return ( + !!this._logger && + logLevel !== HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel + ); + } + + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + public log(logLevel: HttpPipelineLogLevel, message: string): void { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/rpRegistrationPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/rpRegistrationPolicy.ts new file mode 100644 index 00000000..47f11f3e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/rpRegistrationPolicy.ts @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} + +export class RPRegistrationPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + readonly _retryTimeout = 30 + ) { + super(nextPolicy, options); + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => registerIfNeeded(this, request, response)); + } +} + +function registerIfNeeded( + policy: RPRegistrationPolicy, + request: WebResourceLike, + response: HttpOperationResponse +): Promise { + if (response.status === 409) { + const rpName = checkRPNotRegisteredError(response.bodyAsText as string); + if (rpName) { + const urlPrefix = extractSubscriptionUrl(request.url); + return ( + registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(() => false) + .then((registrationStatus) => { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", utils.generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + }) + ); + } + } + + return Promise.resolve(response); +} + +/** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. + */ +function getRequestEssentials( + originalRequest: WebResourceLike, + reuseUrlToo = false +): WebResourceLike { + const reqOptions: WebResourceLike = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); + + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + + return reqOptions; +} + +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. 
+ */ +function checkRPNotRegisteredError(body: string): string { + let result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } catch (err) { + // do nothing; + } + if ( + responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration" + ) { + const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} + +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ +function extractSubscriptionUrl(url: string): string { + let result; + const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } else { + throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + } + return result; +} + +/** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ +function registerRP( + policy: RPRegistrationPolicy, + urlPrefix: string, + provider: string, + originalRequest: WebResourceLike +): Promise { + const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; + const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + + return policy._nextPolicy.sendRequest(reqOptions).then((response) => { + if (response.status !== 200) { + throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); +} + +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. 
+ */ +function getRegistrationStatus( + policy: RPRegistrationPolicy, + url: string, + originalRequest: WebResourceLike +): Promise { + const reqOptions: any = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + + return policy._nextPolicy.sendRequest(reqOptions).then((res) => { + const obj = res.parsedBody as any; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } else { + return utils + .delay(policy._retryTimeout * 1000) + .then(() => getRegistrationStatus(policy, url, originalRequest)); + } + }); +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/signingPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/signingPolicy.ts new file mode 100644 index 00000000..91b31843 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/signingPolicy.ts @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicyFactory, + RequestPolicy, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function signingPolicy( + authenticationProvider: ServiceClientCredentials +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} + +export class SigningPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + public authenticationProvider: ServiceClientCredentials + ) { + super(nextPolicy, options); + } + + signRequest(request: WebResourceLike): Promise { + return this.authenticationProvider.signRequest(request); + } + + public sendRequest(request: WebResourceLike): Promise { + return this.signRequest(request).then((nextRequest) => + this._nextPolicy.sendRequest(nextRequest) + ); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/systemErrorRetryPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/systemErrorRetryPolicy.ts new file mode 100644 index 00000000..9edc5eef --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/systemErrorRetryPolicy.ts @@ -0,0 +1,187 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
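`signingPolicy.ts` above contains no authentication logic of its own: it defers to whatever `ServiceClientCredentials` implementation it is constructed with, calling its `signRequest()` and then forwarding the (possibly mutated) request. A rough sketch of a credential that would satisfy that usage (illustrative only; the exact `ServiceClientCredentials` declaration is not shown in this hunk, so the `WebResourceLike`-based signature below is an assumption, and the token value is a placeholder):

```typescript
// Illustrative sketch only -- a static bearer-token credential wired into the
// pipeline through signingPolicy(). Relative imports assume placement in lib/.
import { ServiceClientCredentials } from "./credentials/serviceClientCredentials";
import { signingPolicy } from "./policies/signingPolicy";
import { WebResourceLike } from "./webResource";

class StaticTokenCredentials implements ServiceClientCredentials {
  constructor(private readonly token: string) {}

  // Assumed shape: signingPolicy() only requires that the returned promise
  // resolves to the request that should be sent next.
  signRequest(webResource: WebResourceLike): Promise<WebResourceLike> {
    webResource.headers.set("Authorization", `Bearer ${this.token}`);
    return Promise.resolve(webResource);
  }
}

// The resulting factory slots into a requestPolicyFactories array, e.g. via the
// ServiceClientOptions defined later in this patch.
const factory = signingPolicy(new StaticTokenCredentials("placeholder-token"));
```

The `systemErrorRetryPolicy.ts` file that follows applies the same wrap-and-delegate pattern to transient socket-level failures.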
+ +import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} + +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} + +export function systemErrorRetryPolicy( + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new SystemErrorRetryPolicy( + nextPolicy, + options, + retryCount, + retryInterval, + minRetryInterval, + maxRetryInterval + ); + }, + }; +} + +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. + * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +export class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + DEFAULT_CLIENT_RETRY_COUNT = 3; + DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number + ) { + super(nextPolicy, options); + this.retryCount = typeof retryCount === "number" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = + typeof retryInterval === "number" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request.clone()) + .catch((error) => retry(this, request, error.response, error)); + } +} + +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean { + let currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} + +/** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ +function updateRetryData( + policy: SystemErrorRetryPolicy, + retryData?: RetryData, + err?: RetryError +): RetryData { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + + retryData.error = err; + } + + // Adjust retry count + retryData.retryCount++; + + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount) - 1; + const boundedRandDelta = + policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + + retryData.retryInterval = Math.min( + policy.minRetryInterval + incrementDelta, + policy.maxRetryInterval + ); + + return retryData; +} + +async function retry( + policy: SystemErrorRetryPolicy, + request: WebResourceLike, + operationResponse: HttpOperationResponse, + err?: RetryError, + retryData?: RetryData +): Promise { + retryData = updateRetryData(policy, retryData, err); + if ( + err && + err.code && + shouldRetry(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT") + ) { + // If previous operation ended with an error and the policy allows a retry, do that + try { + await utils.delay(retryData.retryInterval); + return policy._nextPolicy.sendRequest(request.clone()); + } catch (error) { + return retry(policy, request, operationResponse, error, retryData); + } + } else { + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return Promise.reject(retryData.error); + } + return operationResponse; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/throttlingRetryPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/throttlingRetryPolicy.ts new file mode 100644 index 00000000..20d77cdd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/throttlingRetryPolicy.ts @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyOptionsLike, + RequestPolicyFactory, +} from "./requestPolicy"; +import { WebResourceLike } from "../webResource"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Constants } from "../util/constants"; +import { delay } from "../util/utils"; + +const StatusCodes = Constants.HttpConstants.StatusCodes; +const DEFAULT_RETRY_COUNT = 3; + +/** + * Options that control how to retry on response status code 429. + */ +export interface ThrottlingRetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. 
+ */ + maxRetries?: number; +} + +export function throttlingRetryPolicy( + maxRetries: number = DEFAULT_RETRY_COUNT +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; +} + +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export class ThrottlingRetryPolicy extends BaseRequestPolicy { + private retryLimit: number; + + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) { + super(nextPolicy, options); + this.retryLimit = retryLimit; + } + + public async sendRequest(httpRequest: WebResourceLike): Promise { + return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => { + return this.retry(httpRequest, response, 0); + }); + } + + private async retry( + httpRequest: WebResourceLike, + httpResponse: HttpOperationResponse, + retryCount: number + ): Promise { + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return httpResponse; + } + + const retryAfterHeader: string | undefined = httpResponse.headers.get( + Constants.HeaderConstants.RETRY_AFTER + ); + + if (retryAfterHeader && retryCount < this.retryLimit) { + const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader( + retryAfterHeader + ); + if (delayInMs) { + await delay(delayInMs); + const res = await this._nextPolicy.sendRequest(httpRequest); + return this.retry(httpRequest, res, retryCount + 1); + } + } + + return httpResponse; + } + + public static parseRetryAfterHeader(headerValue: string): number | undefined { + const retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } else { + return retryAfterInSeconds * 1000; + } + } + + public static parseDateRetryAfterHeader(headerValue: string): number | undefined { + try { + const now: number = Date.now(); + const date: number = Date.parse(headerValue); + const diff = date - now; + + return Number.isNaN(diff) ? undefined : diff; + } catch (error) { + return undefined; + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/userAgentPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/userAgentPolicy.ts new file mode 100644 index 00000000..c4e70db9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/userAgentPolicy.ts @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
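The throttling policy above only acts on HTTP 429 responses that carry a `Retry-After` header, which it accepts either as delta-seconds or as an HTTP date. A small sketch of what the public parsing helper returns (values are illustrative, not captured output):

```typescript
// Illustrative sketch only -- exercising ThrottlingRetryPolicy's static helper.
import { ThrottlingRetryPolicy } from "./policies/throttlingRetryPolicy";

// Delta-seconds form: "30" is converted to milliseconds, i.e. 30000.
const deltaMs = ThrottlingRetryPolicy.parseRetryAfterHeader("30");

// HTTP-date form: returns the remaining wait in milliseconds relative to now
// (roughly 60000 here), or undefined if the date cannot be parsed.
const dateMs = ThrottlingRetryPolicy.parseRetryAfterHeader(
  new Date(Date.now() + 60_000).toUTCString()
);

console.log(deltaMs, dateMs);
```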
+ +import { HttpHeaders } from "../httpHeaders"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Constants } from "../util/constants"; +import { WebResourceLike } from "../webResource"; +import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export type TelemetryInfo = { key?: string; value?: string }; + +function getRuntimeInfo(): TelemetryInfo[] { + const msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + + return [msRestRuntime]; +} + +function getUserAgentString( + telemetryInfo: TelemetryInfo[], + keySeparator = " ", + valueSeparator = "/" +): string { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} + +export const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; + +export function getDefaultUserAgentValue(): string { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} + +export function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory { + const key: string = + !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + const value: string = + !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} + +export class UserAgentPolicy extends BaseRequestPolicy { + constructor( + readonly _nextPolicy: RequestPolicy, + readonly _options: RequestPolicyOptionsLike, + protected headerKey: string, + protected headerValue: string + ) { + super(_nextPolicy, _options); + } + + sendRequest(request: WebResourceLike): Promise { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + + addUserAgentHeader(request: WebResourceLike): void { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/proxyAgent.ts b/node_modules/@azure/ms-rest-js/lib/proxyAgent.ts new file mode 100644 index 00000000..93dd369f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/proxyAgent.ts @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
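`userAgentPolicy.ts` above builds a telemetry string from the runtime info (`ms-rest-js` plus its version and platform data) and sets it on the telemetry header only when that header is not already present. A short sketch of customizing it (illustrative; the extra product tag is invented):

```typescript
// Illustrative sketch only -- extend the default telemetry value with a
// made-up product identifier and build the policy factory from it.
import {
  getDefaultUserAgentValue,
  userAgentPolicy,
} from "./policies/userAgentPolicy";

// Something like "ms-rest-js/<version> <platform data>", depending on runtime.
const defaultValue = getDefaultUserAgentValue();

// When no key is given, the policy falls back to getDefaultUserAgentKey()
// ("User-Agent" on Node.js, "x-ms-command-name" in browsers, per the
// ServiceClientOptions docs later in this patch).
const factory = userAgentPolicy({ value: `${defaultValue} my-tool/0.1.0` });
```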
+
+import * as http from "http";
+import * as https from "https";
+import * as tunnel from "tunnel";
+
+import { ProxySettings } from "./serviceClient";
+import { URLBuilder } from "./url";
+import { HttpHeadersLike } from "./httpHeaders";
+
+export type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };
+export function createProxyAgent(
+  requestUrl: string,
+  proxySettings: ProxySettings,
+  headers?: HttpHeadersLike
+): ProxyAgent {
+  const tunnelOptions: tunnel.HttpsOverHttpsOptions = {
+    proxy: {
+      host: URLBuilder.parse(proxySettings.host).getHost() as string,
+      port: proxySettings.port,
+      headers: (headers && headers.rawHeaders()) || {},
+    },
+  };
+
+  if (proxySettings.username && proxySettings.password) {
+    tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;
+  } else if (proxySettings.username) {
+    tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;
+  }
+
+  const requestScheme = URLBuilder.parse(requestUrl).getScheme() || "";
+  const isRequestHttps = requestScheme.toLowerCase() === "https";
+  const proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || "";
+  const isProxyHttps = proxyScheme.toLowerCase() === "https";
+
+  const proxyAgent = {
+    isHttps: isRequestHttps,
+    agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),
+  };
+
+  return proxyAgent;
+}
+
+// Duplicate tunnel.HttpsOverHttpsOptions to avoid exporting createTunnel() with dependency on @types/tunnel
+// createTunnel() is only imported by tests.
+export interface HttpsProxyOptions {
+  host: string;
+  port: number;
+  localAddress?: string;
+  proxyAuth?: string;
+  headers?: { [key: string]: any };
+  ca?: Buffer[];
+  servername?: string;
+  key?: Buffer;
+  cert?: Buffer;
+}
+
+interface HttpsOverHttpsOptions {
+  maxSockets?: number;
+  ca?: Buffer[];
+  key?: Buffer;
+  cert?: Buffer;
+  proxy?: HttpsProxyOptions;
+}
+
+export function createTunnel(
+  isRequestHttps: boolean,
+  isProxyHttps: boolean,
+  tunnelOptions: HttpsOverHttpsOptions
+): http.Agent | https.Agent {
+  if (isRequestHttps && isProxyHttps) {
+    return tunnel.httpsOverHttps(tunnelOptions);
+  } else if (isRequestHttps && !isProxyHttps) {
+    return tunnel.httpsOverHttp(tunnelOptions);
+  } else if (!isRequestHttps && isProxyHttps) {
+    return tunnel.httpOverHttps(tunnelOptions);
+  } else {
+    return tunnel.httpOverHttp(tunnelOptions);
+  }
+}
diff --git a/node_modules/@azure/ms-rest-js/lib/queryCollectionFormat.ts b/node_modules/@azure/ms-rest-js/lib/queryCollectionFormat.ts
new file mode 100644
index 00000000..ba6cca81
--- /dev/null
+++ b/node_modules/@azure/ms-rest-js/lib/queryCollectionFormat.ts
@@ -0,0 +1,13 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for license information.
+
+/**
+ * The format that will be used to join an array of values together for a query parameter value.
+ */
+export enum QueryCollectionFormat {
+  Csv = ",",
+  Ssv = " ",
+  Tsv = "\t",
+  Pipes = "|",
+  Multi = "Multi",
+}
diff --git a/node_modules/@azure/ms-rest-js/lib/restError.ts b/node_modules/@azure/ms-rest-js/lib/restError.ts
new file mode 100644
index 00000000..88479c44
--- /dev/null
+++ b/node_modules/@azure/ms-rest-js/lib/restError.ts
@@ -0,0 +1,34 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for license information.
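`createProxyAgent()` above chooses between the four `tunnel` variants based on whether the request and the proxy each use HTTP or HTTPS, and reports the request's scheme back in `isHttps`. For example (host name and port are placeholders):

```typescript
// Illustrative sketch only -- an HTTPS request routed through a plain-HTTP
// proxy resolves to tunnel.httpsOverHttp() internally.
import { createProxyAgent } from "./proxyAgent";

const { isHttps, agent } = createProxyAgent(
  "https://example.org/resource", // request URL (https)
  { host: "http://proxy.internal", port: 8080 } // placeholder proxy settings (http)
);

// isHttps === true because the *request* is https; `agent` can then be passed
// to an HTTP client that accepts a Node agent.
```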
+ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; + +export class RestError extends Error { + static readonly REQUEST_SEND_ERROR: string = "REQUEST_SEND_ERROR"; + static readonly REQUEST_ABORTED_ERROR: string = "REQUEST_ABORTED_ERROR"; + static readonly PARSE_ERROR: string = "PARSE_ERROR"; + + code?: string; + statusCode?: number; + request?: WebResourceLike; + response?: HttpOperationResponse; + body?: any; + constructor( + message: string, + code?: string, + statusCode?: number, + request?: WebResourceLike, + response?: HttpOperationResponse, + body?: any + ) { + super(message); + this.code = code; + this.statusCode = statusCode; + this.request = request; + this.response = response; + this.body = body; + + Object.setPrototypeOf(this, RestError.prototype); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/serializer.ts b/node_modules/@azure/ms-rest-js/lib/serializer.ts new file mode 100644 index 00000000..9ea94fcc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/serializer.ts @@ -0,0 +1,1063 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as base64 from "./util/base64"; +import * as utils from "./util/utils"; + +export class Serializer { + constructor( + public readonly modelMappers: { [key: string]: any } = {}, + public readonly isXML?: boolean + ) {} + + validateConstraints(mapper: Mapper, value: any, objectName: string): void { + const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => { + throw new Error( + `"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.` + ); + }; + if (mapper.constraints && value != undefined) { + const { + ExclusiveMaximum, + ExclusiveMinimum, + InclusiveMaximum, + InclusiveMinimum, + MaxItems, + MaxLength, + MinItems, + MinLength, + MultipleOf, + Pattern, + UniqueItems, + } = mapper.constraints; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern: RegExp = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if ( + UniqueItems && + value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i) + ) { + failValidation("UniqueItems", UniqueItems); + } + } + } + + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + serialize(mapper: Mapper, object: any, objectName?: string): any { + let payload: any = {}; + const mapperType = mapper.type.name as string; + if (!objectName) { + objectName = mapper.serializedName!; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + + if (mapper.isConstant) { + object = mapper.defaultValue; + } + + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". + // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + + const { required, nullable } = mapper; + + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && object == undefined) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + + if (object == undefined) { + payload = object; + } else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } else if (mapperType.match(/^Enum$/gi) !== null) { + const enumMapper: EnumMapper = mapper as EnumMapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } else if ( + mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null + ) { + payload = serializeDateTypes(mapperType, object, objectName); + } else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName); + } else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName); + } else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName); + } + } + return payload; + } + + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + deserialize(mapper: Mapper, responseBody: any, objectName: string): any { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + + let payload: any; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName!; + } + + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName); + } else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } else if (responseBody === "false") { + payload = false; + } else { + payload = responseBody; + } + } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = base64.decodeString(responseBody); + } else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName); + } else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType( + this, + mapper as DictionaryMapper, + responseBody, + objectName + ); + } + } + + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + + return payload; + } +} + +function trimEnd(str: string, ch: string) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} + +function bufferToBase64Url(buffer: any): string | undefined { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. 
+ return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} + +function base64UrlToByteArray(str: string): Uint8Array | undefined { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. + return base64.decodeString(str); +} + +function splitSerializeName(prop: string | undefined): string[] { + const classes: string[] = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + + return classes; +} + +function dateToUnixTime(d: string | Date): number | undefined { + if (!d) { + return undefined; + } + + if (typeof d.valueOf() === "string") { + d = new Date(d as string); + } + return Math.floor((d as Date).getTime() / 1000); +} + +function unixTimeToDate(n: number): Date | undefined { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} + +function serializeBasicTypes(typeName: string, objectName: string, value: any): any { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { + throw new Error( + `${objectName} with value "${value}" must be of type string and a valid uuid.` + ); + } + } else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } else if (typeName.match(/^Stream$/gi) !== null) { + const objectType = typeof value; + if ( + objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob) + ) { + throw new Error( + `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.` + ); + } + } + } + return value; +} + +function serializeEnumType(objectName: string, allowedValues: Array, value: any): any { + if (!allowedValues) { + throw new Error( + `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.` + ); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error( + `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify( + allowedValues + )}.` + ); + } + return value; +} + +function serializeByteArrayType(objectName: string, value: any): any { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = base64.encodeByteArray(value); + } + return value; +} + +function serializeBase64UrlType(objectName: string, value: any): any { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = bufferToBase64Url(value); + } + return value; +} + +function serializeDateTypes(typeName: string, value: any, objectName: string) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } else if (typeName.match(/^DateTime$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } else if (typeName.match(/^UnixTime$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error( + `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.` + ); + } + value = dateToUnixTime(value); + } else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!utils.isDuration(value)) { + throw new Error( + `${objectName} must be a string in ISO 8601 format. 
Instead was "${value}".` + ); + } + value = value; + } + } + return value; +} + +function serializeSequenceType( + serializer: Serializer, + mapper: SequenceMapper, + object: any, + objectName: string +) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + const elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error( + `element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.` + ); + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; +} + +function serializeDictionaryType( + serializer: Serializer, + mapper: DictionaryMapper, + object: any, + objectName: string +) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error( + `"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.` + ); + } + const tempDictionary: { [key: string]: any } = {}; + for (const key of Object.keys(object)) { + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; +} + +/** + * Resolves a composite mapper's modelProperties. + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ +function resolveModelProperties( + serializer: Serializer, + mapper: CompositeMapper, + objectName: string +): { [propertyName: string]: Mapper } { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const className = mapper.type.className; + if (!className) { + throw new Error( + `Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify( + mapper, + undefined, + 2 + )}".` + ); + } + + const modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${className}".`); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error( + `modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify( + modelMapper + )}" of type "${className}" for object "${objectName}".` + ); + } + } + + return modelProps; +} + +function serializeCompositeType( + serializer: Serializer, + mapper: CompositeMapper, + object: any, + objectName: string +) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + + if (object != undefined) { + const payload: any = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + + let propName: string | undefined; + let parentObject: any = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } else { + const paths = splitSerializeName(propertyMapper.serializedName!); + propName = paths.pop(); + + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + 
parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + + if (parentObject != undefined) { + const propertyObjectName = + propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if ( + polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined + ) { + toSerialize = mapper.serializedName; + } + + const serializedValue = serializer.serialize( + propertyMapper, + toSerialize, + propertyObjectName + ); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue }; + } else { + parentObject[propName] = serializedValue; + } + } + } + } + + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize( + additionalPropertiesMapper, + object[clientPropName], + objectName + '["' + clientPropName + '"]' + ); + } + } + } + + return payload; + } + return object; +} + +function isSpecialXmlProperty(propertyName: string): boolean { + return ["$", "_"].includes(propertyName); +} + +function deserializeCompositeType( + serializer: Serializer, + mapper: CompositeMapper, + responseBody: any, + objectName: string +): any { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance: { [key: string]: any } = {}; + const handledPropertyNames: string[] = []; + + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName!); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." 
+ serializedName; + } + + const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary: any = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize( + (propertyMapper as DictionaryMapper).type.value, + responseBody[headerKey], + propertyObjectName + ); + } + + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize( + propertyMapper, + responseBody.$[xmlName!], + propertyObjectName + ); + } else { + const propertyName = xmlElementName || xmlName || serializedName; + let unwrappedProperty = responseBody[propertyName!]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName!]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!]; + + const isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize( + propertyMapper, + unwrappedProperty, + propertyObjectName + ); + } + } else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + for (const item of paths) { + if (!res) break; + res = res[item]; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
+ if ( + polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined + ) { + propertyInstance = mapper.serializedName; + } + + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize( + propertyMapper, + propertyInstance, + propertyObjectName + ); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [key, value] of Object.entries(instance)) { + if (!arrayInstance.hasOwnProperty(key)) { + arrayInstance[key] = value; + } + } + instance = arrayInstance; + } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize( + propertyMapper, + propertyInstance, + propertyObjectName + ); + instance[key] = serializedValue; + } + } + } + + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName: string) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize( + additionalPropertiesMapper, + responseBody[responsePropName], + objectName + '["' + responsePropName + '"]' + ); + } + } + } else if (responseBody) { + for (const key of Object.keys(responseBody)) { + if ( + instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key) + ) { + instance[key] = responseBody[key]; + } + } + } + + return instance; +} + +function deserializeDictionaryType( + serializer: Serializer, + mapper: DictionaryMapper, + responseBody: any, + objectName: string +): any { + /*jshint validthis: true */ + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error( + `"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}` + ); + } + if (responseBody) { + const tempDictionary: { [key: string]: any } = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName); + } + return tempDictionary; + } + return responseBody; +} + +function deserializeSequenceType( + serializer: Serializer, + mapper: SequenceMapper, + responseBody: any, + objectName: string +): any { + /*jshint validthis: true */ + const element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error( + `element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}` + ); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`); + } + return tempArray; + } + return responseBody; +} + +function getPolymorphicMapper( + serializer: Serializer, + mapper: CompositeMapper, + object: any, + polymorphicPropertyName: 
"clientName" | "serializedName" +): CompositeMapper { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + const discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + const typeName = mapper.type.uberParent || mapper.type.className; + const indexDiscriminator = + discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} + +function getPolymorphicDiscriminatorRecursively( + serializer: Serializer, + mapper: CompositeMapper +): PolymorphicDiscriminator | undefined { + return ( + mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className) + ); +} + +function getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) { + return ( + typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator + ); +} + +export interface MapperConstraints { + InclusiveMaximum?: number; + ExclusiveMaximum?: number; + InclusiveMinimum?: number; + ExclusiveMinimum?: number; + MaxLength?: number; + MinLength?: number; + Pattern?: RegExp; + MaxItems?: number; + MinItems?: number; + UniqueItems?: true; + MultipleOf?: number; +} + +export type MapperType = + | SimpleMapperType + | CompositeMapperType + | SequenceMapperType + | DictionaryMapperType + | EnumMapperType; + +export interface SimpleMapperType { + name: + | "Base64Url" + | "Boolean" + | "ByteArray" + | "Date" + | "DateTime" + | "DateTimeRfc1123" + | "Object" + | "Stream" + | "String" + | "TimeSpan" + | "UnixTime" + | "Uuid" + | "Number" + | "any"; +} + +export interface CompositeMapperType { + name: "Composite"; + + // Only one of the two below properties should be present. + // Use className to reference another type definition, + // and use modelProperties/additionalProperties when the reference to the other type has been resolved. 
+ className?: string; + + modelProperties?: { [propertyName: string]: Mapper }; + additionalProperties?: Mapper; + + uberParent?: string; + polymorphicDiscriminator?: PolymorphicDiscriminator; +} + +export interface SequenceMapperType { + name: "Sequence"; + element: Mapper; +} + +export interface DictionaryMapperType { + name: "Dictionary"; + value: Mapper; +} + +export interface EnumMapperType { + name: "Enum"; + allowedValues: any[]; +} + +export interface BaseMapper { + xmlName?: string; + xmlIsAttribute?: boolean; + xmlElementName?: string; + xmlIsWrapped?: boolean; + readOnly?: boolean; + isConstant?: boolean; + required?: boolean; + nullable?: boolean; + serializedName?: string; + type: MapperType; + defaultValue?: any; + constraints?: MapperConstraints; +} + +export type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; + +export interface PolymorphicDiscriminator { + serializedName: string; + clientName: string; + [key: string]: string; +} + +export interface CompositeMapper extends BaseMapper { + type: CompositeMapperType; +} + +export interface SequenceMapper extends BaseMapper { + type: SequenceMapperType; +} + +export interface DictionaryMapper extends BaseMapper { + type: DictionaryMapperType; + headerCollectionPrefix?: string; +} + +export interface EnumMapper extends BaseMapper { + type: EnumMapperType; +} + +export interface UrlParameterValue { + value: string; + skipUrlEncoding: boolean; +} + +// TODO: why is this here? +export function serializeObject(toSerialize: any): any { + if (toSerialize == undefined) return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = base64.encodeByteArray(toSerialize); + return toSerialize; + } else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } else if (Array.isArray(toSerialize)) { + const array = []; + for (let i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } else if (typeof toSerialize === "object") { + const dictionary: { [key: string]: any } = {}; + for (const property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; +} + +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o: Array): { [K in T]: K } { + const result: any = {}; + for (const key of o) { + result[key] = key; + } + return result; +} + +export const MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); diff --git a/node_modules/@azure/ms-rest-js/lib/serviceClient.ts b/node_modules/@azure/ms-rest-js/lib/serviceClient.ts new file mode 100644 index 00000000..e6e32185 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/serviceClient.ts @@ -0,0 +1,852 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
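The serializer above is driven entirely by mapper metadata: each mapper names a wire type, optional validation constraints, and, for composites, the model properties to walk. A compact sketch of using it directly (the `Widget` model and its mapper are invented for illustration):

```typescript
// Illustrative sketch only -- a hand-written CompositeMapper fed to the
// Serializer defined above.
import { CompositeMapper, Serializer } from "./serializer";

const WidgetMapper: CompositeMapper = {
  serializedName: "Widget",
  type: {
    name: "Composite",
    className: "Widget",
    modelProperties: {
      name: {
        serializedName: "name",
        required: true,
        type: { name: "String" },
      },
      count: {
        serializedName: "count",
        constraints: { InclusiveMinimum: 0 },
        type: { name: "Number" },
      },
    },
  },
};

const serializer = new Serializer({ Widget: WidgetMapper });

// Yields { name: "gear", count: 3 }. A missing name or a negative count would
// throw instead, via the required check and validateConstraints().
const wire = serializer.serialize(WidgetMapper, { name: "gear", count: 3 }, "widget");
```

`serviceClient.ts`, which follows, wires a `Serializer` like this into its request and response handling.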
+ +import { TokenCredential, isTokenCredential } from "@azure/core-auth"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +import { DefaultHttpClient } from "./defaultHttpClient"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { OperationArguments } from "./operationArguments"; +import { + getPathStringFromParameter, + getPathStringFromParameterPath, + OperationParameter, + ParameterPath, +} from "./operationParameter"; +import { isStreamOperation, OperationSpec } from "./operationSpec"; +import { + deserializationPolicy, + DeserializationContentTypes, +} from "./policies/deserializationPolicy"; +import { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +import { + userAgentPolicy, + getDefaultUserAgentHeaderName, + getDefaultUserAgentValue, +} from "./policies/userAgentPolicy"; +import { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +import { + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptions, + RequestPolicyOptionsLike, +} from "./policies/requestPolicy"; +import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; +import { signingPolicy } from "./policies/signingPolicy"; +import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from "./serializer"; +import { URLBuilder } from "./url"; +import * as utils from "./util/utils"; +import { stringifyXML } from "./util/xml"; +import { + RequestOptionsBase, + RequestPrepareOptions, + WebResourceLike, + isWebResourceLike, + WebResource, +} from "./webResource"; +import { OperationResponse } from "./operationResponse"; +import { ServiceCallback } from "./util/utils"; +import { agentPolicy } from "./policies/agentPolicy"; +import { proxyPolicy, getDefaultProxySettings } from "./policies/proxyPolicy"; +import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +import { Agent } from "http"; +import { + AzureIdentityCredentialAdapter, + azureResourceManagerEndpoints, +} from "./credentials/azureIdentityTokenCredentialAdapter"; + +/** + * HTTP proxy settings (Node.js only) + */ +export interface ProxySettings { + host: string; + port: number; + username?: string; + password?: string; +} + +/** + * HTTP and HTTPS agents (Node.js only) + */ +export interface AgentSettings { + http: Agent; + https: Agent; +} + +/** + * Options to be provided while creating the client. + */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: + | RequestPolicyFactory[] + | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. 
+ */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. + */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-command-name" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only). + */ + agentSettings?: AgentSettings; + /** + * If specified: + * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient. + * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. Otherwise, the scope would default to "https://management.azure.com/.default". + * + * If it is not specified: + * - All OperationSpecs must contain a baseUrl property. + * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be "https://management.azure.com/.default". + */ + baseUri?: string; +} + +/** + * @class + * Initializes a new instance of the ServiceClient. + */ +export class ServiceClient { + /** + * The base URI against which requests will be made when using this ServiceClient instance. + * + * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient. + * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default "https://management.azure.com/.default" + * + * If it is not specified, all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. 
+ */ + protected requestContentType?: string; + + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient: HttpClient; + private readonly _requestPolicyOptions: RequestPolicyOptionsLike; + + private readonly _requestPolicyFactories: RequestPolicyFactory[]; + private readonly _withCredentials: boolean; + + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + constructor( + credentials?: ServiceClientCredentials | TokenCredential, + options?: ServiceClientOptions + ) { + if (!options) { + options = {}; + } + + if (options.baseUri) { + this.baseUri = options.baseUri; + } + + let serviceClientCredentials: ServiceClientCredentials | undefined; + if (isTokenCredential(credentials)) { + let scope: string | undefined = undefined; + if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) { + scope = `${options.baseUri}/.default`; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } else { + serviceClientCredentials = credentials; + } + + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new DefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + + let requestPolicyFactories: RequestPolicyFactory[]; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } else { + requestPolicyFactories = createDefaultRequestPolicyFactories( + serviceClientCredentials, + options + ); + if (options.requestPolicyFactories) { + const newRequestPolicyFactories: + | void + | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + + let httpRequest: WebResourceLike; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } catch (error) { + return Promise.reject(error); + } + + let httpPipeline: RequestPolicy = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create( + httpPipeline, + this._requestPolicyOptions + ); + } + } + return httpPipeline.sendRequest(httpRequest); + } + + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. 
+ * @param {ServiceCallback} callback The callback to call when the response is received. + */ + sendOperationRequest( + operationArguments: OperationArguments, + operationSpec: OperationSpec, + callback?: ServiceCallback + ): Promise { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + + const httpRequest = new WebResource(); + + let result: Promise; + try { + const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error( + "If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use." + ); + } + + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + + const requestUrl: URLBuilder = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue: string = getOperationArgumentValueFromParameter( + this, + operationArguments, + urlParameter, + operationSpec.serializer + ); + urlParameterValue = operationSpec.serializer.serialize( + urlParameter.mapper, + urlParameterValue, + getPathStringFromParameter(urlParameter) + ); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll( + `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, + urlParameterValue + ); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (const queryParameter of operationSpec.queryParameters) { + let queryParameterValue: any = getOperationArgumentValueFromParameter( + this, + operationArguments, + queryParameter, + operationSpec.serializer + ); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize( + queryParameter.mapper, + queryParameterValue, + getPathStringFromParameter(queryParameter) + ); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } else { + for (const index in queryParameterValue) { + const item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } else if ( + queryParameter.collectionFormat === QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === QueryCollectionFormat.Tsv + ) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (const index in queryParameterValue) { + if ( + queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null + ) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if ( + queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== QueryCollectionFormat.Tsv + ) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter( + queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), + queryParameterValue + ); + } + } + } + httpRequest.url = requestUrl.toString(); + + const contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue: any = getOperationArgumentValueFromParameter( + this, + operationArguments, + headerParameter, + operationSpec.serializer + ); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize( + headerParameter.mapper, + headerValue, + getPathStringFromParameter(headerParameter) + ); + const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper) + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } else { + httpRequest.headers.set( + headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), + headerValue + ); + } + } + } + } + + const options: RequestOptionsBase | undefined = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (const customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + + httpRequest.withCredentials = this._withCredentials; + + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + + result = this.sendRequest(httpRequest).then((res) => + flattenResponse(res, operationSpec.responses[res.status]) + ); + } catch (error) { + result = Promise.reject(error); + } + + const cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) + .catch((err) => cb(err)); + } + + return 
result; + } +} + +export function serializeRequestBody( + serviceClient: ServiceClient, + httpRequest: WebResourceLike, + operationArguments: OperationArguments, + operationSpec: OperationSpec +): void { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter( + serviceClient, + operationArguments, + operationSpec.requestBody, + operationSpec.serializer + ); + + const bodyMapper = operationSpec.requestBody.mapper; + const { required, xmlName, xmlElementName, serializedName } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + const requestBodyParameterPathString: string = getPathStringFromParameter( + operationSpec.requestBody + ); + httpRequest.body = operationSpec.serializer.serialize( + bodyMapper, + httpRequest.body, + requestBodyParameterPathString + ); + const isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML( + utils.prepareXMLRootList( + httpRequest.body, + xmlElementName || xmlName || serializedName! + ), + { rootName: xmlName || serializedName } + ); + } else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } catch (error) { + throw new Error( + `Error "${error.message}" occurred in serializing the payload - ${JSON.stringify( + serializedName, + undefined, + " " + )}.` + ); + } + } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue: any = getOperationArgumentValueFromParameter( + serviceClient, + operationArguments, + formDataParameter, + operationSpec.serializer + ); + if (formDataParameterValue != undefined) { + const formDataParameterPropertyName: string = + formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize( + formDataParameter.mapper, + formDataParameterValue, + getPathStringFromParameter(formDataParameter) + ); + } + } + } +} + +function isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory { + return typeof instance.create === "function"; +} + +function getValueOrFunctionResult( + value: undefined | string | ((defaultValue: string) => string), + defaultValueCreator: () => string +): string { + let result: string; + if (typeof value === "string") { + result = value; + } else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} + +function createDefaultRequestPolicyFactories( + credentials: ServiceClientCredentials | RequestPolicyFactory | undefined, + options: ServiceClientOptions +): RequestPolicyFactory[] { + const factories: RequestPolicyFactory[] = []; + + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } else { + factories.push(signingPolicy(credentials)); + } + } + + const userAgentHeaderName: string = getValueOrFunctionResult( + options.userAgentHeaderName, + getDefaultUserAgentHeaderName + ); + const 
userAgentHeaderValue: string = getValueOrFunctionResult( + options.userAgent, + getDefaultUserAgentValue + ); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + + const redirectOptions = { + ...DefaultRedirectOptions, + ...options.redirectOptions, + }; + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + + factories.push(deserializationPolicy(options.deserializationContentTypes)); + + const proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy(proxySettings)); + } + + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + + return factories; +} + +export type PropertyParent = { [propertyName: string]: any }; + +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent { + if (parent && propertyPath) { + const propertyPathLength: number = propertyPath.length; + for (let i = 0; i < propertyPathLength - 1; ++i) { + const propertyName: string = propertyPath[i]; + if (!parent[propertyName]) { + parent[propertyName] = {}; + } + parent = parent[propertyName]; + } + } + return parent; +} + +function getOperationArgumentValueFromParameter( + serviceClient: ServiceClient, + operationArguments: OperationArguments, + parameter: OperationParameter, + serializer: Serializer +): any { + return getOperationArgumentValueFromParameterPath( + serviceClient, + operationArguments, + parameter.parameterPath, + parameter.mapper, + serializer + ); +} + +export function getOperationArgumentValueFromParameterPath( + serviceClient: ServiceClient, + operationArguments: OperationArguments, + parameterPath: ParameterPath, + parameterMapper: Mapper, + serializer: Serializer +): any { + let value: any; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } else { + let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath( + operationArguments, + parameterPath + ); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + + // Serialize just for validation purposes. 
+ const parameterPathString: string = getPathStringFromParameterPath( + parameterPath, + parameterMapper + ); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } else { + if (parameterMapper.required) { + value = {}; + } + + for (const propertyName in parameterPath) { + const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![ + propertyName + ]; + const propertyPath: ParameterPath = parameterPath[propertyName]; + const propertyValue: any = getOperationArgumentValueFromParameterPath( + serviceClient, + operationArguments, + propertyPath, + propertyMapper, + serializer + ); + // Serialize just for validation purposes. + const propertyPathString: string = getPathStringFromParameterPath( + propertyPath, + propertyMapper + ); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} + +interface PropertySearchResult { + propertyValue?: any; + propertyFound: boolean; +} + +function getPropertyFromParameterPath( + parent: { [parameterName: string]: any }, + parameterPath: string[] +): PropertySearchResult { + const result: PropertySearchResult = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart: string = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} + +export function flattenResponse( + _response: HttpOperationResponse, + responseSpec: OperationResponse | undefined +): RestResponse { + const parsedHeaders = _response.parsedHeaders; + const bodyMapper = responseSpec && responseSpec.bodyMapper; + + const addOperationResponse = (obj: {}) => + Object.defineProperty(obj, "_response", { + value: _response, + }); + + if (bodyMapper) { + const typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse({ + ...parsedHeaders, + blobBody: _response.blobBody, + readableStreamBody: _response.readableStreamBody, + }); + } + + const modelProperties = + (typeName === "Composite" && (bodyMapper as CompositeMapper).type.modelProperties) || {}; + const isPageableResponse = Object.keys(modelProperties).some( + (k) => modelProperties[k].serializedName === "" + ); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : []; + const arrayResponse = [...parsedBody] as RestResponse & any[]; + + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse({ + ...parsedHeaders, + ..._response.parsedBody, + }); + } + } + + if ( + bodyMapper || + _response.request.method === "HEAD" || + utils.isPrimitiveType(_response.parsedBody) + ) { + // primitive body types and HEAD booleans + return addOperationResponse({ + ...parsedHeaders, + body: _response.parsedBody, + }); + } + + return addOperationResponse({ + ...parsedHeaders, + ..._response.parsedBody, + }); +} diff --git a/node_modules/@azure/ms-rest-js/lib/url.ts b/node_modules/@azure/ms-rest-js/lib/url.ts new file mode 100644 index 00000000..74d55028 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/url.ts @@ -0,0 +1,659 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { replaceAll } from "./util/utils"; + +type URLQueryParseState = "ParameterName" | "ParameterValue" | "Invalid"; + +/** + * A class that handles the query portion of a URLBuilder. + */ +export class URLQuery { + private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {}; + + /** + * Get whether or not there any query parameters in this URLQuery. + */ + public any(): boolean { + return Object.keys(this._rawQuery).length > 0; + } + + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + public set(parameterName: string, parameterValue: any): void { + if (parameterName) { + if (parameterValue != undefined) { + const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } else { + delete this._rawQuery[parameterName]; + } + } + } + + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + public get(parameterName: string): string | string[] | undefined { + return parameterName ? this._rawQuery[parameterName] : undefined; + } + + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + public toString(): string { + let result = ""; + for (const parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + const parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + const parameterStrings = []; + for (const parameterValueElement of parameterValue) { + parameterStrings.push(`${parameterName}=${parameterValueElement}`); + } + result += parameterStrings.join("&"); + } else { + result += `${parameterName}=${parameterValue}`; + } + } + return result; + } + + /** + * Parse a URLQuery from the provided text. 
+ */ + public static parse(text: string): URLQuery { + const result = new URLQuery(); + + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + + let currentState: URLQueryParseState = "ParameterName"; + + let parameterName = ""; + let parameterValue = ""; + for (let i = 0; i < text.length; ++i) { + const currentCharacter: string = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + + case "&": + parameterName = ""; + parameterValue = ""; + break; + + default: + parameterName += currentCharacter; + break; + } + break; + + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + + default: + parameterValue += currentCharacter; + break; + } + break; + + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + + return result; + } +} + +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export class URLBuilder { + private _scheme: string | undefined; + private _host: string | undefined; + private _port: string | undefined; + private _path: string | undefined; + private _query: URLQuery | undefined; + + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + public setScheme(scheme: string | undefined): void { + if (!scheme) { + this._scheme = undefined; + } else { + this.set(scheme, "SCHEME"); + } + } + + /** + * Get the scheme that has been set in this URL. + */ + public getScheme(): string | undefined { + return this._scheme; + } + + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + public setHost(host: string | undefined): void { + if (!host) { + this._host = undefined; + } else { + this.set(host, "SCHEME_OR_HOST"); + } + } + + /** + * Get the host that has been set in this URL. + */ + public getHost(): string | undefined { + return this._host; + } + + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + public setPort(port: number | string | undefined): void { + if (port == undefined || port === "") { + this._port = undefined; + } else { + this.set(port.toString(), "PORT"); + } + } + + /** + * Get the port that has been set in this URL. + */ + public getPort(): string | undefined { + return this._port; + } + + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + public setPath(path: string | undefined): void { + if (!path) { + this._path = undefined; + } else { + const schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + const schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? 
path : path.substr(schemeStart + 1), "SCHEME"); + } else { + this.set(path, "PATH"); + } + } + } + + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + public appendPath(path: string | undefined): void { + if (path) { + let currentPath: string | undefined = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + + if (path.startsWith("/")) { + path = path.substring(1); + } + + path = currentPath + path; + } + this.set(path, "PATH"); + } + } + + /** + * Get the path that has been set in this URL. + */ + public getPath(): string | undefined { + return this._path; + } + + /** + * Set the query in this URL. + */ + public setQuery(query: string | undefined): void { + if (!query) { + this._query = undefined; + } else { + this._query = URLQuery.parse(query); + } + } + + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + public setQueryParameter(queryParameterName: string, queryParameterValue: any): void { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + } + + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + public getQueryParameterValue(queryParameterName: string): string | string[] | undefined { + return this._query ? this._query.get(queryParameterName) : undefined; + } + + /** + * Get the query in this URL. + */ + public getQuery(): string | undefined { + return this._query ? this._query.toString() : undefined; + } + + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set(text: string, startState: URLTokenizerState): void { + const tokenizer = new URLTokenizer(text, startState); + + while (tokenizer.next()) { + const token: URLToken | undefined = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + + case "HOST": + this._host = token.text || undefined; + break; + + case "PORT": + this._port = token.text || undefined; + break; + + case "PATH": + const tokenPath: string | undefined = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + + default: + throw new Error(`Unrecognized URLTokenType: ${token.type}`); + } + } + } + } + + public toString(): string { + let result = ""; + + if (this._scheme) { + result += `${this._scheme}://`; + } + + if (this._host) { + result += this._host; + } + + if (this._port) { + result += `:${this._port}`; + } + + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + + if (this._query && this._query.any()) { + result += `?${this._query.toString()}`; + } + + return result; + } + + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. 
+ */ + public replaceAll(searchValue: string, replaceValue: string): void { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + } + + public static parse(text: string): URLBuilder { + const result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + } +} + +type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; + +type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; + +export class URLToken { + public constructor(public readonly text: string, public readonly type: URLTokenType) {} + + public static scheme(text: string): URLToken { + return new URLToken(text, "SCHEME"); + } + + public static host(text: string): URLToken { + return new URLToken(text, "HOST"); + } + + public static port(text: string): URLToken { + return new URLToken(text, "PORT"); + } + + public static path(text: string): URLToken { + return new URLToken(text, "PATH"); + } + + public static query(text: string): URLToken { + return new URLToken(text, "QUERY"); + } +} + +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export function isAlphaNumericCharacter(character: string): boolean { + const characterCode: number = character.charCodeAt(0); + return ( + (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */ + ); +} + +/** + * A class that tokenizes URL strings. + */ +export class URLTokenizer { + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + + public constructor(readonly _text: string, state?: URLTokenizerState) { + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + public current(): URLToken | undefined { + return this._currentToken; + } + + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + public next(): boolean { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + + case "HOST": + nextHost(this); + break; + + case "PORT": + nextPort(this); + break; + + case "PATH": + nextPath(this); + break; + + case "QUERY": + nextQuery(this); + break; + + default: + throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + } + } + return !!this._currentToken; + } +} + +/** + * Read the remaining characters from this Tokenizer's character stream. 
+ */ +function readRemaining(tokenizer: URLTokenizer): string { + let result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} + +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer: URLTokenizer): boolean { + return tokenizer._currentIndex < tokenizer._textLength; +} + +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer: URLTokenizer): string { + return tokenizer._text[tokenizer._currentIndex]; +} + +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer: URLTokenizer, step?: number): void { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} + +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ +function peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string { + let endIndex: number = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} + +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string { + let result = ""; + + while (hasCurrentCharacter(tokenizer)) { + const currentCharacter: string = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + + return result; +} + +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer: URLTokenizer): string { + return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character)); +} + +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. 
+ */ +function readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string { + return readWhile( + tokenizer, + (character: string) => terminatingCharacters.indexOf(character) === -1 + ); +} + +function nextScheme(tokenizer: URLTokenizer): void { + const scheme: string = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else { + tokenizer._currentState = "HOST"; + } +} + +function nextSchemeOrHost(tokenizer: URLTokenizer): void { + const schemeOrHost: string = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } else { + tokenizer._currentState = "QUERY"; + } + } +} + +function nextHost(tokenizer: URLTokenizer): void { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + + const host: string = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } else { + tokenizer._currentState = "QUERY"; + } +} + +function nextPort(tokenizer: URLTokenizer): void { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + + const port: string = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } else { + tokenizer._currentState = "QUERY"; + } +} + +function nextPath(tokenizer: URLTokenizer): void { + const path: string = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else { + tokenizer._currentState = "QUERY"; + } +} + +function nextQuery(tokenizer: URLTokenizer): void { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + + const query: string = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/base64.browser.ts b/node_modules/@azure/ms-rest-js/lib/util/base64.browser.ts new file mode 100644 index 00000000..e6b59741 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/base64.browser.ts @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * Encodes a string in base64 format. 
+ * @param value the string to encode + */ +export function encodeString(value: string): string { + return btoa(value); +} + +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value: Uint8Array): string { + let str = ""; + for (let i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} + +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export function decodeString(value: string): Uint8Array { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/base64.ts b/node_modules/@azure/ms-rest-js/lib/util/base64.ts new file mode 100644 index 00000000..ea0c2e0b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/base64.ts @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export function encodeString(value: string): string { + return Buffer.from(value).toString("base64"); +} + +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value: Uint8Array): string { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer as ArrayBuffer); + return bufferValue.toString("base64"); +} + +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export function decodeString(value: string): Uint8Array { + return Buffer.from(value, "base64"); +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/constants.ts b/node_modules/@azure/ms-rest-js/lib/util/constants.ts new file mode 100644 index 00000000..fbcaca60 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/constants.ts @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export const Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.6.1", + + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + + StatusCodes: { + TooManyRequests: 429, + }, + }, + + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. 
+ * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + + AUTHORIZATION_SCHEME: "Bearer", + + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + + /** + * The UserAgent header. + * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, +}; diff --git a/node_modules/@azure/ms-rest-js/lib/util/utils.ts b/node_modules/@azure/ms-rest-js/lib/util/utils.ts new file mode 100644 index 00000000..0675100d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/utils.ts @@ -0,0 +1,288 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { v4 as uuidv4 } from "uuid"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +import { Constants } from "./constants"; + +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export const isNode = + typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; + +/** + * Checks if a parsed URL is HTTPS + * + * @param {object} urlToCheck The url to check + * @return {boolean} True if the URL is HTTPS; false otherwise. + */ +export function urlIsHTTPS(urlToCheck: { protocol: string }): boolean { + return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; +} + +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +export function encodeUri(uri: string): string { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} + +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +export function stripResponse(response: HttpOperationResponse): any { + const strippedResponse: any = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} + +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +export function stripRequest(request: WebResourceLike): WebResourceLike { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} + +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +export function isValidUuid(uuid: string): boolean { + const validUuidRegex = new RegExp( + "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", + "ig" + ); + return validUuidRegex.test(uuid); +} + +/** + * Provides an array of values of an object. 
For example + * for a given object { "a": "foo", "b": "bar" }, the method returns ["foo", "bar"]. + * + * @param {object} obj An object whose properties need to be enumerated so that it"s values can be provided as an array + * + * @return {any[]} An array of values of the given object. + */ +export function objectValues(obj: { [key: string]: any }): any[] { + const result: any[] = []; + if (obj && obj instanceof Object) { + for (const key in obj) { + if (obj.hasOwnProperty(key)) { + result.push((obj)[key]); + } + } + } else { + throw new Error( + `The provided object ${JSON.stringify( + obj, + undefined, + 2 + )} is not a valid object that can be ` + `enumerated to provide its values as an array.` + ); + } + return result; +} + +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +export function generateUuid(): string { + return uuidv4(); +} + +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +export function executePromisesSequentially(promiseFactories: Array, kickstart: any) { + let result = Promise.resolve(kickstart); + promiseFactories.forEach((promiseFactory) => { + result = result.then(promiseFactory); + }); + return result; +} + +/** + * Merges source object into the target object + * @param {object} source The object that needs to be merged + * + * @param {object} target The object to be merged into + * + * @returns {object} Returns the merged target object. + */ +export function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) { + Object.keys(source).forEach((key) => { + target[key] = source[key]; + }); + return target; +} + +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +export function delay(t: number, value?: T): Promise { + return new Promise((resolve) => setTimeout(() => resolve(value), t)); +} + +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. + * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null. + * @param {TResult} [result] The deserialized response body if an error did not occur. + * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur. + * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur. + */ + ( + err: Error | RestError | null, + result?: TResult, + request?: WebResourceLike, + response?: HttpOperationResponse + ): void; +} + +/** + * Converts a Promise to a callback. 
+ * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +export function promiseToCallback(promise: Promise): Function { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return (cb: Function): void => { + promise.then( + (data: any) => { + cb(undefined, data); + }, + (err: Error) => { + cb(err); + } + ); + }; +} + +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +export function promiseToServiceCallback(promise: Promise): Function { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return (cb: ServiceCallback): void => { + promise.then( + (data: HttpOperationResponse) => { + process.nextTick(cb, undefined, data.parsedBody as T, data.request, data); + }, + (err: Error) => { + process.nextTick(cb, err); + } + ); + }; +} + +export function prepareXMLRootList(obj: any, elementName: string) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + return { [elementName]: obj }; +} + +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ +export function applyMixins(targetCtor: any, sourceCtors: any[]): void { + sourceCtors.forEach((sourceCtors) => { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); +} + +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; + +/** + * Indicates whether the given string is in ISO 8601 format. + * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +export function isDuration(value: string): boolean { + return validateISODuration.test(value); +} + +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +export function replaceAll( + value: string | undefined, + searchValue: string, + replaceValue: string +): string | undefined { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} + +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. 
+ */ +export function isPrimitiveType(value: any): boolean { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts b/node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts new file mode 100644 index 00000000..3a369633 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +const parser = new DOMParser(); +export function parseXML(str: string): Promise { + try { + const dom = parser.parseFromString(str, "application/xml"); + throwIfError(dom); + + const obj = domToObject(dom.childNodes[0]); + return Promise.resolve(obj); + } catch (err) { + return Promise.reject(err); + } +} + +let errorNS = ""; +try { + errorNS = parser.parseFromString("INVALID", "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI!; +} catch (ignored) { + // Most browsers will return a document containing , but IE will throw. +} + +function throwIfError(dom: Document) { + if (errorNS) { + const parserErrors = dom.getElementsByTagNameNS(errorNS, "parsererror"); + if (parserErrors.length) { + throw new Error(parserErrors.item(0)!.innerHTML); + } + } +} + +function isElement(node: Node): node is Element { + return !!(node as Element).attributes; +} + +/** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ +function asElementWithAttributes(node: Node): Element | undefined { + return isElement(node) && node.hasAttributes() ? node : undefined; +} + +function domToObject(node: Node): any { + let result: any = {}; + + const childNodeCount: number = node.childNodes.length; + + const firstChildNode: Node = node.childNodes[0]; + const onlyChildTextValue: string | undefined = + (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + + const elementWithAttributes: Element | undefined = asElementWithAttributes(node); + if (elementWithAttributes) { + result["$"] = {}; + + for (let i = 0; i < elementWithAttributes.attributes.length; i++) { + const attr = elementWithAttributes.attributes[i]; + result["$"][attr.nodeName] = attr.nodeValue; + } + + if (onlyChildTextValue) { + result["_"] = onlyChildTextValue; + } + } else if (childNodeCount === 0) { + result = ""; + } else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + + if (!onlyChildTextValue) { + for (let i = 0; i < childNodeCount; i++) { + const child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + const childObject: any = domToObject(child); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + + return result; +} + +// tslint:disable-next-line:no-null-keyword +const doc = document.implementation.createDocument(null, null, null); +const serializer = new XMLSerializer(); + +export function stringifyXML(obj: any, opts?: { rootName?: string }) { + const rootName = (opts && opts.rootName) || "root"; + const dom = buildNode(obj, rootName)[0]; + return ( + '' + serializer.serializeToString(dom) + ); +} + 
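To make the `domToObject` conventions above concrete, here is a small editorial round-trip sketch (browser-only). It is not part of the vendored sources, and the deep import path is only illustrative: attributes are collected under `$`, while an element whose only child is text collapses to a plain string.

```typescript
// Editorial sketch; the import path mirrors the file added above and is an assumption.
import { parseXML, stringifyXML } from "@azure/ms-rest-js/lib/util/xml.browser";

async function demo(): Promise<void> {
  const obj = await parseXML('<person id="1"><name>Ada</name></person>');
  console.log(obj.$.id); // "1"  -- attributes are gathered under "$"
  console.log(obj.name); // "Ada" -- a text-only child element collapses to its string value

  // Serializes the object back under the given root element name.
  console.log(stringifyXML(obj, { rootName: "person" }));
}

demo();
```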
+function buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] { + const result = []; + for (const key of Object.keys(attrs)) { + const attr = doc.createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; +} + +function buildNode(obj: any, elementName: string): Node[] { + if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") { + const elem = doc.createElement(elementName); + elem.textContent = obj.toString(); + return [elem]; + } else if (Array.isArray(obj)) { + const result = []; + for (const arrayElem of obj) { + for (const child of buildNode(arrayElem, elementName)) { + result.push(child); + } + } + return result; + } else if (typeof obj === "object") { + const elem = doc.createElement(elementName); + for (const key of Object.keys(obj)) { + if (key === "$") { + for (const attr of buildAttributes(obj[key])) { + elem.attributes.setNamedItem(attr); + } + } else { + for (const child of buildNode(obj[key], key)) { + elem.appendChild(child); + } + } + } + return [elem]; + } else { + throw new Error(`Illegal value passed to buildObject: ${obj}`); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/xml.ts b/node_modules/@azure/ms-rest-js/lib/util/xml.ts new file mode 100644 index 00000000..829e5e02 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/xml.ts @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as xml2js from "xml2js"; + +export function stringifyXML(obj: any, opts?: { rootName?: string }) { + const builder = new xml2js.Builder({ + rootName: (opts || {}).rootName, + renderOpts: { + pretty: false, + }, + }); + return builder.buildObject(obj); +} + +export function parseXML(str: string): Promise { + const xmlParser = new xml2js.Parser({ + explicitArray: false, + explicitCharkey: false, + explicitRoot: false, + }); + return new Promise((resolve, reject) => { + if (!str) { + reject(new Error("Document is empty")); + } else { + xmlParser.parseString(str, (err?: Error, res?: any) => { + if (err) { + reject(err); + } else { + resolve(res); + } + }); + } + }); +} diff --git a/node_modules/@azure/ms-rest-js/lib/webResource.ts b/node_modules/@azure/ms-rest-js/lib/webResource.ts new file mode 100644 index 00000000..10bb55fe --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/webResource.ts @@ -0,0 +1,668 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from "./httpHeaders"; +import { OperationSpec } from "./operationSpec"; +import { Mapper, Serializer } from "./serializer"; +import { generateUuid } from "./util/utils"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { AgentSettings, ProxySettings } from "./serviceClient"; + +export type HttpMethods = + | "GET" + | "PUT" + | "POST" + | "DELETE" + | "PATCH" + | "HEAD" + | "OPTIONS" + | "TRACE"; +export type HttpRequestBody = + | Blob + | string + | ArrayBuffer + | ArrayBufferView + | (() => NodeJS.ReadableStream); + +/** + * Fired in response to upload or download progress. + */ +export type TransferProgressEvent = { + /** + * The number of bytes loaded so far. 
+ */ + loadedBytes: number; +}; + +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + readonly aborted: boolean; + dispatchEvent: (event: Event) => boolean; + onabort: ((this: AbortSignalLike, ev: Event) => any) | null; + addEventListener: ( + type: "abort", + listener: (this: AbortSignalLike, ev: Event) => any, + options?: any + ) => void; + removeEventListener: ( + type: "abort", + listener: (this: AbortSignalLike, ev: Event) => any, + options?: any + ) => void; +} + +/** + * An abstraction over a REST call. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: ( + operationSpec: OperationSpec, + response: HttpOperationResponse + ) => undefined | OperationResponse; + formData?: any; + /** + * A query string represented as an object. + */ + query?: { [key: string]: any }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * HTTP(S) agent configuration. + */ + agentSettings?: AgentSettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + + /** + * Used to abort the request later. + */ + abortSignal?: AbortSignalLike; + + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. 
+ */ + clone(): WebResourceLike; +} + +export function isWebResourceLike(object: any): object is WebResourceLike { + if (typeof object !== "object") { + return false; + } + if ( + typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function" + ) { + return true; + } + return false; +} + +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ +export class WebResource { + url: string; + method: HttpMethods; + body?: any; + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: ( + operationSpec: OperationSpec, + response: HttpOperationResponse + ) => undefined | OperationResponse; + formData?: any; + query?: { [key: string]: any }; + operationSpec?: OperationSpec; + withCredentials: boolean; + timeout: number; + proxySettings?: ProxySettings; + keepAlive?: boolean; + agentSettings?: AgentSettings; + redirectLimit?: number; + + abortSignal?: AbortSignalLike; + + /** Callback which fires upon upload progress. */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + + constructor( + url?: string, + method?: HttpMethods, + body?: any, + query?: { [key: string]: any }, + headers?: { [key: string]: any } | HttpHeadersLike, + streamResponseBody?: boolean, + withCredentials?: boolean, + abortSignal?: AbortSignalLike, + timeout?: number, + onUploadProgress?: (progress: TransferProgressEvent) => void, + onDownloadProgress?: (progress: TransferProgressEvent) => void, + proxySettings?: ProxySettings, + keepAlive?: boolean, + agentSettings?: AgentSettings, + redirectLimit?: number + ) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. 
+ */ + validateRequestProperties(): void { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + } + + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource { + if (!options) { + throw new Error("options object is required"); + } + + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + + if (options.url && options.pathTemplate) { + throw new Error( + "options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them." + ); + } + + if ( + (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string") + ) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + + // set the method + if (options.method) { + const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error( + 'The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods) + ); + } + } + this.method = options.method.toUpperCase() as HttpMethods; + + // construct the url if path template is provided + if (options.pathTemplate) { + const { pathTemplate, pathParameters } = options; + if (typeof pathTemplate !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + const baseUrl = options.baseUrl; + let url = + baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); + const segments = url.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters) { + throw new Error( + `pathTemplate: ${pathTemplate} has been provided. 
Hence, options.pathParameters must also be provided.` + ); + } + segments.forEach(function (item) { + const pathParamName = item.slice(1, -1); + const pathParam = (pathParameters as { [key: string]: any })[pathParamName]; + if ( + pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object") + ) { + throw new Error( + `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + + ` however, it is not present in ${pathParameters} - ${JSON.stringify( + pathParameters, + undefined, + 2 + )}.` + + `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + + `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.` + ); + } + + if (typeof pathParam.valueOf() === "string") { + url = url.replace(item, encodeURIComponent(pathParam)); + } + + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error( + `options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.` + ); + } + if (pathParam.skipUrlEncoding) { + url = url.replace(item, pathParam.value); + } else { + url = url.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url; + } + + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + const queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error( + `options.queryParameters must be of type object. It should be a JSON object ` + + `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + + `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.` + ); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + const queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (const queryParamName in queryParameters) { + const queryParam: any = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error( + `options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.` + ); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + + // add headers to the request if they are provided + if (options.headers) { + const headers = options.headers; + for (const headerName of Object.keys(options.headers)) { + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize( + options.serializationMapper, + options.body, + "requestBody" + ); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + + return this; + } + + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + clone(): WebResource { + const result = new WebResource( + this.url, + this.method, + this.body, + this.query, + this.headers && this.headers.clone(), + this.streamResponseBody, + this.withCredentials, + this.abortSignal, + this.timeout, + this.onUploadProgress, + this.onDownloadProgress, + this.proxySettings, + this.keepAlive, + this.agentSettings, + this.redirectLimit + ); + + if (this.formData) { + result.formData = this.formData; + } + + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + + return result; + } +} + +export interface RequestPrepareOptions { + /** + * The HTTP request method. Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: { "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } } + * - query-parameter-value in "string" format: { "query-parameter-name": "query-parameter-value"}. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { [key: string]: any | ParameterValue }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}" + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: { "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } } + * - path-parameter-value in "string" format: { "path-parameter-name": "path-parameter-value" }. 
+ */ + pathParameters?: { [key: string]: any | ParameterValue }; + formData?: { [key: string]: any }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { [key: string]: any }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { [x: string]: any }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: object; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + abortSignal?: AbortSignalLike; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + + onUploadProgress?: (progress: TransferProgressEvent) => void; + onDownloadProgress?: (progress: TransferProgressEvent) => void; + streamResponseBody?: boolean; +} + +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + value: any; + skipUrlEncoding: boolean; + [key: string]: any; +} + +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * @property {object} [customHeaders] User defined custom request headers that + * will be applied before the request is sent. + */ + customHeaders?: { [key: string]: string }; + + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + + [key: string]: any; +} diff --git a/node_modules/@azure/ms-rest-js/lib/xhrHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/xhrHttpClient.ts new file mode 100644 index 00000000..698d701d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/xhrHttpClient.ts @@ -0,0 +1,178 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpClient } from "./httpClient"; +import { HttpHeaders } from "./httpHeaders"; +import { WebResourceLike, TransferProgressEvent } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { RestError } from "./restError"; + +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +export class XhrHttpClient implements HttpClient { + public sendRequest(request: WebResourceLike): Promise { + const xhr = new XMLHttpRequest(); + + if (request.agentSettings) { + throw new Error("HTTP agent settings not supported in browser environment"); + } + + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + + const abortSignal = request.abortSignal; + if (abortSignal) { + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + + if (request.formData) { + const formData = request.formData; + const requestForm = new FormData(); + const appendFormValue = (key: string, value: any) => { + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm.append(key, value.value, value.options); + } else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } else { + appendFormValue(formKey, formValue); + } + } + + request.body = requestForm; + request.formData = undefined; + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (const header of request.headers.headersArray()) { + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = request.streamResponseBody ? "blob" : "text"; + + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? 
null : request.body); + + if (request.streamResponseBody) { + return new Promise((resolve, reject) => { + xhr.addEventListener("readystatechange", () => { + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => + resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + }) + ); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} + +function addProgressListener( + xhr: XMLHttpRequestEventTarget, + listener?: (progress: TransferProgressEvent) => void +) { + if (listener) { + xhr.addEventListener("progress", (rawEvent) => + listener({ + loadedBytes: rawEvent.loaded, + }) + ); + } +} + +// exported locally for testing +export function parseHeaders(xhr: XMLHttpRequest) { + const responseHeaders = new HttpHeaders(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} + +function rejectOnTerminalEvent( + request: WebResourceLike, + xhr: XMLHttpRequest, + reject: (err: any) => void +) { + xhr.addEventListener("error", () => + reject( + new RestError( + `Failed to send request to ${request.url}`, + RestError.REQUEST_SEND_ERROR, + undefined, + request + ) + ) + ); + xhr.addEventListener("abort", () => + reject( + new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, request) + ) + ); + xhr.addEventListener("timeout", () => + reject( + new RestError( + `timeout of ${xhr.timeout}ms exceeded`, + RestError.REQUEST_SEND_ERROR, + undefined, + request + ) + ) + ); +} diff --git a/node_modules/@azure/ms-rest-js/node_modules/.bin/uuid b/node_modules/@azure/ms-rest-js/node_modules/.bin/uuid new file mode 120000 index 00000000..588f70ec --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/dist/bin/uuid \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/CHANGELOG.md b/node_modules/@azure/ms-rest-js/node_modules/uuid/CHANGELOG.md new file mode 100644 index 00000000..7519d19d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,229 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
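Before moving on to the vendored `uuid` package, here is a minimal editorial sketch tying together the `XhrHttpClient` and `WebResource` classes added above. It is browser-only, not part of the vendored sources, and the deep import paths and URL are illustrative assumptions rather than the package's documented entry points.

```typescript
// Editorial sketch only; import paths mirror the files added above and may not
// match the package's public entry point.
import { XhrHttpClient } from "@azure/ms-rest-js/lib/xhrHttpClient";
import { WebResource } from "@azure/ms-rest-js/lib/webResource";

const client = new XhrHttpClient();
const request = new WebResource("https://example.org/api/items", "GET");

client.sendRequest(request).then((response) => {
  // HttpOperationResponse carries the status code, parsed headers and raw body text.
  console.log(response.status, response.headers.get("content-type"), response.bodyAsText);
});
```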
+ +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) [#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are exposed, there is no more default export. 
+ + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. + + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent ([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. 
+- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. + +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix 
[#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/CONTRIBUTING.md b/node_modules/@azure/ms-rest-js/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 00000000..4a4503d0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! 
+ +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/LICENSE.md b/node_modules/@azure/ms-rest-js/node_modules/uuid/LICENSE.md new file mode 100644 index 00000000..39341683 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/README.md b/node_modules/@azure/ms-rest-js/node_modules/uuid/README.md new file mode 100644 index 00000000..ed27e576 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/README.md @@ -0,0 +1,505 @@ + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - Node 8, 10, 12, 14 + - Chrome, Safari, Firefox, Edge, IE 11 browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +**Upgrading from `uuid@3.x`?** Your code is probably okay, but check out [Upgrading From `uuid@3.x`](#upgrading-from-uuid3x) for details. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... 
or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... + +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). + +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, + 0xc0, + 0xbd, + 0x7f, + 0x11, + 0xc0, + 0x43, + 0xda, + 0x97, + 0x5e, + 0x2a, + 0x8a, + 0xd9, + 0xeb, + 0xae, + 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. + +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." 
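Since the v3 section above defers to the v5 API rather than giving its own snippet, here is a short editorial example (not part of the vendored README). Per the note in the v5 section, the RFC `DNS` namespace is exposed on the version function itself; the resulting UUID value is deterministic but not reproduced here.

```typescript
import { v3 as uuidv3 } from 'uuid';

// Version 3 UUIDs are deterministic: the same name + namespace always yields the same UUID.
const id = uuidv3('example.com', uuidv3.DNS);
console.log(id); // a version 3 (MD5-based) UUID string
```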
+ +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, + 0x91, + 0x56, + 0xbe, + 0xc4, + 0xfb, + 0xc1, + 0xea, + 0x71, + 0xb4, + 0xef, + 0xe1, + 0x67, + 0x1c, + 0x58, + 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ uuid --help + +Usage: + uuid + uuid v1 + uuid v3 + uuid v4 + uuid v5 + uuid --help + +Note: may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html + +``` + +### UMD + +To load this module directly into older browsers you can use the [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds from any of the following CDNs: + +**Using [UNPKG](https://unpkg.com/uuid@latest/dist/umd/)**: + +```html + +``` + +**Using [jsDelivr](https://cdn.jsdelivr.net/npm/uuid@latest/dist/umd/)**: + +```html + +``` + +**Using [cdnjs](https://cdnjs.com/libraries/uuid)**: + +```html + +``` + +These CDNs all provide the same [`uuidv4()`](#uuidv4options-buffer-offset) method: + +```html + +``` + +Methods for the other algorithms ([`uuidv1()`](#uuidv1options-buffer-offset), [`uuidv3()`](#uuidv3name-namespace-buffer-offset) and [`uuidv5()`](#uuidv5name-namespace-buffer-offset)) are available from the files `uuidv1.min.js`, `uuidv3.min.js` and `uuidv5.min.js` respectively. + +## "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. 
Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +## Upgrading From `uuid@7.x` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7.x` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7.x`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3.x` + +"_Wait... what happened to `uuid@4.x` - `uuid@6.x`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. + +### Deep Requires Now Deprecated + +`uuid@3.x` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7.x` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3.x` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3.x` and has been removed in `uuid@7.x`. 
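+
+As a minimal sketch of the replacement pattern (assuming ESM; the particular combination of imports is arbitrary), every algorithm and helper is now a named export, so import only what you use:
+
+```javascript
+// uuid@7+ exposes only named exports (v1, v3, v4, v5, NIL, parse, stringify, validate, version)
+import { v4 as uuidv4, validate as uuidValidate } from 'uuid';
+
+uuidValidate(uuidv4()); // ⇨ true
+```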
+ +---- +Markdown generated from [README_js.md](README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/bin/uuid b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/bin/uuid new file mode 100755 index 00000000..f38d2ee1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/bin/uuid @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../uuid-bin'); diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/index.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/index.js new file mode 100644 index 00000000..1db6f6d2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/md5.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/md5.js new file mode 100644 index 00000000..8b5d46a7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/md5.js @@ -0,0 +1,215 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ +function md5(bytes) { + if (typeof bytes === 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (var i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + var output = []; + var length32 = input.length * 32; + var hexTab = '0123456789abcdef'; + + for (var i = 0; i < length32; i += 8) { + var x = input[i >> 5] >>> i % 32 & 0xff; + var hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + var a = 1732584193; + var b = -271733879; + var c = -1732584194; + var d = 271733878; + + for (var i = 0; i < x.length; i += 16) { + var olda = a; + var oldb = b; + var oldc = c; + var oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 
9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + var length8 = input.length * 8; + var output = new Uint32Array(getOutputLength(length8)); + + for (var i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + var lsw = (x & 0xffff) + (y & 0xffff); + var msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/nil.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/nil.js new file mode 100644 index 00000000..b36324c2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/parse.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/parse.js new file mode 100644 index 00000000..7c5b1d5a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + var v; + var arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/regex.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/regex.js new file mode 100644 index 00000000..3da8673a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/rng.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/rng.js new file mode 100644 index 00000000..8abbf2ea --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/rng.js @@ -0,0 +1,19 @@ +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +var getRandomValues; +var rnds8 = new Uint8Array(16); +export default function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also, + // find the complete implementation of crypto (msCrypto) on IE11. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/sha1.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/sha1.js new file mode 100644 index 00000000..940548ba --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/sha1.js @@ -0,0 +1,96 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + var K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + var H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (var i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + var l = bytes.length / 4 + 2; + var N = Math.ceil(l / 16); + var M = new Array(N); + + for (var _i = 0; _i < N; ++_i) { + var arr = new Uint32Array(16); + + for (var j = 0; j < 16; ++j) { + arr[j] = bytes[_i * 64 + j * 4] << 24 | bytes[_i * 64 + j * 4 + 1] << 16 | bytes[_i * 64 + j * 4 + 2] << 8 | bytes[_i * 64 + j * 4 + 3]; + } + + M[_i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (var _i2 = 0; _i2 < N; ++_i2) { + var W = new Uint32Array(80); + + for (var t = 0; t < 16; ++t) { + W[t] = M[_i2][t]; + } + + for (var _t = 16; _t < 80; ++_t) { + W[_t] = ROTL(W[_t - 3] ^ W[_t - 8] ^ W[_t - 14] ^ W[_t - 16], 1); + } + + var a = H[0]; + var b = H[1]; + var c = H[2]; + var d = H[3]; + var e = H[4]; + + for (var _t2 = 0; _t2 < 80; ++_t2) { + var s = Math.floor(_t2 / 20); + var T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[_t2] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/stringify.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/stringify.js new file mode 100644 index 00000000..31021115 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/stringify.js @@ -0,0 +1,30 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +var byteToHex = []; + +for (var i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr) { + var offset = 
arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v1.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v1.js new file mode 100644 index 00000000..1a22591e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +var _nodeId; + +var _clockseq; // Previous uuid creation time + + +var _lastMSecs = 0; +var _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || new Array(16); + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + var seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + var msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + var dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + var tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || stringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v3.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v3.js new file mode 100644 index 00000000..c9ab9a4c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +var v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v35.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v35.js new file mode 100644 index 00000000..31dd8a1c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v35.js @@ -0,0 +1,64 @@ +import stringify from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + var bytes = []; + + for (var i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export var DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export var URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function (name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + var bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (var i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return stringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v4.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v4.js new file mode 100644 index 00000000..404810a4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v4.js @@ -0,0 +1,24 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; + +function v4(options, buf, offset) { + options = options || {}; + var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (var i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return stringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v5.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v5.js new file mode 100644 index 00000000..c08d96ba --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +var v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/validate.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/validate.js new file mode 100644 index 00000000..f1cdc7af --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/version.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/version.js new file mode 100644 index 00000000..77530e9c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-browser/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/index.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/index.js new file mode 100644 index 00000000..1db6f6d2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { 
default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/md5.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/md5.js new file mode 100644 index 00000000..4d68b040 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/md5.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +export default md5; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/nil.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/nil.js new file mode 100644 index 00000000..b36324c2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/parse.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/parse.js new file mode 100644 index 00000000..6421c5d5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/regex.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/regex.js new file mode 100644 index 00000000..3da8673a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/rng.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/rng.js new file mode 100644 index 00000000..80062449 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/rng.js @@ -0,0 +1,12 @@ +import crypto from 'crypto'; +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +export default function rng() { + if (poolPtr > rnds8Pool.length - 16) { + crypto.randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/sha1.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/sha1.js new file mode 100644 index 00000000..e23850b4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/sha1.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +export default sha1; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/stringify.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/stringify.js new file mode 100644 index 00000000..f9bca120 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/stringify.js @@ -0,0 +1,29 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v1.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v1.js new file mode 100644 index 00000000..ebf81acb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || stringify(b); +} + +export default v1; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v3.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v3.js new file mode 100644 index 00000000..09063b86 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v35.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v35.js new file mode 100644 index 00000000..22f6a196 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v35.js @@ -0,0 +1,64 @@ +import stringify from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function (name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return stringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v4.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v4.js new file mode 100644 index 00000000..efad926f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v4.js @@ -0,0 +1,24 @@ +import rng from './rng.js'; +import stringify from './stringify.js'; + +function v4(options, buf, offset) { + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return stringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v5.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v5.js new file mode 100644 index 00000000..e87fe317 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/validate.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/validate.js new file mode 100644 index 00000000..f1cdc7af --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/version.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/version.js new file mode 100644 index 00000000..77530e9c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/esm-node/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +export default version; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/index.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/index.js new file mode 100644 index 00000000..bf13b103 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); 
+Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function () { + return _nil.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function () { + return _stringify.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/md5-browser.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/md5-browser.js new file mode 100644 index 00000000..7a4582ac --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/md5-browser.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. + */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, 
d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. + */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/md5.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/md5.js new file mode 100644 index 00000000..824d4816 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/md5.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/nil.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/nil.js new file mode 100644 index 00000000..7ade577b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/parse.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/parse.js new file mode 100644 index 00000000..4c69fc39 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/regex.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/regex.js new file mode 100644 index 00000000..1ef91d64 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/rng-browser.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/rng-browser.js new file mode 100644 index 00000000..91faeae6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/rng-browser.js @@ -0,0 +1,26 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also, + // find the complete implementation of crypto (msCrypto) on IE11. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/rng.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/rng.js new file mode 100644 index 00000000..3507f937 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/rng.js @@ -0,0 +1,24 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/sha1-browser.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/sha1-browser.js new file mode 100644 index 00000000..24cbcedc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/sha1-browser.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/sha1.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/sha1.js new file mode 100644 index 00000000..03bdd63c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/sha1.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, 
"__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/stringify.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/stringify.js new file mode 100644 index 00000000..b8e75194 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/stringify.js @@ -0,0 +1,39 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuid.min.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuid.min.js new file mode 100644 index 00000000..639ca2f2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuid.min.js @@ -0,0 +1 @@ +!function(r,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((r="undefined"!=typeof globalThis?globalThis:r||self).uuid={})}(this,(function(r){"use strict";var e,n=new Uint8Array(16);function t(){if(!e&&!(e="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return e(n)}var o=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function a(r){return"string"==typeof r&&o.test(r)}for(var i,u,f=[],s=0;s<256;++s)f.push((s+256).toString(16).substr(1));function c(r){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=(f[r[e+0]]+f[r[e+1]]+f[r[e+2]]+f[r[e+3]]+"-"+f[r[e+4]]+f[r[e+5]]+"-"+f[r[e+6]]+f[r[e+7]]+"-"+f[r[e+8]]+f[r[e+9]]+"-"+f[r[e+10]]+f[r[e+11]]+f[r[e+12]]+f[r[e+13]]+f[r[e+14]]+f[r[e+15]]).toLowerCase();if(!a(n))throw TypeError("Stringified UUID is invalid");return n}var l=0,d=0;function v(r){if(!a(r))throw TypeError("Invalid UUID");var e,n=new Uint8Array(16);return n[0]=(e=parseInt(r.slice(0,8),16))>>>24,n[1]=e>>>16&255,n[2]=e>>>8&255,n[3]=255&e,n[4]=(e=parseInt(r.slice(9,13),16))>>>8,n[5]=255&e,n[6]=(e=parseInt(r.slice(14,18),16))>>>8,n[7]=255&e,n[8]=(e=parseInt(r.slice(19,23),16))>>>8,n[9]=255&e,n[10]=(e=parseInt(r.slice(24,36),16))/1099511627776&255,n[11]=e/4294967296&255,n[12]=e>>>24&255,n[13]=e>>>16&255,n[14]=e>>>8&255,n[15]=255&e,n}function p(r,e,n){function t(r,t,o,a){if("string"==typeof r&&(r=function(r){r=unescape(encodeURIComponent(r));for(var e=[],n=0;n>>9<<4)+1}function y(r,e){var n=(65535&r)+(65535&e);return(r>>16)+(e>>16)+(n>>16)<<16|65535&n}function g(r,e,n,t,o,a){return y((i=y(y(e,r),y(t,a)))<<(u=o)|i>>>32-u,n);var i,u}function m(r,e,n,t,o,a,i){return g(e&n|~e&t,r,e,o,a,i)}function w(r,e,n,t,o,a,i){return g(e&t|n&~t,r,e,o,a,i)}function b(r,e,n,t,o,a,i){return g(e^n^t,r,e,o,a,i)}function A(r,e,n,t,o,a,i){return g(n^(e|~t),r,e,o,a,i)}var U=p("v3",48,(function(r){if("string"==typeof r){var e=unescape(encodeURIComponent(r));r=new Uint8Array(e.length);for(var n=0;n>5]>>>o%32&255,i=parseInt(t.charAt(a>>>4&15)+t.charAt(15&a),16);e.push(i)}return e}(function(r,e){r[e>>5]|=128<>5]|=(255&r[t/8])<>>32-e}var R=p("v5",80,(function(r){var e=[1518500249,1859775393,2400959708,3395469782],n=[1732584193,4023233417,2562383102,271733878,3285377520];if("string"==typeof r){var t=unescape(encodeURIComponent(r));r=[];for(var o=0;o>>0;w=m,m=g,g=C(y,30)>>>0,y=h,h=U}n[0]=n[0]+h>>>0,n[1]=n[1]+y>>>0,n[2]=n[2]+g>>>0,n[3]=n[3]+m>>>0,n[4]=n[4]+w>>>0}return[n[0]>>24&255,n[0]>>16&255,n[0]>>8&255,255&n[0],n[1]>>24&255,n[1]>>16&255,n[1]>>8&255,255&n[1],n[2]>>24&255,n[2]>>16&255,n[2]>>8&255,255&n[2],n[3]>>24&255,n[3]>>16&255,n[3]>>8&255,255&n[3],n[4]>>24&255,n[4]>>16&255,n[4]>>8&255,255&n[4]]}));r.NIL="00000000-0000-0000-0000-000000000000",r.parse=v,r.stringify=c,r.v1=function(r,e,n){var o=e&&n||0,a=e||new Array(16),f=(r=r||{}).node||i,s=void 0!==r.clockseq?r.clockseq:u;if(null==f||null==s){var v=r.random||(r.rng||t)();null==f&&(f=i=[1|v[0],v[1],v[2],v[3],v[4],v[5]]),null==s&&(s=u=16383&(v[6]<<8|v[7]))}var p=void 0!==r.msecs?r.msecs:Date.now(),h=void 0!==r.nsecs?r.nsecs:d+1,y=p-l+(h-d)/1e4;if(y<0&&void 0===r.clockseq&&(s=s+1&16383),(y<0||p>l)&&void 0===r.nsecs&&(h=0),h>=1e4)throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");l=p,d=h,u=s;var g=(1e4*(268435455&(p+=122192928e5))+h)%4294967296;a[o++]=g>>>24&255,a[o++]=g>>>16&255,a[o++]=g>>>8&255,a[o++]=255&g;var m=p/4294967296*1e4&268435455;a[o++]=m>>>8&255,a[o++]=255&m,a[o++]=m>>>24&15|16,a[o++]=m>>>16&255,a[o++]=s>>>8|128,a[o++]=255&s;for(var w=0;w<6;++w)a[o+w]=f[w];return e||c(a)},r.v3=U,r.v4=function(r,e,n){var o=(r=r||{}).random||(r.rng||t)();if(o[6]=15&o[6]|64,o[8]=63&o[8]|128,e){n=n||0;for(var a=0;a<16;++a)e[n+a]=o[a];return e}return 
c(o)},r.v5=R,r.validate=a,r.version=function(r){if(!a(r))throw TypeError("Invalid UUID");return parseInt(r.substr(14,1),16)},Object.defineProperty(r,"__esModule",{value:!0})})); \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidNIL.min.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidNIL.min.js new file mode 100644 index 00000000..30b28a7e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidNIL.min.js @@ -0,0 +1 @@ +!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidNIL=n()}(this,(function(){"use strict";return"00000000-0000-0000-0000-000000000000"})); \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidParse.min.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidParse.min.js new file mode 100644 index 00000000..d48ea6af --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidParse.min.js @@ -0,0 +1 @@ +!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidParse=n()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(n){if(!function(n){return"string"==typeof n&&e.test(n)}(n))throw TypeError("Invalid UUID");var t,i=new Uint8Array(16);return i[0]=(t=parseInt(n.slice(0,8),16))>>>24,i[1]=t>>>16&255,i[2]=t>>>8&255,i[3]=255&t,i[4]=(t=parseInt(n.slice(9,13),16))>>>8,i[5]=255&t,i[6]=(t=parseInt(n.slice(14,18),16))>>>8,i[7]=255&t,i[8]=(t=parseInt(n.slice(19,23),16))>>>8,i[9]=255&t,i[10]=(t=parseInt(n.slice(24,36),16))/1099511627776&255,i[11]=t/4294967296&255,i[12]=t>>>24&255,i[13]=t>>>16&255,i[14]=t>>>8&255,i[15]=255&t,i}})); \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidStringify.min.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidStringify.min.js new file mode 100644 index 00000000..fd39adc3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidStringify.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidStringify=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function t(t){return"string"==typeof t&&e.test(t)}for(var i=[],n=0;n<256;++n)i.push((n+256).toString(16).substr(1));return function(e){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,f=(i[e[n+0]]+i[e[n+1]]+i[e[n+2]]+i[e[n+3]]+"-"+i[e[n+4]]+i[e[n+5]]+"-"+i[e[n+6]]+i[e[n+7]]+"-"+i[e[n+8]]+i[e[n+9]]+"-"+i[e[n+10]]+i[e[n+11]]+i[e[n+12]]+i[e[n+13]]+i[e[n+14]]+i[e[n+15]]).toLowerCase();if(!t(f))throw TypeError("Stringified UUID is invalid");return f}})); \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidValidate.min.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidValidate.min.js new file mode 100644 index 00000000..378e5b90 --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidValidate.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidValidate=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(t){return"string"==typeof t&&e.test(t)}})); \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidVersion.min.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidVersion.min.js new file mode 100644 index 00000000..274bb090 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidVersion.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidVersion=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(t){if(!function(t){return"string"==typeof t&&e.test(t)}(t))throw TypeError("Invalid UUID");return parseInt(t.substr(14,1),16)}})); \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidv1.min.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidv1.min.js new file mode 100644 index 00000000..2622889a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidv1.min.js @@ -0,0 +1 @@ +!function(e,o){"object"==typeof exports&&"undefined"!=typeof module?module.exports=o():"function"==typeof define&&define.amd?define(o):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidv1=o()}(this,(function(){"use strict";var e,o=new Uint8Array(16);function t(){if(!e&&!(e="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return e(o)}var n=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function r(e){return"string"==typeof e&&n.test(e)}for(var i,u,s=[],a=0;a<256;++a)s.push((a+256).toString(16).substr(1));var d=0,f=0;return function(e,o,n){var a=o&&n||0,c=o||new Array(16),l=(e=e||{}).node||i,p=void 0!==e.clockseq?e.clockseq:u;if(null==l||null==p){var v=e.random||(e.rng||t)();null==l&&(l=i=[1|v[0],v[1],v[2],v[3],v[4],v[5]]),null==p&&(p=u=16383&(v[6]<<8|v[7]))}var y=void 0!==e.msecs?e.msecs:Date.now(),m=void 0!==e.nsecs?e.nsecs:f+1,g=y-d+(m-f)/1e4;if(g<0&&void 0===e.clockseq&&(p=p+1&16383),(g<0||y>d)&&void 0===e.nsecs&&(m=0),m>=1e4)throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");d=y,f=m,u=p;var h=(1e4*(268435455&(y+=122192928e5))+m)%4294967296;c[a++]=h>>>24&255,c[a++]=h>>>16&255,c[a++]=h>>>8&255,c[a++]=255&h;var w=y/4294967296*1e4&268435455;c[a++]=w>>>8&255,c[a++]=255&w,c[a++]=w>>>24&15|16,c[a++]=w>>>16&255,c[a++]=p>>>8|128,c[a++]=255&p;for(var b=0;b<6;++b)c[a+b]=l[b];return o||function(e){var o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,t=(s[e[o+0]]+s[e[o+1]]+s[e[o+2]]+s[e[o+3]]+"-"+s[e[o+4]]+s[e[o+5]]+"-"+s[e[o+6]]+s[e[o+7]]+"-"+s[e[o+8]]+s[e[o+9]]+"-"+s[e[o+10]]+s[e[o+11]]+s[e[o+12]]+s[e[o+13]]+s[e[o+14]]+s[e[o+15]]).toLowerCase();if(!r(t))throw TypeError("Stringified UUID is invalid");return t}(c)}})); \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidv3.min.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidv3.min.js new file mode 100644 index 00000000..8d37b62d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidv3.min.js @@ -0,0 +1 @@ +!function(n,r){"object"==typeof exports&&"undefined"!=typeof module?module.exports=r():"function"==typeof define&&define.amd?define(r):(n="undefined"!=typeof globalThis?globalThis:n||self).uuidv3=r()}(this,(function(){"use strict";var n=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function r(r){return"string"==typeof r&&n.test(r)}for(var e=[],t=0;t<256;++t)e.push((t+256).toString(16).substr(1));function i(n){return 14+(n+64>>>9<<4)+1}function o(n,r){var e=(65535&n)+(65535&r);return(n>>16)+(r>>16)+(e>>16)<<16|65535&e}function a(n,r,e,t,i,a){return o((f=o(o(r,n),o(t,a)))<<(u=i)|f>>>32-u,e);var f,u}function f(n,r,e,t,i,o,f){return a(r&e|~r&t,n,r,i,o,f)}function u(n,r,e,t,i,o,f){return a(r&t|e&~t,n,r,i,o,f)}function c(n,r,e,t,i,o,f){return a(r^e^t,n,r,i,o,f)}function s(n,r,e,t,i,o,f){return a(e^(r|~t),n,r,i,o,f)}return function(n,t,i){function o(n,o,a,f){if("string"==typeof n&&(n=function(n){n=unescape(encodeURIComponent(n));for(var r=[],e=0;e>>24,t[1]=e>>>16&255,t[2]=e>>>8&255,t[3]=255&e,t[4]=(e=parseInt(n.slice(9,13),16))>>>8,t[5]=255&e,t[6]=(e=parseInt(n.slice(14,18),16))>>>8,t[7]=255&e,t[8]=(e=parseInt(n.slice(19,23),16))>>>8,t[9]=255&e,t[10]=(e=parseInt(n.slice(24,36),16))/1099511627776&255,t[11]=e/4294967296&255,t[12]=e>>>24&255,t[13]=e>>>16&255,t[14]=e>>>8&255,t[15]=255&e,t}(o)),16!==o.length)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");var u=new Uint8Array(16+n.length);if(u.set(o),u.set(n,o.length),(u=i(u))[6]=15&u[6]|t,u[8]=63&u[8]|128,a){f=f||0;for(var c=0;c<16;++c)a[f+c]=u[c];return a}return function(n){var t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:0,i=(e[n[t+0]]+e[n[t+1]]+e[n[t+2]]+e[n[t+3]]+"-"+e[n[t+4]]+e[n[t+5]]+"-"+e[n[t+6]]+e[n[t+7]]+"-"+e[n[t+8]]+e[n[t+9]]+"-"+e[n[t+10]]+e[n[t+11]]+e[n[t+12]]+e[n[t+13]]+e[n[t+14]]+e[n[t+15]]).toLowerCase();if(!r(i))throw TypeError("Stringified UUID is invalid");return i}(u)}try{o.name=n}catch(n){}return o.DNS="6ba7b810-9dad-11d1-80b4-00c04fd430c8",o.URL="6ba7b811-9dad-11d1-80b4-00c04fd430c8",o}("v3",48,(function(n){if("string"==typeof n){var r=unescape(encodeURIComponent(n));n=new Uint8Array(r.length);for(var e=0;e>5]>>>i%32&255,a=parseInt(t.charAt(o>>>4&15)+t.charAt(15&o),16);r.push(a)}return r}(function(n,r){n[r>>5]|=128<>5]|=(255&n[t/8])<1&&void 0!==arguments[1]?arguments[1]:0,o=(i[t[e+0]]+i[t[e+1]]+i[t[e+2]]+i[t[e+3]]+"-"+i[t[e+4]]+i[t[e+5]]+"-"+i[t[e+6]]+i[t[e+7]]+"-"+i[t[e+8]]+i[t[e+9]]+"-"+i[t[e+10]]+i[t[e+11]]+i[t[e+12]]+i[t[e+13]]+i[t[e+14]]+i[t[e+15]]).toLowerCase();if(!r(o))throw TypeError("Stringified UUID is invalid");return o}(u)}})); \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidv5.min.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidv5.min.js new file mode 100644 index 00000000..ba6fc63d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/umd/uuidv5.min.js @@ -0,0 +1 @@ +!function(r,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(r="undefined"!=typeof globalThis?globalThis:r||self).uuidv5=e()}(this,(function(){"use strict";var r=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function e(e){return"string"==typeof e&&r.test(e)}for(var t=[],n=0;n<256;++n)t.push((n+256).toString(16).substr(1));function a(r,e,t,n){switch(r){case 0:return e&t^~e&n;case 1:return e^t^n;case 2:return e&t^e&n^t&n;case 3:return e^t^n}}function o(r,e){return r<>>32-e}return function(r,n,a){function o(r,o,i,f){if("string"==typeof r&&(r=function(r){r=unescape(encodeURIComponent(r));for(var e=[],t=0;t>>24,n[1]=t>>>16&255,n[2]=t>>>8&255,n[3]=255&t,n[4]=(t=parseInt(r.slice(9,13),16))>>>8,n[5]=255&t,n[6]=(t=parseInt(r.slice(14,18),16))>>>8,n[7]=255&t,n[8]=(t=parseInt(r.slice(19,23),16))>>>8,n[9]=255&t,n[10]=(t=parseInt(r.slice(24,36),16))/1099511627776&255,n[11]=t/4294967296&255,n[12]=t>>>24&255,n[13]=t>>>16&255,n[14]=t>>>8&255,n[15]=255&t,n}(o)),16!==o.length)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");var s=new Uint8Array(16+r.length);if(s.set(o),s.set(r,o.length),(s=a(s))[6]=15&s[6]|n,s[8]=63&s[8]|128,i){f=f||0;for(var u=0;u<16;++u)i[f+u]=s[u];return i}return function(r){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,a=(t[r[n+0]]+t[r[n+1]]+t[r[n+2]]+t[r[n+3]]+"-"+t[r[n+4]]+t[r[n+5]]+"-"+t[r[n+6]]+t[r[n+7]]+"-"+t[r[n+8]]+t[r[n+9]]+"-"+t[r[n+10]]+t[r[n+11]]+t[r[n+12]]+t[r[n+13]]+t[r[n+14]]+t[r[n+15]]).toLowerCase();if(!e(a))throw TypeError("Stringified UUID is invalid");return a}(s)}try{o.name=r}catch(r){}return o.DNS="6ba7b810-9dad-11d1-80b4-00c04fd430c8",o.URL="6ba7b811-9dad-11d1-80b4-00c04fd430c8",o}("v5",80,(function(r){var e=[1518500249,1859775393,2400959708,3395469782],t=[1732584193,4023233417,2562383102,271733878,3285377520];if("string"==typeof r){var n=unescape(encodeURIComponent(r));r=[];for(var 
i=0;i>>0;A=U,U=w,w=o(b,30)>>>0,b=g,g=C}t[0]=t[0]+g>>>0,t[1]=t[1]+b>>>0,t[2]=t[2]+w>>>0,t[3]=t[3]+U>>>0,t[4]=t[4]+A>>>0}return[t[0]>>24&255,t[0]>>16&255,t[0]>>8&255,255&t[0],t[1]>>24&255,t[1]>>16&255,t[1]>>8&255,255&t[1],t[2]>>24&255,t[2]>>16&255,t[2]>>8&255,255&t[2],t[3]>>24&255,t[3]>>16&255,t[3]>>8&255,255&t[3],t[4]>>24&255,t[4]>>16&255,t[4]>>8&255,255&t[4]]}))})); \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/uuid-bin.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/uuid-bin.js new file mode 100644 index 00000000..50a7a9f1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/uuid-bin.js @@ -0,0 +1,85 @@ +"use strict"; + +var _assert = _interopRequireDefault(require("assert")); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 '); + console.log(' uuid v4'); + console.log(' uuid v5 '); + console.log(' uuid --help'); + console.log('\nNote: may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +const args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} + +const version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + console.log((0, _v.default)()); + break; + + case 'v3': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v3 name not specified'); + (0, _assert.default)(namespace != null, 'v3 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v2.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v2.default.DNS; + } + + console.log((0, _v2.default)(name, namespace)); + break; + } + + case 'v4': + console.log((0, _v3.default)()); + break; + + case 'v5': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v5 name not specified'); + (0, _assert.default)(namespace != null, 'v5 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v4.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v4.default.DNS; + } + + console.log((0, _v4.default)(name, namespace)); + break; + } + + default: + usage(); + process.exit(1); +} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v1.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v1.js new file mode 100644 index 00000000..abb9b3d1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v3.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v3.js new file mode 100644 index 00000000..6b47ff51 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v35.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v35.js new file mode 100644 index 00000000..f784c633 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v35.js @@ -0,0 +1,78 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v4.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v4.js new file mode 100644 index 00000000..838ce0b2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v4.js @@ -0,0 +1,37 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v5.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v5.js new file mode 100644 index 00000000..99d615e0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/validate.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/validate.js new file mode 100644 index 00000000..fd052157 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/version.js b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/version.js new file mode 100644 index 00000000..b72949cd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/dist/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/package.json b/node_modules/@azure/ms-rest-js/node_modules/uuid/package.json new file mode 100644 index 00000000..f0ab3711 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/package.json @@ -0,0 +1,135 @@ +{ + "name": "uuid", + "version": "8.3.2", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./dist/bin/uuid" + }, + "sideEffects": false, + "main": "./dist/index.js", + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "module": "./dist/esm-node/index.js", + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "devDependencies": { + "@babel/cli": "7.11.6", + "@babel/core": "7.11.6", + "@babel/preset-env": "7.11.5", + "@commitlint/cli": "11.0.0", + "@commitlint/config-conventional": "11.0.0", + "@rollup/plugin-node-resolve": "9.0.0", + "babel-eslint": "10.1.0", + "bundlewatch": "0.3.1", + "eslint": "7.10.0", + "eslint-config-prettier": "6.12.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.22.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "3.1.4", + "eslint-plugin-promise": "4.2.1", + "eslint-plugin-standard": "4.0.1", + "husky": "4.3.0", + "jest": "25.5.4", + 
"lint-staged": "10.4.0", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.1.2", + "random-seed": "0.3.0", + "rollup": "2.28.2", + "rollup-plugin-terser": "7.0.2", + "runmd": "1.3.2", + "standard-version": "9.0.0" + }, + "optionalDevDependencies": { + "@wdio/browserstack-service": "6.4.0", + "@wdio/cli": "6.4.0", + "@wdio/jasmine-framework": "6.4.0", + "@wdio/local-runner": "6.4.0", + "@wdio/spec-reporter": "6.4.0", + "@wdio/static-server-service": "6.4.0", + "@wdio/sync": "6.4.0" + }, + "scripts": { + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "lint": "npm run eslint:check && npm run prettier:check", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "pretest": "[ -n $CI ] || npm run build", + "test": "BABEL_ENV=commonjs node --throw-deprecation node_modules/.bin/jest test/unit/", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**", + "test:browser": "wdio run ./wdio.conf.js", + "pretest:node": "npm run build", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh", + "pretest:benchmark": "npm run build", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "prettier:check": "prettier --ignore-path .prettierignore --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --ignore-path .prettierignore --write '**/*.{js,jsx,json,md}'", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "md": "runmd --watch --output=README.md README_js.md", + "docs": "( node --version | grep -q 'v12' ) && ( npm run build && runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "build": "./scripts/build.sh", + "prepack": "npm run build", + "release": "standard-version --no-verify" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "husky": { + "hooks": { + "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + } +} diff --git a/node_modules/@azure/ms-rest-js/node_modules/uuid/wrapper.mjs b/node_modules/@azure/ms-rest-js/node_modules/uuid/wrapper.mjs new file mode 100644 index 00000000..c31e9cef --- /dev/null +++ b/node_modules/@azure/ms-rest-js/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 = uuid.v1; +export const v3 = uuid.v3; +export const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/node_modules/@azure/ms-rest-js/package.json b/node_modules/@azure/ms-rest-js/package.json new file mode 100644 index 00000000..ba25978b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/package.json @@ -0,0 +1,181 @@ +{ + "name": "@azure/ms-rest-js", + "author": { + 
"name": "Microsoft Corporation", + "email": "azsdkteam@microsoft.com", + "url": "https://github.com/Azure/ms-rest-js" + }, + "version": "2.6.1", + "description": "Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest", + "tags": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime" + ], + "keywords": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime" + ], + "main": "./dist/msRest.node.js", + "module": "./es/lib/msRest.js", + "types": "./es/lib/msRest.d.ts", + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "es/lib/**/*.js", + "es/lib/**/*.js.map", + "es/lib/**/*.d.ts", + "es/lib/**/*.d.ts.map", + "lib/**/!(dom.d).ts", + "dom-shim.d.ts", + "LICENSE", + "README.md", + "ThirdPartyNotices.txt" + ], + "browser": { + "./es/lib/policies/msRestUserAgentPolicy.js": "./es/lib/policies/msRestUserAgentPolicy.browser.js", + "./es/lib/policies/agentPolicy.js": "./es/lib/policies/agentPolicy.browser.js", + "./es/lib/policies/proxyPolicy.js": "./es/lib/policies/proxyPolicy.browser.js", + "./es/lib/util/base64.js": "./es/lib/util/base64.browser.js", + "./es/lib/util/xml.js": "./es/lib/util/xml.browser.js", + "./es/lib/defaultHttpClient.js": "./es/lib/defaultHttpClient.browser.js" + }, + "license": "MIT", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tough-cookie": "^3.0.1", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.4.19" + }, + "devDependencies": { + "@azure/logger-js": "^1.1.0", + "@microsoft/api-extractor": "^7.18.11", + "@ts-common/azure-js-dev-tools": "^19.4.0", + "@types/chai": "^4.1.7", + "@types/express": "^4.17.0", + "@types/fetch-mock": "^7.3.1", + "@types/form-data": "^2.2.1", + "@types/glob": "^7.1.1", + "@types/karma": "^3.0.3", + "@types/mocha": "^5.2.7", + "@types/node": "^12.0.12", + "@types/node-fetch": "^2.3.7", + "@types/semver": "^6.0.1", + "@types/sinon": "^7.0.13", + "@types/tough-cookie": "^2.3.5", + "@types/tunnel": "0.0.1", + "@types/uuid": "^8.3.2", + "@types/webpack": "^4.4.34", + "@types/webpack-dev-middleware": "^2.0.3", + "@types/xml2js": "^0.4.4", + "abortcontroller-polyfill": "^1.3.0", + "chai": "4.3.4", + "cross-env": "^7.0.3", + "express": "^4.17.1", + "fetch-mock": "^7.3.3", + "glob": "^7.1.4", + "karma": "^4.1.0", + "karma-chai": "^0.1.0", + "karma-chrome-launcher": "^2.2.0", + "karma-firefox-launcher": "^1.1.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0", + "karma-sourcemap-loader": "^0.3.7", + "karma-typescript-es6-transform": "^4.1.1", + "karma-webpack": "^4.0.2", + "mocha": "^6.1.4", + "mocha-chrome": "^2.0.0", + "mocha-junit-reporter": "^1.23.0", + "mocha-multi-reporters": "^1.1.7", + "npm-run-all": "^4.1.5", + "nyc": "^14.1.1", + "prettier": "2.2.1", + "rollup": "^1.16.6", + "rollup-plugin-commonjs": "^10.0.1", + "rollup-plugin-json": "^4.0.0", + "rollup-plugin-multi-entry": "^2.1.0", + "rollup-plugin-node-resolve": "^5.2.0", + "rollup-plugin-resolve": "0.0.1-predev.1", + "rollup-plugin-sourcemaps": "^0.4.2", + "rollup-plugin-visualizer": "^2.4.4", + "semver": "^6.2.0", + "shx": "^0.3.2", + "sinon": "^7.3.2", + "terser": "^4.0.2", + "ts-loader": "^6.0.4", + "ts-node": "^8.3.0", + "tslint": "^5.18.0", + "tslint-eslint-rules": "^5.4.0", + "typescript": "^3.5.2", + "webpack": "^4.35.2", + "webpack-cli": "^3.3.5", + "webpack-dev-middleware": "^3.7.0", + 
"xhr-mock": "^2.4.1", + "yarn": "^1.16.0" + }, + "homepage": "https://github.com/Azure/ms-rest-js", + "repository": { + "type": "git", + "url": "git@github.com:Azure/ms-rest-js.git" + }, + "bugs": { + "url": "http://github.com/Azure/ms-rest-js/issues" + }, + "scripts": { + "build": "run-p build:scripts build:lib", + "build:scripts": "tsc -p ./.scripts/", + "build:lib": "run-s build:tsc build:rollup build:minify-browser extract-api", + "build:tsc": "tsc -p tsconfig.es.json", + "build:rollup": "rollup -c rollup.config.ts", + "build:minify-browser": "terser -c -m --comments --source-map \"content='./dist/msRest.browser.js.map'\" -o ./dist/msRest.browser.min.js ./dist/msRest.browser.js", + "build:test-browser": "webpack --config webpack.testconfig.ts", + "extract-api": "api-extractor run --local", + "format": "prettier --write \"./**/*.ts\" *.json", + "test": "run-p test:tslint test:unit test:karma", + "test:tslint": "tslint -p .", + "test:unit": "cross-env TS_NODE_FILES=true nyc mocha", + "test:karma": "npm run build:test-browser && node ./node_modules/karma/bin/karma start karma.conf.ts --browsers ChromeNoSecurity --single-run ", + "test:karma:debug": "npm run build:test-browser && node ./node_modules/karma/bin/karma start karma.conf.ts --log-level debug --browsers ChromeDebugging --debug --auto-watch", + "test:karma:debugff": "npm run build:test-browser && node ./node_modules/karma/bin/karma start karma.conf.ts --log-level debug --browsers FirefoxDebugging --debug --auto-watch", + "dep:autorest.typescript": "npx ts-node .scripts/testDependentProjects.ts autorest.typescript 'gulp build' 'gulp regenerate' 'npm run local'", + "dep:ms-rest-azure-js": "npx ts-node .scripts/testDependentProjects.ts ms-rest-azure-js", + "publish-preview": "mocha --no-colors && shx rm -rf dist/test && node ./.scripts/publish", + "local": "ts-node ./.scripts/local.ts", + "latest": "ts-node ./.scripts/latest.ts", + "prepack": "npm i && npm run build", + "check:packagejsonversion": "ts-node ./.scripts/checkPackageJsonVersion.ts", + "check:foronlycalls": "ts-node ./.scripts/checkForOnlyCalls.ts", + "check:everything": "ts-node ./.scripts/checkEverything.ts" + }, + "sideEffects": false, + "nyc": { + "extension": [ + ".ts" + ], + "exclude": [ + "coverage/**/*", + "**/*.d.ts", + "**/*.js" + ], + "reporter": [ + "text", + "html", + "cobertura" + ], + "all": true + } +} diff --git a/node_modules/@azure/storage-blob/CHANGELOG.md b/node_modules/@azure/storage-blob/CHANGELOG.md new file mode 100644 index 00000000..b160d240 --- /dev/null +++ b/node_modules/@azure/storage-blob/CHANGELOG.md @@ -0,0 +1,517 @@ +# Release History + +## 12.10.0 (2022-05-12) + +### Features Added + +- Includes all features released in 12.10.0-beta.1. + +### Bugs Fixed + +- Refined user-agent value to avoid failure when os information is not available on some platforms. +- Fix an issue of not returning raw blob properties in ContainerClient.listBlobsFlat() and ContainerClient.listBlobsByHierarchy(). + +## 12.10.0-beta.1 (2022-04-19) + +### Features Added + +- Added support for service version 2021-06-08. +- Added pageable output of PageBlobClient.listPageRanges() and PageBlobClient.listPageRangesDiff(). +- Added ability to copy source blob tags for BlobClient.syncCopyFromURL(). + +### Bugs Fixed + +- Fix incorrect browser mapping path for BufferScheduler.js +- Add `react-native` mapping to ESM entrypoint + +## 12.9.0 (2022-03-11) + +### Features Added + +- Includes all features released in 12.9.0-beta.1, 12.9.0-beta.2, 12.9.0-beta.3 and 12.9.0-beta.4. 
+ +### Bugs Fixed + +- Fixed an issue of always sending x-ms-encryption-algorithm header in request. + +## 12.9.0-beta.4 (2022-03-04) + +### Features Added + +- Added ability to specify Disk Compute AAD Audience in StoragePipelineOptions. + +### Bugs Fixed + +- Set correct content length in requests for uploading operations to avoid unexpected failure if customized content length is incorrect. + +## 12.9.0-beta.3 (2022-02-11) + +### Features Added + +- Added support for service version 2021-04-10. +- Added support for finding blobs by tags in a container. + +### Bugs Fixed + +- Fixed a bug where customized `ProxyOptions` is overwrited by a default one when initializing `BlobServiceClient`, `BlobClient`, `AppendBlobClient`, `BlockBlobClient`, `PageBlobClient` or `ContainerClient` with connection string. + +## 12.9.0-beta.2 (2021-12-03) + +### Features Added + +- Added support for service version 2021-02-12 +- Added support for listing system containers with `BlobServiceClient.listContainers()`. +- Added support for blob names container invalid XML characters. + +## 12.9.0-beta.1 (2021-11-09) + +### Features Added + +- Added support for service version 2020-12-06. +- Added support for Encryption Scope SAS. +- Added support for Encryption Scopes with BlobBaseClient.SyncCopyFromUriAsync(). +- Added support for generating SAS URLs with the Permanent Delete ('y') SAS permission. + +## 12.8.0 (2021-09-10) + +### Features Added + +- Includes all features released in 12.8.0-beta.1. + +## 12.7.0 (2021-08-02) + +- Support for Node.js 8 and IE 11 has been dropped. Please see our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. +- Changed TS compilation target to ES2017 in order to produce smaller bundles and use more native platform features +- Updated our internal core package dependencies to their latest versions in order to add support for Opentelemetry 1.0.0 which is compatible with the latest versions of our other client libraries. + +## 12.8.0-beta.1 (2021-07-28) + +### Features Added + +- Added support for service version 2020-10-02. +- Added support for Immutable Storage with Versioning + - Added BlobClient.setImmutibilityPolicy() + - Added BlobClient.seleteImmutabilityPolicy() + - Added BlobClient.setLegalHold() +- Added support for listing deleted root blobs with versions `ContainerClient.listBlobFlat()` and `ContainerClient.listBlobHierarchy()`. +- Added support for OAuth copy sources for synchronous copy operations. +- Added support for Parquet as an input format in `BlockBlobClient.query()`. +- With the dropping of support for Node.js versions that are no longer in LTS, the dependency on `@types/node` has been updated to version 12. Read our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +## 12.6.0 (2021-06-09) + +- Includes all features released in 12.6.0-beta.1. + +## 12.6.0-beta.1 (2021-05-14) + +- Updated Azure Storage Service API version to 2020-08-04. +- Restoring deleted container doesn't support renaming anymore, deprecated `destinationContainerName` in `ServiceUndeleteContainerOptions` for `BlobServiceClient.undeleteContainer()`. + +## 12.5.0 (2021-03-10) + +- Includes all features released in 12.5.0-beta.1. + +## 12.5.0-beta.1 (2021-02-09) + +- Now support Batch operations scoped to the Container level. You can use `ContainerClient.getBlobBatchClient()` to get such a `BlobBatchClient`. 
+ +## 12.4.1 (2021-02-03) + +- Fixed a compile failure due to "Can't resolve 'crypto'" in Angular. [Issue #13267](https://github.com/Azure/azure-sdk-for-js/issues/13267). +- Fixed an issue that the download stream returned by `BlobClient.download` won't release underlying resources unless it's fully consumed. [Isssue #11850](https://github.com/Azure/azure-sdk-for-js/issues/11850). +- Fixed an error when listing blob with a metadata key of `_` [issue #9197](https://github.com/Azure/azure-sdk-for-js/issues/9171) +- The `"Unclosed root tag"` XML parser error is now retriable. [PR #13076](https://github.com/Azure/azure-sdk-for-js/pull/13076). + +## 12.4.0 (2021-01-12) + +- Added a new `from(permissionLike)` function to `AccountSASPermissions`, `BlobSASPermissions` and `ContainerSASPermissions` for creating such a permission from a raw permission-like object. Addressed issue [9714](https://github.com/Azure/azure-sdk-for-js/issues/9714). + +## 12.4.0-beta.1 (2020-12-09) + +- Updated Azure Storage Service API version to 2020-04-08. +- Added a new interface `BlockBlobClient.syncUploadFromURL()` to support creating a new Block Blob where the contents of the blob are read from a given URL. +- Blob Tags updates: `BlobClient.setTags()` and `BlobClient.getTags()` now support the `LeaseAccessConditions` and `BlobServiceClient.findBlobsByTags()` will return all matching tags for each blob. +- Added `generateSasUrl` to `BlobClient` and `ContainerClient` to generate a service-level SAS URI for the client. +- Added `generateAccountSasUrl` to `BlobServiceClient` to generate an account-level SAS URI for the client. +- Fixed a bug where the `credential` property of the `StorageClient` is not set correctly when using a Token credential. Fixed bug [12219](https://github.com/Azure/azure-sdk-for-js/issues/12219). +- Blob Batch operations now reorder the subresponses in the client side to perserve the original input order. See `BlobBatchClient.submitBatch()` and [12335](https://github.com/Azure/azure-sdk-for-js/issues/12335). +- Won't remove the first space in the `userAgentOptions.userAgentPrefix` passed to the `newPipeline()` now. Fixed bug [7536](https://github.com/Azure/azure-sdk-for-js/issues/7536). +- Added `isHierarchicalNamespaceEnabled` to the response of `BlobServiceClient.getAccountInfo()`. + +## 12.3.0 (2020-11-10) + +- Added `BlockBlobClient.uploadData(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options)` for parallel uploading. It's avaiable in both Node.js and browsers. +- Added new SAS permissions Move(m) and Execute(e) for Blob and Container. Also supports specifying an authorized object ID via `saoid` and a correlation ID via `scid` for user delegation SAS. + +## 12.3.0-beta.1 (2020-10-13) + +- Updated Azure Storage Service API version to 2020-02-10. +- Added support for Blob Last Access Time tracking. +- Added support for Blob Query Arrow output format. +- Added support for Container Soft Delete. + +## 12.2.1 (2020-09-17) + +- Bug fix - Fixes an issue where`BlockBlobClient.uploadStream()` will give an "Invalid Verb" error when keep-alive is enabled. Fixed bug [11187](https://github.com/Azure/azure-sdk-for-js/issues/11187). + +## 12.2.0 (2020-09-08) + +- Added RehydratePriority to BlobProperties and BlobItemProperties. +- Fixed `BlockBlobClient.uploadStream()` to support `bufferSize` larger than `buffer.constants.MAX_LENGTH`. +- Added support for Object Replication Service. +- Added custom domain support. +- Supported Append Blob Seal. +- Supported tags conditional operations. 
+- The Static Website Service now supports a DefaultIndexDocumentPath for a global HTTP 200 route within the static website. You can get it by `BlobServiceClient.getProperties()` and set it via `blobServiceClient.setProperties()`. +- Bug fix - `credential` parameter of `newPipeline()` function is now optional. If not specified, `AnonymousCredential` is used. Fixes bug [9628](https://github.com/Azure/azure-sdk-for-js/issues/9628). +- Bug fix - high level upload functions `BlockBlobClient.uploadFile()`, `BlockBlobClient.uploadStream()` and `BlockBlobClient.uploadBrowserData()` now support setting tier. Fixes bug [9062](https://github.com/Azure/azure-sdk-for-js/issues/9062). +- Optimized error semantic for `listBlobsByHierarchy()`. Using `listBlobsByHierarchy()` with empty `delimiter` will now fail-fast. +- Bug fix - Content-Length header is no more ignored. Fixes bugs [8903](https://github.com/Azure/azure-sdk-for-js/issues/8903), [9300](https://github.com/Azure/azure-sdk-for-js/issues/9300) and [10614](https://github.com/Azure/azure-sdk-for-js/issues/10614). + +## 12.2.0-preview.1 (2020-07-03) + +- Updated Azure Storage Service API version to 2019-12-12. +- Supported quick query. Added a new API `BlockBlobClient.query()`. +- Supported blob versioning. +- Supported blob tags. +- Increased the maximum block size for Block Blob from 100MiB to 4000MiB(~4GB). And thereby supporting ~200TB maximum size for Block Blob. +- Added convenience method `createIfNotExists` for `ContainerClient`, `AppendBlobClient` and `PageBlobClient`. +- Added convenience method `deleteIfExists` for `ContainerClient` and `BlobClients`. + +## 12.1.2 (2020-05-20) + +- Fix data corruption failure error [issue #6411](https://github.com/Azure/azure-sdk-for-js/issues/6411) when downloading compressed files. [PR #7993](https://github.com/Azure/azure-sdk-for-js/pull/7993) +- Fix un-handled TypeError [issue #8499](https://github.com/Azure/azure-sdk-for-js/issues/8499) in Electron applications. [PR #8568](https://github.com/Azure/azure-sdk-for-js/pull/8568) +- Updated to use `@opentelemetry/api` 0.6.1 via `@azure/core-tracing`. [PR #7998](https://github.com/Azure/azure-sdk-for-js/pull/7998) +- Updated to use `typescript` 3.8.3. [PR #8659](https://github.com/Azure/azure-sdk-for-js/pull/8659) + +## 12.1.1 (2020-03-12) + +- Bug fix - Blob SAS's `sr` field is now properly set when generating SAS for a blob using a stored policy with `signedpermissions`. For more details about Service SAS, please refer to [link](https://docs.microsoft.com/rest/api/storageservices/create-service-sas). +- Fixed unexpected hang issue when uploading empty body. Fixed bug [6904](https://github.com/Azure/azure-sdk-for-js/issues/6904). + +## 12.1.0 (2020-02-12) + +- Updated Azure Storage Service API version to 2019-07-07. +- Add support for Encryption Scope, which is similar to regular CPK, except the encryption key is provided by name. +- MD5 and CRC headers for Blob Copy: add `contentMD5`and `xMsContentCrc64` to `BlobCopyFromURLHeaders` and `sourceContentMD5` to `BlobSyncCopyFromURLOptions`. +- Add `getPageRangesDiffForManagedDisks` to `PageBlobClient`. +- Fixed a bug where the package didn't work as expected when bundling web applications. [PR #7298](https://github.com/Azure/azure-sdk-for-js/pull/7298) + +## 12.0.2 (2020-01-09) + +- Bug fix - Name properties on clients now support more kinds of endpoints(IPv4/v6 hosts, single word domains). 
[PR #6753](https://github.com/Azure/azure-sdk-for-js/pull/6753) +- Service clients now share a single http client instance by default. [PR #6657](https://github.com/Azure/azure-sdk-for-js/pull/6657) + + Previously, a new http client was created for each service client if none was provided by the user. This could result in TCP port exhaustion under heavy usage with the keepAlive option enabled because each http client has its own persistent TCP connection. This change creates a single http client instance which is shared among all service clients by default. + +## 12.0.1 (2019-12-04) + +- Updated to use OpenTelemetry 0.2 via `@azure/core-tracing` +- Bug Fix - Convert empty prefixes (`""`) to `undefined` when passed as options to the `listContainers`, `listBlobsFlat`, and `listBlobsByHierarchy` methods to avoid sending an invalid request to the service. Fixes bug [5817](https://github.com/Azure/azure-sdk-for-js/issues/5817). +- Added a warning to the documentation of `downloadToBuffer` that explains the limitations of Node.js `Buffer` sizes to around 2GB on 64-bit architectures and 1GB on 32-bit architectures. +- Documented the behavior of `getProperties` methods with respect to metadata keys and their casing inconsistency when compared to the metadata keys returned through corresponding "list" methods with the `includeMetadata` option. + +## 12.0.0 (2019-10-30) + +- This release marks the general availability of the `@azure/storage-blob` package. +- Bug Fix - Previous versions of `@azure/storage-blob` preview library failed for React apps because of the usage of `fs.stat` method which is not available in browsers and due to the presence of some circular dependencies. Both of these issues are fixed in this new release. +- [Breaking] The custom browser and retry policies that are specific to the Storage libraries have been + renamed to have the `Storage` prefix. [PR 5862](https://github.com/Azure/azure-sdk-for-js/pull/5862). + Below are the entities that now have the Storage prefix + - BrowserPolicy + - BrowserPolicyFactory + - RetryPolicy + - RetryPolicyType + - RetryOptions + - RetryPolicyFactory +- [Breaking] `LeaseClient` is renamed to `BlobLeaseClient`. The helper method `getLeaseClient` on both `BlobClient` and `ContainerClient` is renamed to `getBlobLeaseClient`. +- [Breaking] The properties in the `StoragePipelineOptions` interface have been updated as below: + - The `proxy` property of type `ProxySettings | string` has been renamed to `proxyOptions` and + will be of type `ProxyOptions`. If you have been passing url directly, split the value into `host` + and `port` then pass it as a json object. + - The `telemetry` property of type `TelemetryOptions` has been renamed to `userAgentOptions` of + type `UserAgentOptions`. + - The `logger` is no longer a property available to configure. To enable logging, please see the + [Troubleshooting](https://github.com/Azure/azure-sdk-for-js/blob/0ddc2f3c3d4658b20d96910acc37a77e5209e5e3/sdk/storage/storage-blob/README.md#troubleshooting) section of our readme. + - The `UniqueRequestIdPolicy` and `KeepAlivePolicy` are no longer exported from this library. The + corresponding policies from the `@azure/core-http` library are meant to be used instead. +- `beginCopyFromURL` is added to the `BlobClient`, it returns a poller that can be used to watch the status of a copy operation. It also supports cancelling a pending copy. 
+- Updates to `BlockBlobClient.uploadStream` + - [Breaking] `maxBuffers` attribute of is renamed to `maxConcurrency` + - Added default values for parameters, bufferSize = `8MB` and maxConcurrency = `5` +- [Breaking] Bug Fix - The page object returned from `ContainerClient.listContainers` had its `containerItems` property set to an empty string instead of an empty array if the storage account has no blob containers. The issue is fixed in this new release. +- `BlobClient.downloadToBuffer()` helper method has a new overload where it is not required to pass the `Buffer`. Attributes `offset` and `count` are optional, downloads the entire blob if they are not provided. +- [Breaking] The default browser bundle has been removed from the npm package. Bundling your application with a bundler such as Webpack is the recommended approach to building a browser bundle. For details on how to do this, please refer to our [bundling documentation](https://aka.ms/AzureSDKBundling). + +## 12.0.0-preview.5 (2019-10-22) + +- [Breaking] `IPRange` is renamed to `SasIPRange`. [PR #5551](https://github.com/Azure/azure-sdk-for-js/pull/5551) +- Created new interface `CommonOptions`. This interface is for standard options that apply to all methods that invoke remote operations. This interface currently contains options that enable client-side tracing of the SDK. [PR #5550](https://github.com/Azure/azure-sdk-for-js/pull/5550) +- [Breaking] `Models` is no longer exported in public API surface. Instead generated model types required by the public API are explicitly re-exported. In the case where convenience layer already defined a type with conflicting name, the model type is aliased with `Model` suffix. [PR #5567](https://github.com/Azure/azure-sdk-for-js/pull/5567) +- [Breaking] Cancelling an operation now throws a standardized error with the name `AbortError`. [PR #5633](https://github.com/Azure/azure-sdk-for-js/pull/5663) +- [Breaking] `blobName` on `AppendBlobClient`, `BlobClient`, `BlockBlobClient` and `PageBlobClient` is renamed to `name`. [PR #5613](https://github.com/Azure/azure-sdk-for-js/pull/5613) +- [Breaking] New `BlobBatchClient` allowing batched requests to the Azure Storage Blob service. [PR #5634](https://github.com/Azure/azure-sdk-for-js/pull/5634) + - Renamed `BatchRequest` to `BlobBatch`, flattened `BatchDeleteRequest` and `BatchSetTierRequest` into `BlobBatch` + - Moved `submitBatch` code from `BlobServiceClient` into new `BlobBatchClient`, created new `deleteBlobs` and `setBlobsAccessTier` helpers on `BlobBatchClient` + `BlobBatchClient` contains `setBlobsAccessTier`, `submitBatch` and `deleteBlobs` helper methods. `BlobBatch` represents an aggregated set of operations on blobs, `delete` and `setAccessTier` functionalities are supported currently. +- [Breaking] Flattened the conditions type `BlobRequestConditions` instead of current nested one. It replaces `ContainerAccessConditions` and `BlobAccessConditions`. + In addition, various conditions fields are renamed into simply `conditions` except `sourceModifiedAccessConditions` which is renamed to `sourceConditions`. + This makes it more convenient to pass in conditional request options. [PR #5672](https://github.com/Azure/azure-sdk-for-js/pull/5672). + + An example: + + ```js + { + blobAccessConditions: { + modifiedAccessConditions: { + ifMatch: uploadResponse.eTag + } + } + ``` + + turns into + + ```js + { + conditions: { + ifMatch: uploadResponse.eTag + } + ``` + +- [Breaking] `eTag` attribute is renamed to `etag`. 
[PR #5674](https://github.com/Azure/azure-sdk-for-js/pull/5674) +- [Breaking] `body` field from `RestError` Object in core-http Library is removed, the `response` property on the error will now have the `parsedBody` & `headers` along with raw body & headers that are already present. PRs [#5670](https://github.com/Azure/azure-sdk-for-js/pull/5670), [#5437](https://github.com/Azure/azure-sdk-for-js/pull/5437) + - Errors from the storage service can be seen in an extra field `details` with the expected error code. [#5688](https://github.com/Azure/azure-sdk-for-js/pull/5688) +- [Breaking] `progress` callback in the option bags of all the helper methods is renamed to `onProgress`. [PR #5676](https://github.com/Azure/azure-sdk-for-js/pull/5676) +- [Breaking] Consolidated `PageRange` and `ClearRange` types. They now have `offset` and `count` attributes as opposed to the older `start` and `end` attributes. + [PR #5632](https://github.com/Azure/azure-sdk-for-js/pull/5632) +- [Breaking] Type of the `permissions` attribute in the options bag `BlobSASSignatureValues` to be passed into `generateBlobSASQueryParameters` is changed to `BlobSASPermissions` from type `string`. [PR #5626](https://github.com/Azure/azure-sdk-for-js/pull/5626) + - Similarly, `AccountSASPermissions` for `generateAccountSASQueryParameters` instead of type `string`. + - Example - permissions attribute in `generateBlobSASQueryParameters` + - `permissions: BlobSASPermissions.parse("racwd").toString()` changes to `BlobSASPermissions.parse("racwd")` +- Renames for following Options interfaces. [PR #5650](https://github.com/Azure/azure-sdk-for-js/pull/5650) + - `DownloadFromBlobOptions` -> `BlobDownloadToBufferOptions`, + - `UploadStreamToBlockBlobOptions` -> `BlockBlobUploadStreamOptions`, + - `UploadToBlockBlobOptions` -> `BlockBlobParallelUploadOptions` +- [Breaking] Appropriate attribute renames in all the interfaces. PRs [#5580](https://github.com/Azure/azure-sdk-for-js/pull/5580),[#5630](https://github.com/Azure/azure-sdk-for-js/pull/5630) + - Example - `nextMarker` -> `continuationToken`, `HTTPClient` -> `HttpClient`, `permission` -> `permissions`, `parallelism` -> `concurrency` +- Bug fix - Name properties on clients now support the Emulator. [PR #5557](https://github.com/Azure/azure-sdk-for-js/pull/5557) + - emulator url when the blobEndpoint is `http://127.0.0.1:10000/devstoreaccount1` supported + - emulator connection string shorthands are supported + - `UseDevelopmentStorage=true` + - (with proxyURI) `UseDevelopmentStorage=true;DevelopmentStorageProxyUri=proxyURI` +- [Breaking] `encrypted` attribute is removed from `BlobMetadata` interface. [PR #5612](https://github.com/Azure/azure-sdk-for-js/pull/5612) +- [Breaking] Return type of `downloadToBuffer` helper method on `BlobClient` is changed to `Promise` from `Promise` [PR #5624](https://github.com/Azure/azure-sdk-for-js/pull/5624) +- [Breaking] IE11 needs `Object.assign` polyfill loaded. [PR #5727](https://github.com/Azure/azure-sdk-for-js/pull/5727) + +## 12.0.0-preview.4 (2019-10-09) + +- [Breaking] Replace string array with boolean flags to specify dataset to include when listing containers or blobs. 
+ - For listing containers + Before this change the option is specified as + ```js + blobServiceClient.listContainers({ + include: "metadata", + }); + ``` + After this change: + ```js + blobServiceClient.listContainers({ + includeMetadata: true, + }); + ``` + - For listing blobs + Before this change the option is specified as + ```js + containerClient.listBlobsFlat({ + include: ["snapshots", "metadata", "uncommittedblobs", "copy", "deleted"], + }); + ``` + After this change: + ```js + containerClient.listBlobsFlat({ + includeCopy: true, + includeDeleted: true, + includeMetadata: true, + includeSnapshots: true, + includeUncommitedBlobs: true, + }); + ``` +- [Breaking] `BlobClient.setTier()` is renamed to `BlobClient.setAccessTier()`. +- [Breaking] Fixed typo - `chanageLease` -> `changeLease`, a method on `LeaseClient`. +- Library tries to load the proxy settings from the environment variables like HTTP_PROXY if the proxy settings are not provided when clients like `BlobServiceClient` or `BlobClient` are instantiated. +- Added development connection string support to connect to the storage emulator [Azurite - Extension for VS Code](https://marketplace.visualstudio.com/items?itemName=Azurite.azurite) + - Development Connection String + - `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;` + - Shorthand notation is also supported + - `UseDevelopmentStorage=true` (or `UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri`) +- Added name properties on all the clients for convenience. + - `accountName` is added to `AppendBlobClient`, `BlobClient`, `BlobServiceClient`, `BlockBlobClient`, `ContainerClient` and `PageBlobClient`. + - `containerName` is added to `AppendBlobClient`, `BlobClient`, `BlockBlobClient`, `ContainerClient` and `PageBlobClient`. + - `blobName` is added to `AppendBlobClient`, `BlobClient`, `BlockBlobClient` and `PageBlobClient`. +- [Breaking] `Models.StorageServiceProperties` is renamed to `Models.BlobServiceProperties` +- [Breaking] `Models.StorageServiceStats` is renamed to `Models.BlobServiceStatistics` +- [Breaking] `UserDelegationKey.signedOid` is renamed to `UserDelegationKey.signedObjectId`. `UserDelegationKey.signedTid` is renamed to `UserDelegationKey.signedTenantId`. + +## 12.0.0-preview.3 (2019-09-11) + +- [Breaking] `RawTokenCredential` is dropped. TokenCredential implementations can be found in the [@azure/identity](https://www.npmjs.com/package/@azure/identity) library for authentication. +- Updated Azure Storage Service API version to 2019-02-02. +- A new API `BlobServiceClient.submitBatch()` supports Blob Batch operation which allows multiple requests to be sent within a single request body. +- Added support for customer provided encryption key. +- Added support for rehydrate priority with additional option to methods `BlobClient.startCopyFromURL()` and `BlobClient.setTier()`. +- APIs `BlobClient.startCopyFromURL()`, `BlockBlobClient.upload()`, `BlockBlobClient.commitBlockList()` and `PageBlobClient.create()` now support set the blob tier within the API call. +- Responses for all APIs now return x-ms-client-request-id through `clientRequestId` that was passed in on the request from client-side. +- Exposed options to accept CRC64 as a transactional data integrity mechanism for data transfer APIs. 
+- Added overloads of the `generateBlobSASQueryParameters` functions to generate user delegation SAS.
+- `expiry` and `start` in `AccessPolicy` are now optional in `ContainerClient.setAccessPolicy` and `ContainerClient.getAccessPolicy`.
+- Exported the `HttpRequestBody` type for those who want to implement a customized HTTP client.
+- Fixed a bug in `BlobClient.downloadToBuffer()` when the provided offset is not 0.
+- Fixed a bug where `Aborter` cannot work during the retry interval.
+- Fixed a bug where `Aborter` throws a timeout error even though `BlockBlobClient.download()` succeeds.
+- Fixed a bug where "err.code.toUpperCase is not a function" is thrown when retrying in browsers.
+- Export `RetryPolicyType`.
+- `Aborter` doesn't require `dom` as a tsconfig lib requirement anymore for the `Event` type.
+- Service SAS generation now supports snapshot access control from API version 2018-11-09 in `generateBlobSASQueryParameters()`.
+- A new API `PageBlobClient.uploadPagesFromURL()` allows pages in a page blob to be written using a range of another blob as a source. This permits synchronous server-side copies to be orchestrated for page blobs of any size.
+- A new API `AppendBlobClient.appendBlockFromURL()` commits a new block of data to the end of an append blob. The method uses a range of another blob as a source. This permits synchronous server-side copies to be orchestrated for append blobs of any size.
+- A new API `BlobClient.syncCopyFromURL()` allows a block blob to be copied synchronously using a URL as a source. This API has a maximum size of 256 MB and preserves metadata and block list.
+- A new API `BlobServiceClient.getUserDelegationKey()` was added to get a key that can be used to generate a user delegation SAS (shared access signature).
+- Updated the HTTP client from axios to node-fetch in the Node.js runtime.
+- A new option `keepAliveOptions` was added to the parameters of `newPipeline()`, which controls keep-alive configurations. Keep-alive is enabled by default.
+- Pass through `options.abortSignal` to the optional `abortSignal` attribute in option bags instead of using `AbortSignal.none` as the default value when `options.abortSignal` is not specified.
+- Basic HTTP proxy authentication support is added. Proxy settings can be passed in the options while creating a new client. Example - [typescript/proxyAuth.ts](https://github.com/Azure/azure-sdk-for-js/blob/@azure/storage-blob_12.0.0-preview.3/sdk/storage/storage-blob/samples/typescript/proxyAuth.ts)
+- Connection strings for explicit storage endpoints are supported. - [Configure Azure Storage connection strings](https://docs.microsoft.com/azure/storage/common/storage-configure-connection-string#create-a-connection-string-for-an-explicit-storage-endpoint)
+
+## 10.5.0 (2019-09-09)
+
+- Updated Azure Storage Service API version to 2019-02-02.
+- A new API `ServiceURL.submitBatch()` supports the Blob Batch operation, which allows multiple requests to be sent within a single request body.
+- Added support for customer provided encryption key.
+- Added support for rehydrate priority with an additional option to the methods `BlobURL.startCopyFromURL()` and `BlobURL.setTier()`.
+- APIs `BlobURL.startCopyFromURL()`, `BlockBlobURL.upload()`, `BlockBlobURL.commitBlockList()` and `PageBlobURL.create()` now support setting the blob tier within the API call.
+- Responses for all APIs now return x-ms-client-request-id through `clientRequestId` that was passed in on the request from the client side.
+- Exposed options to accept CRC64 as a transactional data integrity mechanism for data transfer APIs.
+
+## 10.4.1 (2019-08-28)
+
+- Added overloads of the `generateBlobSASQueryParameters` functions to generate user delegation SAS.
+- `expiry` and `start` in `AccessPolicy` are now optional in `ContainerURL.setAccessPolicy` and `ContainerURL.getAccessPolicy`.
+
+## 12.0.0-preview.2 (2019-08-06)
+
+- [Breaking] The Aborter class is no longer exposed from the package. Use the package [@azure/abort-controller](https://www.npmjs.com/package/@azure/abort-controller) to pass an abort signal to any of the async operations.
+  `AbortController.timeout()` can be utilized as an abort signal.
+- Generalized the credential parameter in client constructors to support `{SharedKeyCredential | AnonymousCredential | TokenCredential}` credentials as a union type.
+- The Storage service allows SAS connection strings with a SAS string and endpoints along with the Account connection string (account name, key and endpoint).
+  In this preview, SAS connection string support is added to the existing connection string client constructors and static methods.
+  - Account connection string example - `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`
+  - SAS connection string example - `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`
+  - SAS connection strings are supported in both Node.js and browser runtimes, unlike the Account connection string, which is supported only in the Node.js runtime.
+
+## 10.4.0 (2019-07-30)
+
+- Updated Azure Storage Service API version to [2018-11-09](https://docs.microsoft.com/rest/api/storageservices/version-2018-11-09).
+- Improved comments for `BlockBlobURL.upload()`.
+- Exported the `HttpRequestBody` type for those who want to implement a customized HTTP client.
+- Fixed a bug in `downloadBlobToBuffer()` and `downloadAzureFileToBuffer()` when the provided offset is not 0.
+- Fixed a bug where `Aborter` cannot work during the retry interval.
+- Fixed a bug where `Aborter` throws a timeout error even though `BlockBlobURL.download()` succeeds.
+- Fixed a bug where "err.code.toUpperCase is not a function" is thrown when retrying in browsers.
+- Export `RetryPolicyType`.
+- `Aborter` doesn't require `dom` as a tsconfig lib requirement anymore for the `Event` type.
+- Service SAS generation now supports snapshot access control from API version 2018-11-09 in `generateBlobSASQueryParameters()`.
+- A new API `PageBlobURL.uploadPagesFromURL()` allows pages in a page blob to be written using a range of another blob as a source. This permits synchronous server-side copies to be orchestrated for page blobs of any size.
+- A new API `AppendBlobURL.appendBlockFromURL()` commits a new block of data to the end of an append blob. The method uses a range of another blob as a source. This permits synchronous server-side copies to be orchestrated for append blobs of any size.
+- A new API `BlobURL.syncCopyFromURL()` allows a block blob to be copied synchronously using a URL as a source. This API has a maximum size of 256 MB and preserves metadata and block list.
+- A new API `ServiceURL.getUserDelegationKey()` was added to get a key that can be used to generate a user delegation SAS (shared access signature).
+- Updated the HTTP client from axios to node-fetch in the Node.js runtime.
+- A new option `keepAliveOptions` was added to the parameters of `StorageURL.newPipeline()`, which controls keep-alive configurations. Keep-alive is enabled by default.
+
+## 12.0.0-preview.1 (2019-07-03)
+
+- [Breaking] Client types are renamed from *URL to *Client.
+  BlobURL, BlockBlobURL, ContainerURL, ServiceURL, StorageURL to BlobClient, BlockBlobClient, ContainerClient, BlobServiceClient, StorageClient respectively.
+- [Breaking] Aborter parameters are now moved into option bags.
+  - The `abortSignal` attribute (optional) in the option bag of the respective module has to be utilized for the `Aborter.timeout()` functionality.
+  - `Aborter.none` is the default value.
+- [Breaking] I- prefixes are removed from interface names.
+  - Example - `IBlobDownloadOptions` is updated to `BlobDownloadOptions`; the new names must be used.
+- [Breaking] The static methods to create client types are removed. The functionality is moved into new instance methods added to the parent clients.
+- [Breaking] The telemetry strings have been updated.
+  - `Azure-Storage/${SDK_VERSION}` is updated to `azsdk-js-storagefile/${SDK_VERSION}`.
+- [Breaking] The withPipeline method is removed.
+- Async iterators with pagination support are added for the listing methods
+  - `listContainers()`, `listBlobsFlat()` and `listBlobsByHierarchy()`
+  - Please refer to the samples for async iterators in the `samples` folder.
+- [Breaking] Methods that list segments (`listBlobFlatSegment()` and `listContainersSegment()`) are no longer exposed in the public API.
+- [Breaking] High level convenience functions are moved into clients as instance member functions.
+  - `uploadFileToBlockBlob()`, `uploadStreamToBlockBlob()` and `uploadBrowserDataToBlockBlob()` -> `BlockBlobClient.uploadFile()`, `BlockBlobClient.uploadStream()` and `BlockBlobClient.uploadBrowserData()` respectively
+  - `downloadBlobToBuffer()` -> `BlobClient.downloadToBuffer()`
+- [Breaking] `StorageClient` is no longer exposed. The `StorageClient.newPipeline()` static method is moved to the top level exported function `newPipeline()`.
+- [Breaking] `TokenCredential` has been renamed to `RawTokenCredential` to make way for the new `@azure/identity` library's `TokenCredential` interface.
+- [Breaking] Blob/Container member methods that manage leases are removed. A new type `LeaseClient` is added to manage leases.
+- Updated dependency `@azure/ms-rest-js` to `@azure/core-http`.
+- Constructor overloads added to client types so they can be constructed from a URL and a pipeline/credential, or from a connection string.
+  - Constructors with overloads - `AppendBlobClient`, `BlobClient`, `BlobServiceClient`, `BlockBlobClient`, `ContainerClient` and `PageBlobClient`
+  - The connection string method is supported only in Node.js (not browsers).
+- Creation/Deletion of child resources is duplicated to the parent client type.
+- HTTP proxy support is added (Node.js only).
+  - Please refer to the `proxyAuth.ts` sample in the `samples/v12/typescript` folder.
+- Request and response headers are now logged at INFO level, with sensitive data redacted.
+- `downloadToFile()` is added to `BlobClient`.
+- Exported the `HttpRequestBody` type to allow implementation of a customized HTTP client.
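+
+A minimal sketch of the constructor overloads listed above (the connection string and resource names are hypothetical placeholders):
+
+```js
+const { BlobClient, ContainerClient } = require("@azure/storage-blob");
+
+// Construct clients directly from a connection string (Node.js only),
+// or alternatively from a URL plus a credential/pipeline.
+const containerClient = new ContainerClient("<connection-string>", "my-container");
+const blobClient = new BlobClient("<connection-string>", "my-container", "my-blob.txt");
+```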
+
+For release notes and more information please visit https://aka.ms/azsdk/releases/july2019preview
+
+## 10.3.0 (2018-12-27)
+
+- [Breaking] Updated the convenience layer methods' enum type parameters into TypeScript union types; this helps reduce the bundle footprint.
+- [Breaking] Updated the URL encoding strategy for the `url` parameters of `new XXXURL(url, pipeline)` methods, such as `new BlobURL(url, pipeline)`.
+  - The URL will accept both encoded and non-encoded URL strings. It will escape non-escaped special characters, like Chinese characters. However, if the blob name includes `?` or `%`, `url` must be encoded manually.
+- [Breaking] `SASQueryParameters` is not going to be exported in the browser bundle, and will be exported in the Node.js runtime.
+- [Breaking] IE11 needs the `Array.prototype.includes` and `Object.keys` polyfills loaded.
+- Updated dependency `ms-rest-js` to `@azure/ms-rest-js`.
+- Fixed the misleading scale description of `Aborter.timeout()`.
+- Added option `maxSingleShotSize` to customize the concurrency upload threshold in bytes for high-level uploading APIs, like `uploadBrowserDataToBlockBlob` or `uploadFileToBlockBlob`.
+- Removed the default 60s server timeout value for the retry option `tryTimeoutInMs` to avoid large blob download streams ending unexpectedly.
+- Fixed an issue where `BlockBlobURL.upload` will fail to upload when the body is a string with special characters.
+
+## 10.2.0-preview (2018-11-27)
+
+- [Breaking] Updated the names of the exported interfaces `IHTTPPipelineLogger` & `IHTTPClient` to `IHttpPipelineLogger` & `IHttpClient`.
+- [Breaking] For `setMetadata()` and `setHTTPHeaders()`, `metadata` and `blobHTTPHeaders` are moved from `options` into the top level parameter list.
+- Fixed bugs and typos in samples.
+- Fixed a bug in generateAccountSASQueryParameters() where the generated signature is not valid.
+- Fixed a bug in generateBlobSASQueryParameters() where cache-control, content-type, content-disposition, content-encoding and content-language are not supported.
+- Fixed a bug in SAS generation where the start and expiry time format is not correct.
+- Removed `File` from the `uploadBrowserDataToBlockBlob` parameter type list, because `File` extends `Blob`, which is already in the list.
+- Fixed typos in `IRange` comments.
+- Removed the unused `marker` field from the options of the `ServiceURL.listContainersSegment` method.
+- Fixed a bug where the `timeout` parameter should use seconds as the unit instead of milliseconds.
+- Added stream retry when the `BlobURL.download` response stream unexpectedly ends.
+
+## 10.1.0-preview (2018-09-29)
+
+- Fixed a shared key authentication error when blob names have spaces.
+- Updated samples in the readme and sample folder to fix undefined headers.
+- Updated readme samples to make them runnable as copy/paste.
+- More documentation around ACCOUNT_SAS and CORS in readme.md and contributing.md.
+- Size of the browser bundle is reduced from 229KB to 175KB (minified version). Thanks Brian Terlson & Rikki Gibson!
+- Set `sideEffects` option to `true` in package.json, which helps webpack 4 with tree shaking.
+- Updated the `browser` and `module` options in package.json; webpack will try to load the ES6 module.
+- Added a prettier config file.
+- Fixed typos and unused imports.
+- [Breaking] Dropped built-in polyfills for `String`, and the following polyfills need to be loaded externally for IE11 now:
+  - `Promise`
+  - `String.prototype.startsWith`
+  - `String.prototype.endsWith`
+  - `String.prototype.repeat`
+  - `String.prototype.includes`
+- [Breaking] `Aborter.None` is renamed to `Aborter.none` for JavaScript naming conventions.
+ +## 10.0.0-preview (2018-09-10) + +- Initial Release. API version 2018-03-28 supported. Please see the README for information on the new design. diff --git a/node_modules/@azure/storage-blob/LICENSE b/node_modules/@azure/storage-blob/LICENSE new file mode 100644 index 00000000..ea8fb151 --- /dev/null +++ b/node_modules/@azure/storage-blob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/storage-blob/README.md b/node_modules/@azure/storage-blob/README.md new file mode 100644 index 00000000..fa7a9e8b --- /dev/null +++ b/node_modules/@azure/storage-blob/README.md @@ -0,0 +1,533 @@ +# Azure Storage Blob client library for JavaScript + +Azure Storage Blob is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data. Unstructured data is data that does not adhere to a particular data model or definition, such as text or binary data. + +This project provides a client library in JavaScript that makes it easy to consume Microsoft Azure Storage Blob service. + +Use the client libraries in this package to: + +- Get/Set Blob Service Properties +- Create/List/Delete Containers +- Create/Read/List/Update/Delete Block Blobs +- Create/Read/List/Update/Delete Page Blobs +- Create/Read/List/Update/Delete Append Blobs + +Key links + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob) +- [Package (npm)](https://www.npmjs.com/package/@azure/storage-blob/) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/@azure/storage-blob) +- [Product documentation](https://docs.microsoft.com/azure/storage/blobs/storage-blobs-overview) +- [Samples](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/samples) +- [Azure Storage Blob REST APIs](https://docs.microsoft.com/rest/api/storageservices/blob-service-rest-api) + +## Getting started + +### Currently supported environments + +- [LTS versions of Node.js](https://nodejs.org/about/releases/) +- Latest versions of Safari, Chrome, Edge, and Firefox. + +See our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. 
+ +### Prerequisites + +- An [Azure subscription](https://azure.microsoft.com/free/) +- A [Storage Account](https://docs.microsoft.com/azure/storage/blobs/storage-quickstart-blobs-portal) + +### Install the package + +The preferred way to install the Azure Storage Blob client library for JavaScript is to use the npm package manager. Type the following into a terminal window: + +```bash +npm install @azure/storage-blob +``` + +### Authenticate the client + +Azure Storage supports several ways to authenticate. In order to interact with the Azure Blob Storage service you'll need to create an instance of a Storage client - `BlobServiceClient`, `ContainerClient`, or `BlobClient` for example. See [samples for creating the `BlobServiceClient`](#create-the-blob-service-client) to learn more about authentication. + +- [Azure Active Directory](#with-defaultazurecredential-from-azureidentity-package) +- [Shared Key](#with-storagesharedkeycredential) +- [Shared access signatures](#with-sas-token) + +#### Azure Active Directory + +The Azure Blob Storage service supports the use of Azure Active Directory to authenticate requests to its APIs. The [`@azure/identity`](https://www.npmjs.com/package/@azure/identity) package provides a variety of credential types that your application can use to do this. Please see the [README for `@azure/identity`](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/identity/identity/README.md) for more details and samples to get you started. + +### Compatibility + +This library is compatible with Node.js and browsers, and validated against LTS Node.js versions (>=8.16.0) and latest versions of Chrome, Firefox and Edge. + +#### Web Workers + +This library requires certain DOM objects to be globally available when used in the browser, which web workers do not make available by default. You will need to polyfill these to make this library work in web workers. + +For more information please refer to our [documentation for using Azure SDK for JS in Web Workers](https://aka.ms/azsdk/js/web-workers) + +This library depends on following DOM APIs which need external polyfills loaded when used in web workers: + +- [`document`](https://developer.mozilla.org/docs/Web/API/Document) +- [`DOMParser`](https://developer.mozilla.org/docs/Web/API/DOMParser) +- [`Node`](https://developer.mozilla.org/docs/Web/API/Node) +- [`XMLSerializer`](https://developer.mozilla.org/docs/Web/API/XMLSerializer) + +#### Differences between Node.js and browsers + +There are differences between Node.js and browsers runtime. When getting started with this library, pay attention to APIs or classes marked with _"ONLY AVAILABLE IN NODE.JS RUNTIME"_ or _"ONLY AVAILABLE IN BROWSERS"_. + +- If a blob holds compressed data in `gzip` or `deflate` format and its content encoding is set accordingly, downloading behavior is different between Node.js and browsers. In Node.js storage clients will download the blob in its compressed format, while in browsers the data will be downloaded in de-compressed format. + +##### Features, interfaces, classes or functions only available in Node.js + +- Shared Key Authorization based on account name and account key + - `StorageSharedKeyCredential` +- Shared Access Signature(SAS) generation + - `generateAccountSASQueryParameters()` + - `generateBlobSASQueryParameters()` +- Parallel uploading and downloading. Note that `BlockBlobClient.uploadData()` is available in both Node.js and browsers. 
+ - `BlockBlobClient.uploadFile()` + - `BlockBlobClient.uploadStream()` + - `BlobClient.downloadToBuffer()` + - `BlobClient.downloadToFile()` + +##### Features, interfaces, classes or functions only available in browsers + +- Parallel uploading and downloading + - `BlockBlobClient.uploadBrowserData()` + +### JavaScript Bundle + +To use this client library in the browser, first you need to use a bundler. For details on how to do this, please refer to our [bundling documentation](https://aka.ms/AzureSDKBundling). + +### CORS + +You need to set up [Cross-Origin Resource Sharing (CORS)](https://docs.microsoft.com/rest/api/storageservices/cross-origin-resource-sharing--cors--support-for-the-azure-storage-services) rules for your storage account if you need to develop for browsers. Go to Azure portal and Azure Storage Explorer, find your storage account, create new CORS rules for blob/queue/file/table service(s). + +For example, you can create following CORS settings for debugging. But please customize the settings carefully according to your requirements in production environment. + +- Allowed origins: \* +- Allowed verbs: DELETE,GET,HEAD,MERGE,POST,OPTIONS,PUT +- Allowed headers: \* +- Exposed headers: \* +- Maximum age (seconds): 86400 + +## Key concepts + +Blob storage is designed for: + +- Serving images or documents directly to a browser. +- Storing files for distributed access. +- Streaming video and audio. +- Writing to log files. +- Storing data for backup and restore, disaster recovery, and archiving. +- Storing data for analysis by an on-premises or Azure-hosted service. + +Blob storage offers three types of resources: + +- The _storage account_ used via `BlobServiceClient` +- A _container_ in the storage account used via `ContainerClient` +- A _blob_ in a container used via `BlobClient` + +## Examples + +- [Import the package](#import-the-package) +- [Create the blob service client](#create-the-blob-service-client) +- [Create a new container](#create-a-new-container) +- [List the containers](#list-the-containers) +- [Create a blob by uploading data](#create-a-blob-by-uploading-data) +- [List blobs inside a container](#list-blobs-inside-a-container) +- [Download a blob and convert it to a string (Node.js)](#download-a-blob-and-convert-it-to-a-string-nodejs) +- [Download a blob and convert it to a string (Browsers)](#download-a-blob-and-convert-it-to-a-string-browsers) + +### Import the package + +To use the clients, import the package into your file: + +```javascript +const AzureStorageBlob = require("@azure/storage-blob"); +``` + +Alternatively, selectively import only the types you need: + +```javascript +const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob"); +``` + +### Create the blob service client + +The `BlobServiceClient` requires an URL to the blob service and an access credential. It also optionally accepts some settings in the `options` parameter. 
+ +#### with `DefaultAzureCredential` from `@azure/identity` package + +**Recommended way to instantiate a `BlobServiceClient`** + +Setup : Reference - Authorize access to blobs and queues with Azure Active Directory from a client application - https://docs.microsoft.com/azure/storage/common/storage-auth-aad-app + +- Register a new AAD application and give permissions to access Azure Storage on behalf of the signed-in user + + - Register a new application in the Azure Active Directory(in the azure-portal) - https://docs.microsoft.com/azure/active-directory/develop/quickstart-register-app + - In the `API permissions` section, select `Add a permission` and choose `Microsoft APIs`. + - Pick `Azure Storage` and select the checkbox next to `user_impersonation` and then click `Add permissions`. This would allow the application to access Azure Storage on behalf of the signed-in user. + +- Grant access to Azure Blob data with RBAC in the Azure Portal + + - RBAC roles for blobs and queues - https://docs.microsoft.com/azure/storage/common/storage-auth-aad-rbac-portal. + - In the azure portal, go to your storage-account and assign **Storage Blob Data Contributor** role to the registered AAD application from `Access control (IAM)` tab (in the left-side-navbar of your storage account in the azure-portal). + +- Environment setup for the sample + - From the overview page of your AAD Application, note down the `CLIENT ID` and `TENANT ID`. In the "Certificates & Secrets" tab, create a secret and note that down. + - Make sure you have AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET as environment variables to successfully execute the sample(Can leverage process.env). + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +// Enter your storage account name +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); +``` + +See the [Azure AD Auth sample](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/javascript/azureAdAuth.js) for a complete example using this method. + +[Note - Above steps are only for Node.js] + +#### using connection string + +Alternatively, you can instantiate a `BlobServiceClient` using the `fromConnectionString()` static method with the full connection string as the argument. (The connection string can be obtained from the azure portal.) [ONLY AVAILABLE IN NODE.JS RUNTIME] + +```javascript +const { BlobServiceClient } = require("@azure/storage-blob"); + +const connStr = ""; + +const blobServiceClient = BlobServiceClient.fromConnectionString(connStr); +``` + +#### with `StorageSharedKeyCredential` + +Alternatively, you instantiate a `BlobServiceClient` with a `StorageSharedKeyCredential` by passing account-name and account-key as arguments. (The account-name and account-key can be obtained from the azure portal.) 
+[ONLY AVAILABLE IN NODE.JS RUNTIME] + +```javascript +const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob"); + +// Enter your storage account name and shared key +const account = ""; +const accountKey = ""; + +// Use StorageSharedKeyCredential with storage account and account key +// StorageSharedKeyCredential is only available in Node.js runtime, not in browsers +const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey); +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + sharedKeyCredential +); +``` + +#### with SAS Token + +Also, You can instantiate a `BlobServiceClient` with a shared access signatures (SAS). You can get the SAS token from the Azure Portal or generate one using `generateAccountSASQueryParameters()`. + +```javascript +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const sas = ""; + +const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net${sas}`); +``` + +### Create a new container + +Use `BlobServiceClient.getContainerClient()` to get a container client instance then create a new container resource. + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + // Create a container + const containerName = `newcontainer${new Date().getTime()}`; + const containerClient = blobServiceClient.getContainerClient(containerName); + const createContainerResponse = await containerClient.create(); + console.log(`Create container ${containerName} successfully`, createContainerResponse.requestId); +} + +main(); +``` + +### List the containers + +Use `BlobServiceClient.listContainers()` function to iterate the containers, +with the new `for-await-of` syntax: + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + let i = 1; + let containers = blobServiceClient.listContainers(); + for await (const container of containers) { + console.log(`Container ${i++}: ${container.name}`); + } +} + +main(); +``` + +Alternatively without using `for-await-of`: + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + let i = 1; + let iter = blobServiceClient.listContainers(); + let containerItem = await iter.next(); + while (!containerItem.done) { + console.log(`Container ${i++}: ${containerItem.value.name}`); + containerItem = await iter.next(); + } +} + +main(); +``` + +In addition, pagination is supported for listing too via `byPage()`: + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = 
require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + let i = 1; + for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + if (response.containerItems) { + for (const container of response.containerItems) { + console.log(`Container ${i++}: ${container.name}`); + } + } + } +} + +main(); +``` + +For a complete sample on iterating containers please see [samples/v12/typescript/src/listContainers.ts](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/typescript/src/listContainers.ts). + +### Create a blob by uploading data + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +const containerName = ""; + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + + const content = "Hello world!"; + const blobName = "newblob" + new Date().getTime(); + const blockBlobClient = containerClient.getBlockBlobClient(blobName); + const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + console.log(`Upload block blob ${blobName} successfully`, uploadBlobResponse.requestId); +} + +main(); +``` + +### List blobs inside a container + +Similar to listing containers. + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +const containerName = ""; + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + + let i = 1; + let blobs = containerClient.listBlobsFlat(); + for await (const blob of blobs) { + console.log(`Blob ${i++}: ${blob.name}`); + } +} + +main(); +``` + +For a complete sample on iterating blobs please see [samples/v12/typescript/src/listBlobsFlat.ts](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/typescript/src/listBlobsFlat.ts). 
+ +### Download a blob and convert it to a string (Node.js) + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +const containerName = ""; +const blobName = ""; + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + const blobClient = containerClient.getBlobClient(blobName); + + // Get blob content from position 0 to the end + // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody + const downloadBlockBlobResponse = await blobClient.download(); + const downloaded = ( + await streamToBuffer(downloadBlockBlobResponse.readableStreamBody) + ).toString(); + console.log("Downloaded blob content:", downloaded); + + // [Node.js only] A helper method used to read a Node.js readable stream into a Buffer + async function streamToBuffer(readableStream) { + return new Promise((resolve, reject) => { + const chunks = []; + readableStream.on("data", (data) => { + chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + }); + readableStream.on("end", () => { + resolve(Buffer.concat(chunks)); + }); + readableStream.on("error", reject); + }); + } +} + +main(); +``` + +### Download a blob and convert it to a string (Browsers). + +Please refer to the [JavaScript Bundle](#javascript-bundle) section for more information on using this library in the browser. + +```javascript +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const sas = ""; +const containerName = ""; +const blobName = ""; + +const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net${sas}`); + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + const blobClient = containerClient.getBlobClient(blobName); + + // Get blob content from position 0 to the end + // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody + const downloadBlockBlobResponse = await blobClient.download(); + const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + console.log("Downloaded blob content", downloaded); + + // [Browsers only] A helper method used to convert a browser Blob into string. + async function blobToString(blob) { + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + fileReader.onloadend = (ev) => { + resolve(ev.target.result); + }; + fileReader.onerror = reject; + fileReader.readAsText(blob); + }); + } +} + +main(); +``` + +A complete example of simple scenarios is at [samples/v12/typescript/src/sharedKeyAuth.ts](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/typescript/src/sharedKeyAuth.ts). + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. 
Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +More code samples: + +- [Blob Storage Samples (JavaScript)](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/samples/v12/javascript) +- [Blob Storage Samples (TypeScript)](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/samples/v12/typescript) +- [Blob Storage Test Cases](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/test/) + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +Also refer to [Storage specific guide](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/CONTRIBUTING.md) for additional information on setting up the test environment for storage libraries. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fstorage%2Fstorage-blob%2FREADME.png) diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js new file mode 100644 index 00000000..bd2fea40 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=BatchResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map new file mode 100644 index 00000000..43638fcd --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchResponse.js","sourceRoot":"","sources":["../../../src/BatchResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { HttpHeaders } from \"@azure/core-http\";\n\n/**\n * The response data associated with a single request within a batch operation.\n */\nexport interface BatchSubResponse {\n /**\n * The status code of the sub operation.\n */\n status: number;\n\n /**\n * The status message of the sub operation.\n */\n statusMessage: string;\n\n /**\n * The error code of the sub operation, if the sub operation failed.\n */\n errorCode?: string;\n\n /**\n * The HTTP response headers.\n */\n headers: HttpHeaders;\n\n /**\n * The body as text.\n */\n bodyAsText?: string;\n\n /**\n * The batch sub request corresponding to the sub response.\n */\n _request: BatchSubRequest;\n}\n\n/**\n * The multipart/mixed response which contains the response for each subrequest.\n */\nexport interface ParsedBatchResponse {\n /**\n * The parsed sub responses.\n */\n subResponses: BatchSubResponse[];\n\n /**\n * The succeeded executed sub responses' count;\n */\n subResponsesSucceededCount: number;\n\n /**\n * The failed executed sub responses' count;\n */\n subResponsesFailedCount: number;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js new file mode 100644 index 00000000..1d083adc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js @@ -0,0 +1,136 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders } from "@azure/core-http"; +import { HTTP_VERSION_1_1, HTTP_LINE_ENDING, HeaderConstants, HTTPURLConnection, } from "./utils/constants"; +import { getBodyAsText } from "./BatchUtils"; +import { logger } from "./log"; +const HTTP_HEADER_DELIMITER = ": "; +const SPACE_DELIMITER = " "; +const NOT_FOUND = -1; +/** + * Util class for parsing batch response. + */ +export class BatchResponseParser { + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + // In special case(reported), server may return invalid content-type which could not be parsed. + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + // This should be prevent during coding. + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. + if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); // string before first response boundary is useless + const subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + // Parse sub subResponses. 
+ for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = new HttpHeaders(); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. + // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + // Parse headers of sub response. + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. 
+ if (contentId !== NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount, + }; + } +} +//# sourceMappingURL=BatchResponseParser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js.map new file mode 100644 index 00000000..818e7e9b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchResponseParser.js","sourceRoot":"","sources":["../../../src/BatchResponseParser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAG/C,OAAO,EACL,gBAAgB,EAChB,gBAAgB,EAChB,eAAe,EACf,iBAAiB,GAClB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAG7C,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAE/B,MAAM,qBAAqB,GAAG,IAAI,CAAC;AACnC,MAAM,eAAe,GAAG,GAAG,CAAC;AAC5B,MAAM,SAAS,GAAG,CAAC,CAAC,CAAC;AAErB;;GAEG;AACH,MAAM,OAAO,mBAAmB;IAO9B,YACE,aAA8C,EAC9C,WAAyC;QAEzC,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,WAAW,EAAE;YAChD,+FAA+F;YAC/F,MAAM,IAAI,UAAU,CAAC,mEAAmE,CAAC,CAAC;SAC3F;QAED,IAAI,CAAC,WAAW,IAAI,WAAW,CAAC,IAAI,KAAK,CAAC,EAAE;YAC1C,wCAAwC;YACxC,MAAM,IAAI,UAAU,CAAC,0DAA0D,CAAC,CAAC;SAClF;QAED,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,aAAa,CAAC,WAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3E,IAAI,CAAC,iBAAiB,GAAG,KAAK,IAAI,CAAC,qBAAqB,GAAG,gBAAgB,EAAE,CAAC;QAC9E,IAAI,CAAC,mBAAmB,GAAG,KAAK,IAAI,CAAC,qBAAqB,IAAI,CAAC;IACjE,CAAC;IAED,yHAAyH;IAClH,KAAK,CAAC,kBAAkB;QAC7B,uGAAuG;QACvG,0BAA0B;QAC1B,IAAI,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,KAAK,iBAAiB,CAAC,aAAa,EAAE;YAC3E,MAAM,IAAI,KAAK,CACb,qDAAqD,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,IAAI,CAC7F,CAAC;SACH;QAED,MAAM,kBAAkB,GAAG,MAAM,aAAa,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAEnE,MAAM,YAAY,GAAG,kBAAkB;aACpC,KAAK,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,CAAC,iCAAiC;aACpE,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC;aAC7B,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,mDAAmD;QAChE,MAAM,gBAAgB,GAAG,YAAY,CAAC,MAAM,CAAC;QAE7C,uDAAuD;QACvD,4EAA4E;QAC5E,+FAA+F;QAC/F,uGAAuG;QACvG,IAAI,gBAAgB,KAAK,IAAI,CAAC,WAAW,CAAC,IAAI,IAAI,gBAAgB,KAAK,CAAC,EAAE;YACxE,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;SAC7F;QAED,MAAM,wBAAwB,GAA4B,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC;QACtF,IAAI,0BAA0B,GAAW,CAAC,CAAC;QAC3C,IAAI,uBAAuB,GAAW,CAAC,CAAC;QAExC,0BAA0B;QAC1B,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,gBAAgB,EAAE,KAAK,EAAE,EAAE;YACrD,MAAM,WAAW,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;YACxC,MAAM,uBAAuB,GAAG,EAAsB,CAAC;YACvD,uBAAuB,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAEpD,MAAM,aAAa,GAAG,WAAW,CAAC,KAAK,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC;YAC/D,IAAI,uBAAuB,GAAG,KAAK,CAAC;YACpC,IAAI,qBAAqB,GAAG,KAAK,CAAC;YAClC,IAAI,aAAa,GAAG,KAAK,CAAC;YAC1B,IAAI,SAAS,GAAG,SAAS,CAAC;YAE1B,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;gBACxC,IAAI,CAAC,uBAAuB,EAAE;oBAC5B,yCAAyC;oBACzC,IAAI,YAAY,CAAC,UAAU,CAAC,
eAAe,CAAC,UAAU,CAAC,EAAE;wBACvD,SAAS,GAAG,QAAQ,CAAC,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;qBACpE;oBAED,oFAAoF;oBACpF,iCAAiC;oBACjC,IAAI,YAAY,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;wBAC7C,uBAAuB,GAAG,IAAI,CAAC;wBAE/B,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;wBACnD,uBAAuB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;wBACrD,uBAAuB,CAAC,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;qBAC/E;oBAED,SAAS,CAAC,iHAAiH;iBAC5H;gBAED,IAAI,YAAY,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;oBAC9B,4GAA4G;oBAC5G,IAAI,CAAC,qBAAqB,EAAE;wBAC1B,qBAAqB,GAAG,IAAI,CAAC;qBAC9B;oBAED,SAAS,CAAC,kBAAkB;iBAC7B;gBAED,2EAA2E;gBAC3E,IAAI,CAAC,qBAAqB,EAAE;oBAC1B,IAAI,YAAY,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;wBACtD,2DAA2D;wBAC3D,MAAM,IAAI,KAAK,CACb,uCAAuC,YAAY,oCAAoC,qBAAqB,IAAI,CACjH,CAAC;qBACH;oBAED,iCAAiC;oBACjC,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC;oBACzD,uBAAuB,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC1D,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,eAAe,CAAC,eAAe,EAAE;wBACjD,uBAAuB,CAAC,SAAS,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;wBAC9C,aAAa,GAAG,IAAI,CAAC;qBACtB;iBACF;qBAAM;oBACL,iCAAiC;oBACjC,IAAI,CAAC,uBAAuB,CAAC,UAAU,EAAE;wBACvC,uBAAuB,CAAC,UAAU,GAAG,EAAE,CAAC;qBACzC;oBAED,uBAAuB,CAAC,UAAU,IAAI,YAAY,CAAC;iBACpD;aACF,CAAC,gBAAgB;YAElB,kHAAkH;YAClH,uHAAuH;YACvH,oHAAoH;YACpH,uHAAuH;YACvH,IACE,SAAS,KAAK,SAAS;gBACvB,MAAM,CAAC,SAAS,CAAC,SAAS,CAAC;gBAC3B,SAAS,IAAI,CAAC;gBACd,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI;gBACjC,wBAAwB,CAAC,SAAS,CAAC,KAAK,SAAS,EACjD;gBACA,uBAAuB,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAE,CAAC;gBACpE,wBAAwB,CAAC,SAAS,CAAC,GAAG,uBAAuB,CAAC;aAC/D;iBAAM;gBACL,MAAM,CAAC,KAAK,CACV,gBAAgB,KAAK,uEAAuE,SAAS,EAAE,CACxG,CAAC;aACH;YAED,IAAI,aAAa,EAAE;gBACjB,uBAAuB,EAAE,CAAC;aAC3B;iBAAM;gBACL,0BAA0B,EAAE,CAAC;aAC9B;SACF;QAED,OAAO;YACL,YAAY,EAAE,wBAAwB;YACtC,0BAA0B,EAAE,0BAA0B;YACtD,uBAAuB,EAAE,uBAAuB;SACjD,CAAC;IACJ,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"@azure/core-http\";\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport {\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n HeaderConstants,\n HTTPURLConnection,\n} from \"./utils/constants\";\nimport { getBodyAsText } from \"./BatchUtils\";\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { BatchSubResponse, ParsedBatchResponse } from \"./BatchResponse\";\nimport { logger } from \"./log\";\n\nconst HTTP_HEADER_DELIMITER = \": \";\nconst SPACE_DELIMITER = \" \";\nconst NOT_FOUND = -1;\n\n/**\n * Util class for parsing batch response.\n */\nexport class BatchResponseParser {\n private readonly batchResponse: ServiceSubmitBatchResponseModel;\n private readonly responseBatchBoundary: string;\n private readonly perResponsePrefix: string;\n private readonly batchResponseEnding: string;\n private readonly subRequests: Map;\n\n constructor(\n batchResponse: ServiceSubmitBatchResponseModel,\n subRequests: Map\n ) {\n if (!batchResponse || !batchResponse.contentType) {\n // In special case(reported), server may return invalid content-type which could not be parsed.\n throw new RangeError(\"batchResponse is malformed or doesn't contain valid content-type.\");\n }\n\n if (!subRequests || subRequests.size === 0) {\n // This should be prevent during coding.\n throw new RangeError(\"Invalid state: subRequests is not provided or size is 0.\");\n }\n\n this.batchResponse = batchResponse;\n this.subRequests = subRequests;\n this.responseBatchBoundary = 
this.batchResponse.contentType!.split(\"=\")[1];\n this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`;\n this.batchResponseEnding = `--${this.responseBatchBoundary}--`;\n }\n\n // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response\n public async parseBatchResponse(): Promise {\n // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse\n // sub request's response.\n if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) {\n throw new Error(\n `Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`\n );\n }\n\n const responseBodyAsText = await getBodyAsText(this.batchResponse);\n\n const subResponses = responseBodyAsText\n .split(this.batchResponseEnding)[0] // string after ending is useless\n .split(this.perResponsePrefix)\n .slice(1); // string before first response boundary is useless\n const subResponseCount = subResponses.length;\n\n // Defensive coding in case of potential error parsing.\n // Note: subResponseCount == 1 is special case where sub request is invalid.\n // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.\n // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.\n if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) {\n throw new Error(\"Invalid state: sub responses' count is not equal to sub requests' count.\");\n }\n\n const deserializedSubResponses: Array = new Array(subResponseCount);\n let subResponsesSucceededCount: number = 0;\n let subResponsesFailedCount: number = 0;\n\n // Parse sub subResponses.\n for (let index = 0; index < subResponseCount; index++) {\n const subResponse = subResponses[index];\n const deserializedSubResponse = {} as BatchSubResponse;\n deserializedSubResponse.headers = new HttpHeaders();\n\n const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`);\n let subRespHeaderStartFound = false;\n let subRespHeaderEndFound = false;\n let subRespFailed = false;\n let contentId = NOT_FOUND;\n\n for (const responseLine of responseLines) {\n if (!subRespHeaderStartFound) {\n // Convention line to indicate content ID\n if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {\n contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);\n }\n\n // Http version line with status code indicates the start of sub request's response.\n // Example: HTTP/1.1 202 Accepted\n if (responseLine.startsWith(HTTP_VERSION_1_1)) {\n subRespHeaderStartFound = true;\n\n const tokens = responseLine.split(SPACE_DELIMITER);\n deserializedSubResponse.status = parseInt(tokens[1]);\n deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);\n }\n\n continue; // Skip convention headers not specifically for sub request i.e. 
Content-Type: application/http and Content-ID: *\n }\n\n if (responseLine.trim() === \"\") {\n // Sub response's header start line already found, and the first empty line indicates header end line found.\n if (!subRespHeaderEndFound) {\n subRespHeaderEndFound = true;\n }\n\n continue; // Skip empty line\n }\n\n // Note: when code reach here, it indicates subRespHeaderStartFound == true\n if (!subRespHeaderEndFound) {\n if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {\n // Defensive coding to prevent from missing valuable lines.\n throw new Error(\n `Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`\n );\n }\n\n // Parse headers of sub response.\n const tokens = responseLine.split(HTTP_HEADER_DELIMITER);\n deserializedSubResponse.headers.set(tokens[0], tokens[1]);\n if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {\n deserializedSubResponse.errorCode = tokens[1];\n subRespFailed = true;\n }\n } else {\n // Assemble body of sub response.\n if (!deserializedSubResponse.bodyAsText) {\n deserializedSubResponse.bodyAsText = \"\";\n }\n\n deserializedSubResponse.bodyAsText += responseLine;\n }\n } // Inner for end\n\n // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.\n // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it\n // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that\n // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose.\n if (\n contentId !== NOT_FOUND &&\n Number.isInteger(contentId) &&\n contentId >= 0 &&\n contentId < this.subRequests.size &&\n deserializedSubResponses[contentId] === undefined\n ) {\n deserializedSubResponse._request = this.subRequests.get(contentId)!;\n deserializedSubResponses[contentId] = deserializedSubResponse;\n } else {\n logger.error(\n `subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`\n );\n }\n\n if (subRespFailed) {\n subResponsesFailedCount++;\n } else {\n subResponsesSucceededCount++;\n }\n }\n\n return {\n subResponses: deserializedSubResponses,\n subResponsesSucceededCount: subResponsesSucceededCount,\n subResponsesFailedCount: subResponsesFailedCount,\n };\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js new file mode 100644 index 00000000..1a3926a1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { blobToString } from "./utils/utils.browser"; +export async function getBodyAsText(batchResponse) { + const blob = (await batchResponse.blobBody); + return blobToString(blob); +} +export function utf8ByteLength(str) { + return new Blob([str]).size; +} +//# sourceMappingURL=BatchUtils.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map new file mode 100644 index 00000000..3d84ba60 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchUtils.browser.js","sourceRoot":"","sources":["../../../src/BatchUtils.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAErD,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,MAAM,IAAI,GAAG,CAAC,MAAM,aAAa,CAAC,QAAQ,CAAS,CAAC;IACpD,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AAC9B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { blobToString } from \"./utils/utils.browser\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise {\n const blob = (await batchResponse.blobBody) as Blob;\n return blobToString(blob);\n}\n\nexport function utf8ByteLength(str: string): number {\n return new Blob([str]).size;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js new file mode 100644 index 00000000..35be3b10 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { streamToBuffer2 } from "./utils/utils.node"; +import { BATCH_MAX_PAYLOAD_IN_BYTES } from "./utils/constants"; +export async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + // Slice the buffer to trim the empty ending. 
+ buffer = buffer.slice(0, responseLength); + return buffer.toString(); +} +export function utf8ByteLength(str) { + return Buffer.byteLength(str); +} +//# sourceMappingURL=BatchUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map new file mode 100644 index 00000000..959a5a1e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchUtils.js","sourceRoot":"","sources":["../../../src/BatchUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAE/D,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAEtD,MAAM,cAAc,GAAG,MAAM,eAAe,CAC1C,aAAa,CAAC,kBAA2C,EACzD,MAAM,CACP,CAAC;IAEF,6CAA6C;IAC7C,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;IAEzC,OAAO,MAAM,CAAC,QAAQ,EAAE,CAAC;AAC3B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { streamToBuffer2 } from \"./utils/utils.node\";\nimport { BATCH_MAX_PAYLOAD_IN_BYTES } from \"./utils/constants\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise {\n let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n\n const responseLength = await streamToBuffer2(\n batchResponse.readableStreamBody as NodeJS.ReadableStream,\n buffer\n );\n\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n\n return buffer.toString();\n}\n\nexport function utf8ByteLength(str: string): number {\n return Buffer.byteLength(str);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js new file mode 100644 index 00000000..6de406bf --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js @@ -0,0 +1,282 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, deserializationPolicy, generateUuid, HttpHeaders, WebResource, isTokenCredential, bearerTokenAuthenticationPolicy, isNode, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { BlobClient } from "./Clients"; +import { Mutex } from "./utils/Mutex"; +import { Pipeline } from "./Pipeline"; +import { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from "./utils/utils.common"; +import { HeaderConstants, BATCH_MAX_REQUEST, HTTP_VERSION_1_1, HTTP_LINE_ENDING, StorageOAuthScopes, } from "./utils/constants"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { createSpan } from "./utils/tracing"; +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +export class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } + /** + * Get the value of Content-Type for a batch request. 
+ * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } + finally { + await Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); + try { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); + try { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * Inner batch request class which is responsible for assembling and serializing sub requests. + * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. + */ +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = generateUuid(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + createPipeline(credential) { + const isAnonymousCreds = credential instanceof AnonymousCredential; + const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] + const factories = new Array(policyFactoryLength); + factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer + factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers + if (!isAnonymousCreds) { + factories[2] = isTokenCredential(credential) + ? 
attachCredential(bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + : credential; + } + factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire + return new Pipeline(factories, {}); + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + "", + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const header of request.headers.headersArray()) { + this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + // Fast fail if url for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. + getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +class BatchRequestAssemblePolicy extends BaseRequestPolicy { + constructor(batchRequest, nextPolicy, options) { + super(nextPolicy, options); + this.dummyResponse = { + request: new WebResource(), + status: 200, + headers: new HttpHeaders(), + }; + this.batchRequest = batchRequest; + } + async sendRequest(request) { + await this.batchRequest.appendSubRequestToBody(request); + return this.dummyResponse; // Intercept request from going to wire + } +} +class BatchRequestAssemblePolicyFactory { + constructor(batchRequest) { + this.batchRequest = batchRequest; + } + create(nextPolicy, options) { + return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + } +} +class BatchHeaderFilterPolicy extends BaseRequestPolicy { + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(request) { + let xMsHeaderName = ""; + for (const header of request.headers.headersArray()) { + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. 
+ } + return this._nextPolicy.sendRequest(request); + } +} +class BatchHeaderFilterPolicyFactory { + create(nextPolicy, options) { + return new BatchHeaderFilterPolicy(nextPolicy, options); + } +} +//# sourceMappingURL=BlobBatch.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map new file mode 100644 index 00000000..a6351fa9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobBatch.js","sourceRoot":"","sources":["../../../src/BlobBatch.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EACjB,qBAAqB,EACrB,YAAY,EACZ,WAAW,EAKX,WAAW,EAEX,iBAAiB,EACjB,+BAA+B,EAC/B,MAAM,GACP,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,UAAU,EAAyC,MAAM,WAAW,CAAC;AAE9E,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AAChG,OAAO,EACL,eAAe,EACf,iBAAiB,EACjB,gBAAgB,EAChB,gBAAgB,EAChB,kBAAkB,GACnB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAmB7C;;;GAGG;AACH,MAAM,OAAO,SAAS;IAKpB;QAHiB,UAAK,GAAW,OAAO,CAAC;QAIvC,IAAI,CAAC,YAAY,GAAG,IAAI,iBAAiB,EAAE,CAAC;IAC9C,CAAC;IAED;;;;OAIG;IACI,uBAAuB;QAC5B,OAAO,IAAI,CAAC,YAAY,CAAC,uBAAuB,EAAE,CAAC;IACrD,CAAC;IAED;;OAEG;IACI,kBAAkB;QACvB,OAAO,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,CAAC;IAChD,CAAC;IAED;;OAEG;IACI,cAAc;QACnB,OAAO,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,CAAC;IAC5C,CAAC;IAEO,KAAK,CAAC,qBAAqB,CACjC,UAA2B,EAC3B,sBAA2C;QAE3C,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAE7B,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,gBAAgB,CAAC,UAAU,CAAC,CAAC;YAC/C,MAAM,sBAAsB,EAAE,CAAC;YAC/B,IAAI,CAAC,YAAY,CAAC,iBAAiB,CAAC,UAAU,CAAC,CAAC;SACjD;gBAAS;YACR,MAAM,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;SAChC;IACH,CAAC;IAEO,YAAY,CAAC,SAAqC;QACxD,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;SAC5B;QACD,IAAI,IAAI,CAAC,SAAS,KAAK,SAAS,EAAE;YAChC,MAAM,IAAI,UAAU,CAClB,yFAAyF,IAAI,CAAC,SAAS,cAAc,CACtH,CAAC;SACH;IACH,CAAC;IAqCM,KAAK,CAAC,UAAU,CACrB,eAAoC,EACpC,mBAKa,EACb,OAA2B;QAE3B,IAAI,GAAW,CAAC;QAChB,IAAI,UAA8E,CAAC;QAEnF,IACE,OAAO,eAAe,KAAK,QAAQ;YACnC,CAAC,CAAC,MAAM,IAAI,mBAAmB,YAAY,0BAA0B,CAAC;gBACpE,mBAAmB,YAAY,mBAAmB;gBAClD,iBAAiB,CAAC,mBAAmB,CAAC,CAAC,EACzC;YACA,iBAAiB;YACjB,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,mBAAmB,CAAC;SAClC;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;YAChD,kBAAkB;YAClB,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;YAC1B,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,OAAO,GAAG,mBAAwC,CAAC;SACpD;aAAM;YACL,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;SACH;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;YAC5B,MAAM,IAAI,CAAC,qBAAqB,CAC9B;gBACE,GAAG,EAAE,GAAG;gBACR,UAAU,EAAE,UAAU;aACvB,EACD,KAAK,IAAI,EAAE;gBACT,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAC5E,cAAc,CACf,CAAC;YACJ,CAAC,CACF,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAgDM,KAAK,CAAC,iBAAiB,CAC5B,eAAoC,EACpC,gBAIc,EACd,aAA+C,EAC/C,OAA4B;QAE5B,IAAI,GAAW,CAAC;QAChB,IAAI,UAA8E,CAAC;QACnF,IAAI,IAAgB,CAAC;QAErB,IACE,OAAO,eAAe,KAAK,QAAQ;YA
CnC,CAAC,CAAC,MAAM,IAAI,gBAAgB,YAAY,0BAA0B,CAAC;gBACjE,gBAAgB,YAAY,mBAAmB;gBAC/C,iBAAiB,CAAC,gBAAgB,CAAC,CAAC,EACtC;YACA,iBAAiB;YACjB,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,gBAGM,CAAC;YACpB,IAAI,GAAG,aAA2B,CAAC;SACpC;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;YAChD,kBAAkB;YAClB,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;YAC1B,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,IAAI,GAAG,gBAA8B,CAAC;YACtC,OAAO,GAAG,aAAmC,CAAC;SAC/C;aAAM;YACL,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;SACH;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAE1F,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,eAAe,CAAC,CAAC;YACnC,MAAM,IAAI,CAAC,qBAAqB,CAC9B;gBACE,GAAG,EAAE,GAAG;gBACR,UAAU,EAAE,UAAU;aACvB,EACD,KAAK,IAAI,EAAE;gBACT,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,aAAa,CACnF,IAAI,EACJ,cAAc,CACf,CAAC;YACJ,CAAC,CACF,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,iBAAiB;IASrB;QACE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;QAEf,MAAM,QAAQ,GAAG,YAAY,EAAE,CAAC;QAEhC,kBAAkB;QAClB,IAAI,CAAC,QAAQ,GAAG,SAAS,QAAQ,EAAE,CAAC;QACpC,oBAAoB;QACpB,iCAAiC;QACjC,oCAAoC;QACpC,IAAI,CAAC,gBAAgB,GAAG,KAAK,IAAI,CAAC,QAAQ,GAAG,gBAAgB,GAAG,eAAe,CAAC,YAAY,qBAAqB,gBAAgB,GAAG,eAAe,CAAC,yBAAyB,UAAU,CAAC;QACxL,4CAA4C;QAC5C,IAAI,CAAC,oBAAoB,GAAG,6BAA6B,IAAI,CAAC,QAAQ,EAAE,CAAC;QACzE,sBAAsB;QACtB,IAAI,CAAC,kBAAkB,GAAG,KAAK,IAAI,CAAC,QAAQ,IAAI,CAAC;QAEjD,IAAI,CAAC,WAAW,GAAG,IAAI,GAAG,EAAE,CAAC;IAC/B,CAAC;IAED;;;;;;OAMG;IACI,cAAc,CACnB,UAA8E;QAE9E,MAAM,gBAAgB,GAAG,UAAU,YAAY,mBAAmB,CAAC;QACnE,MAAM,mBAAmB,GAAG,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,mHAAmH;QAC/K,MAAM,SAAS,GAA2B,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAEzE,SAAS,CAAC,CAAC,CAAC,GAAG,qBAAqB,EAAE,CAAC,CAAC,8DAA8D;QACtG,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,8BAA8B,EAAE,CAAC,CAAC,gEAAgE;QACrH,IAAI,CAAC,gBAAgB,EAAE;YACrB,SAAS,CAAC,CAAC,CAAC,GAAG,iBAAiB,CAAC,UAAU,CAAC;gBAC1C,CAAC,CAAC,gBAAgB,CACd,+BAA+B,CAAC,UAAU,EAAE,kBAAkB,CAAC,EAC/D,UAAU,CACX;gBACH,CAAC,CAAC,UAAU,CAAC;SAChB;QACD,SAAS,CAAC,mBAAmB,GAAG,CAAC,CAAC,GAAG,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC,CAAC,yFAAyF;QAE3K,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;IACrC,CAAC;IAEM,sBAAsB,CAAC,OAAoB;QAChD,gCAAgC;QAChC,IAAI,CAAC,IAAI,IAAI;YACX,IAAI,CAAC,gBAAgB;YACrB,GAAG,eAAe,CAAC,UAAU,KAAK,IAAI,CAAC,cAAc,EAAE;YACvD,EAAE;YACF,GAAG,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,kBAAkB,CAChD,OAAO,CAAC,GAAG,CACZ,IAAI,gBAAgB,GAAG,gBAAgB,EAAE,EAAE,qCAAqC;SAClF,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;QAEzB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,IAAI,CAAC,IAAI,IAAI,GAAG,MAAM,CAAC,IAAI,KAAK,MAAM,CAAC,KAAK,GAAG,gBAAgB,EAAE,CAAC;SACnE;QAED,IAAI,CAAC,IAAI,IAAI,gBAAgB,CAAC,CAAC,0DAA0D;QACzF,wDAAwD;QACxD,8BAA8B;IAChC,CAAC;IAEM,gBAAgB,CAAC,UAA2B;QACjD,IAAI,IAAI,CAAC,cAAc,IAAI,iBAAiB,EAAE;YAC5C,MAAM,IAAI,UAAU,CAAC,iBAAiB,iBAAiB,iCAAiC,CAAC,CAAC;SAC3F;QAED,8CAA8C;QAC9C,MAAM,IAAI,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;QACxC,IAAI,CAAC,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;YACxB,MAAM,IAAI,UAAU,CAAC,iCAAiC,UAAU,CAAC,GAAG,GAAG,CAAC,CAAC;SAC1E;IACH,CAAC;IAEM,iBAAiB,CAAC,UAA2B;QAClD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;QACtD,IAAI,CAAC,cAAc,EAAE,CAAC;IACxB,CAAC;IAED,wFAAwF;IACjF,kBAAkB;QACvB,OAAO,GAAG,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,kBAAkB,GAAG,gBAAgB,EAAE,CAAC;IACrE,CAAC;IAEM,uBAAuB;QAC5B,OAAO,IAAI,CAAC,oBAAoB,CAAC;IACnC,CAAC;IAEM,cAAc;QACnB,OAAO,IAAI,CAAC,
WAAW,CAAC;IAC1B,CAAC;CACF;AAED,MAAM,0BAA2B,SAAQ,iBAAiB;IAQxD,YACE,YAA+B,EAC/B,UAAyB,EACzB,OAA6B;QAE7B,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAXZ,kBAAa,GAA0B;YACtD,OAAO,EAAE,IAAI,WAAW,EAAE;YAC1B,MAAM,EAAE,GAAG;YACX,OAAO,EAAE,IAAI,WAAW,EAAE;SAC3B,CAAC;QASA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,MAAM,IAAI,CAAC,YAAY,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC;QAExD,OAAO,IAAI,CAAC,aAAa,CAAC,CAAC,uCAAuC;IACpE,CAAC;CACF;AAED,MAAM,iCAAiC;IAGrC,YAAY,YAA+B;QACzC,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAEM,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,0BAA0B,CAAC,IAAI,CAAC,YAAY,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;IAChF,CAAC;CACF;AAED,MAAM,uBAAwB,SAAQ,iBAAiB;IACrD,wGAAwG;IACxG,uEAAuE;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,IAAI,aAAa,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,IAAI,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,eAAe,CAAC,YAAY,CAAC,EAAE;gBACrD,aAAa,GAAG,MAAM,CAAC,IAAI,CAAC;aAC7B;SACF;QAED,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,2DAA2D;SACnG;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF;AAED,MAAM,8BAA8B;IAC3B,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,uBAAuB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n generateUuid,\n HttpHeaders,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n WebResource,\n TokenCredential,\n isTokenCredential,\n bearerTokenAuthenticationPolicy,\n isNode,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobClient, BlobDeleteOptions, BlobSetTierOptions } from \"./Clients\";\nimport { AccessTier } from \"./generatedModels\";\nimport { Mutex } from \"./utils/Mutex\";\nimport { Pipeline } from \"./Pipeline\";\nimport { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from \"./utils/utils.common\";\nimport {\n HeaderConstants,\n BATCH_MAX_REQUEST,\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n StorageOAuthScopes,\n} from \"./utils/constants\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { createSpan } from \"./utils/tracing\";\n\n/**\n * A request associated with a batch operation.\n */\nexport interface BatchSubRequest {\n /**\n * The URL of the resource to request operation.\n */\n url: string;\n\n /**\n * The credential used for sub request.\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service.\n * You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n}\n\n/**\n * A BlobBatch represents an aggregated set of operations on blobs.\n * Currently, only `delete` and `setAccessTier` are supported.\n */\nexport class BlobBatch {\n private batchRequest: InnerBatchRequest;\n private readonly batch: string = \"batch\";\n private batchType: \"delete\" | \"setAccessTier\" | undefined;\n\n constructor() {\n this.batchRequest = new InnerBatchRequest();\n }\n\n /**\n * Get the value of Content-Type for a batch request.\n * The value must be multipart/mixed with a batch boundary.\n * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252\n */\n public getMultiPartContentType(): string {\n return this.batchRequest.getMultipartContentType();\n }\n\n /**\n * Get assembled HTTP request body for sub requests.\n */\n public getHttpRequestBody(): string {\n return this.batchRequest.getHttpRequestBody();\n }\n\n /**\n * Get sub requests that are added into the batch request.\n */\n public getSubRequests(): Map {\n return this.batchRequest.getSubRequests();\n }\n\n private async addSubRequestInternal(\n subRequest: BatchSubRequest,\n assembleSubRequestFunc: () => Promise\n ): Promise {\n await Mutex.lock(this.batch);\n\n try {\n this.batchRequest.preAddSubRequest(subRequest);\n await assembleSubRequestFunc();\n this.batchRequest.postAddSubRequest(subRequest);\n } finally {\n await Mutex.unlock(this.batch);\n }\n }\n\n private setBatchType(batchType: \"delete\" | \"setAccessTier\"): void {\n if (!this.batchType) {\n this.batchType = batchType;\n }\n if (this.batchType !== batchType) {\n throw new RangeError(\n `BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`\n );\n }\n }\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlob(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. 
See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param options -\n */\n public async deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise;\n\n public async deleteBlob(\n urlOrBlobClient: string | BlobClient,\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n options?: BlobDeleteOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||\n credentialOrOptions instanceof AnonymousCredential ||\n isTokenCredential(credentialOrOptions))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrOptions;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n options = credentialOrOptions as BlobDeleteOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchDeleteRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"delete\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n blobClient: BlobClient,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobAccessTier(\n urlOrBlobClient: string | BlobClient,\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n options?: BlobSetTierOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n let tier: AccessTier;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||\n credentialOrTier instanceof AnonymousCredential ||\n isTokenCredential(credentialOrTier))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrTier as\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential;\n tier = tierOrOptions as AccessTier;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n tier = credentialOrTier as AccessTier;\n options = tierOrOptions as BlobSetTierOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. 
Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchSetTierRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"setAccessTier\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(\n tier,\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Inner batch request class which is responsible for assembling and serializing sub requests.\n * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.\n */\nclass InnerBatchRequest {\n private operationCount: number;\n private body: string;\n private subRequests: Map;\n private readonly boundary: string;\n private readonly subRequestPrefix: string;\n private readonly multipartContentType: string;\n private readonly batchRequestEnding: string;\n\n constructor() {\n this.operationCount = 0;\n this.body = \"\";\n\n const tempGuid = generateUuid();\n\n // batch_{batchid}\n this.boundary = `batch_${tempGuid}`;\n // --batch_{batchid}\n // Content-Type: application/http\n // Content-Transfer-Encoding: binary\n this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`;\n // multipart/mixed; boundary=batch_{batchid}\n this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`;\n // --batch_{batchid}--\n this.batchRequestEnding = `--${this.boundary}--`;\n\n this.subRequests = new Map();\n }\n\n /**\n * Create pipeline to assemble sub requests. The idea here is to use existing\n * credential and serialization/deserialization components, with additional policies to\n * filter unnecessary headers, assemble sub requests into request's body\n * and intercept request from going to wire.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n public createPipeline(\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential\n ): Pipeline {\n const isAnonymousCreds = credential instanceof AnonymousCredential;\n const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]\n const factories: RequestPolicyFactory[] = new Array(policyFactoryLength);\n\n factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer\n factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers\n if (!isAnonymousCreds) {\n factories[2] = isTokenCredential(credential)\n ? 
attachCredential(\n bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes),\n credential\n )\n : credential;\n }\n factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire\n\n return new Pipeline(factories, {});\n }\n\n public appendSubRequestToBody(request: WebResource) {\n // Start to assemble sub request\n this.body += [\n this.subRequestPrefix, // sub request constant prefix\n `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID\n \"\", // empty line after sub request's content ID\n `${request.method.toString()} ${getURLPathAndQuery(\n request.url\n )} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method\n ].join(HTTP_LINE_ENDING);\n\n for (const header of request.headers.headersArray()) {\n this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`;\n }\n\n this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line\n // No body to assemble for current batch request support\n // End to assemble sub request\n }\n\n public preAddSubRequest(subRequest: BatchSubRequest) {\n if (this.operationCount >= BATCH_MAX_REQUEST) {\n throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`);\n }\n\n // Fast fail if url for sub request is invalid\n const path = getURLPath(subRequest.url);\n if (!path || path === \"\") {\n throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`);\n }\n }\n\n public postAddSubRequest(subRequest: BatchSubRequest) {\n this.subRequests.set(this.operationCount, subRequest);\n this.operationCount++;\n }\n\n // Return the http request body with assembling the ending line to the sub request body.\n public getHttpRequestBody(): string {\n return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`;\n }\n\n public getMultipartContentType(): string {\n return this.multipartContentType;\n }\n\n public getSubRequests(): Map {\n return this.subRequests;\n }\n}\n\nclass BatchRequestAssemblePolicy extends BaseRequestPolicy {\n private batchRequest: InnerBatchRequest;\n private readonly dummyResponse: HttpOperationResponse = {\n request: new WebResource(),\n status: 200,\n headers: new HttpHeaders(),\n };\n\n constructor(\n batchRequest: InnerBatchRequest,\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ) {\n super(nextPolicy, options);\n\n this.batchRequest = batchRequest;\n }\n\n public async sendRequest(request: WebResource): Promise {\n await this.batchRequest.appendSubRequestToBody(request);\n\n return this.dummyResponse; // Intercept request from going to wire\n }\n}\n\nclass BatchRequestAssemblePolicyFactory implements RequestPolicyFactory {\n private batchRequest: InnerBatchRequest;\n\n constructor(batchRequest: InnerBatchRequest) {\n this.batchRequest = batchRequest;\n }\n\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): BatchRequestAssemblePolicy {\n return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);\n }\n}\n\nclass BatchHeaderFilterPolicy extends BaseRequestPolicy {\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(request: WebResource): Promise {\n let xMsHeaderName = \"\";\n\n for (const header of request.headers.headersArray()) {\n if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {\n xMsHeaderName = header.name;\n }\n }\n\n if (xMsHeaderName !== \"\") {\n request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.\n }\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n\nclass BatchHeaderFilterPolicyFactory implements RequestPolicyFactory {\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): BatchHeaderFilterPolicy {\n return new BatchHeaderFilterPolicy(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js new file mode 100644 index 00000000..f15f53da --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BatchResponseParser } from "./BatchResponseParser"; +import { utf8ByteLength } from "./BatchUtils"; +import { BlobBatch } from "./BlobBatch"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { Service, Container } from "./generated/src/operations"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageClientContext } from "./generated/src/storageClientContext"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { getURLPath } from "./utils/utils.common"; +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +export class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. + this.serviceOrContainerContext = new Container(storageClientContext); + } + else { + this.serviceOrContainerContext = new Service(storageClientContext); + } + } + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); + } + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } + else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); + } + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); + try { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). 
+ const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +//# sourceMappingURL=BlobBatchClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map new file mode 100644 index 00000000..5d47ea91 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobBatchClient.js","sourceRoot":"","sources":["../../../src/BlobBatchClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AASlC,OAAO,EAAE,mBAAmB,EAAE,MAAM,uBAAuB,CAAC;AAC5D,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAEjF,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAEhE,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AAExE,OAAO,EAAE,oBAAoB,EAAE,MAAM,sCAAsC,CAAC;AAC5E,OAAO,EAAwC,WAAW,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAC/F,OAAO,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;AAiClD;;;;GAIG;AACH,MAAM,OAAO,eAAe;IA8B1B,YACE,GAAW,EACX,oBAIgB;IAChB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,IAAI,QAAsB,CAAC;QAC3B,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;SACjC;aAAM,IAAI,CAAC,oBAAoB,EAAE;YAChC,yBAAyB;YACzB,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM;YACL,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;SACvD;QAED,MAAM,oBAAoB,GAAG,IAAI,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,sBAAsB,EAAE,CAAC,CAAC;QAE9F,MAAM,IAAI,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QAC7B,IAAI,IAAI,IAAI,IAAI,KAAK,GAAG,EAAE;YACxB,oBAAoB;YACpB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAC;SACtE;aAAM;YACL,IAAI,CAAC,yBAAyB,GAAG,IAAI,OAAO,CAAC,oBAAoB,CAAC,CAAC;SACpE;IACH,CAAC;IAED;;;OAGG;IACI,WAAW;QAChB,OAAO,IAAI,SAAS,EAAE,CAAC;IACzB,CAAC;IAsCM,KAAK,CAAC,WAAW,CACtB,iBAA0C,EAC1C,mBAKa;IACb,mFAAmF;IACnF,gEAAgE;IAChE,OAA2B;QAE3B,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;QAC9B,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;YAC/C,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAsC,EAAE,OAAO,CAAC,CAAC;aAC1F;iBAAM;gBACL,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAwC,CAAC,CAAC;aACnF;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IACjC,CAAC;IAkDM,KAAK,CAAC,kBAAkB,CAC7B,iBAA0C,EAC1C,gBAIc,EACd,aAA+C;IAC/C,mFAAmF;IACnF,gEAAgE;IAChE,OAA4B;QAE5B,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;QAC9B,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;YAC/C,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAAmC,EACnC,aAA2B,EAC3B,OAAO,CACR,CAAC;aACH;iBAAM;gBACL,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAA8B,EAC9B,aAAmC,CACpC,CAAC;aACH;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IACjC,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAkCG;IACI,KAAK,CAAC,WAAW,CACtB,YAAuB,EACvB,UAA8C,EAAE;QAEhD,IAAI,CAAC,
YAAY,IAAI,YAAY,CAAC,cAAc,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE;YAC7D,MAAM,IAAI,UAAU,CAAC,wDAAwD,CAAC,CAAC;SAChF;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;YACF,MAAM,gBAAgB,GAAG,YAAY,CAAC,kBAAkB,EAAE,CAAC;YAE3D,2FAA2F;YAC3F,MAAM,gBAAgB,GACpB,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC9C,cAAc,CAAC,gBAAgB,CAAC,EAChC,YAAY,CAAC,uBAAuB,EAAE,EACtC,gBAAgB,kCAEX,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;YAEJ,gHAAgH;YAChH,MAAM,mBAAmB,GAAG,IAAI,mBAAmB,CACjD,gBAAgB,EAChB,YAAY,CAAC,cAAc,EAAE,CAC9B,CAAC;YACF,MAAM,eAAe,GAAG,MAAM,mBAAmB,CAAC,kBAAkB,EAAE,CAAC;YAEvE,MAAM,GAAG,GAAiC;gBACxC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,WAAW,EAAE,gBAAgB,CAAC,WAAW;gBACzC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,eAAe,EAAE,gBAAgB,CAAC,eAAe;gBACjD,OAAO,EAAE,gBAAgB,CAAC,OAAO;gBACjC,YAAY,EAAE,eAAe,CAAC,YAAY;gBAC1C,0BAA0B,EAAE,eAAe,CAAC,0BAA0B;gBACtE,uBAAuB,EAAE,eAAe,CAAC,uBAAuB;aACjE,CAAC;YAEF,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AccessTier,\n ServiceSubmitBatchHeaders,\n ServiceSubmitBatchOptionalParamsModel,\n ServiceSubmitBatchResponseModel,\n} from \"./generatedModels\";\nimport { ParsedBatchResponse } from \"./BatchResponse\";\nimport { BatchResponseParser } from \"./BatchResponseParser\";\nimport { utf8ByteLength } from \"./BatchUtils\";\nimport { BlobBatch } from \"./BlobBatch\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { HttpResponse, TokenCredential } from \"@azure/core-http\";\nimport { Service, Container } from \"./generated/src/operations\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobDeleteOptions, BlobClient, BlobSetTierOptions } from \"./Clients\";\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike, StoragePipelineOptions, newPipeline, isPipelineLike } from \"./Pipeline\";\nimport { getURLPath } from \"./utils/utils.common\";\n\n/**\n * Options to configure the Service - Submit Batch Optional Params.\n */\nexport interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel {}\n\n/**\n * Contains response data for blob batch operations.\n */\nexport declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse &\n ServiceSubmitBatchHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceSubmitBatchHeaders;\n };\n };\n\n/**\n * Contains response data for the {@link deleteBlobs} operation.\n */\nexport declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * Contains response data for the {@link setBlobsAccessTier} operation.\n */\nexport declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n */\nexport class BlobBatchClient {\n private serviceOrContainerContext: Service | Container;\n\n /**\n * Creates an 
instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n\n /**\n * Creates an instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (!credentialOrPipeline) {\n // no credential provided\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n\n const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());\n\n const path = getURLPath(url);\n if (path && path !== \"/\") {\n // Container scoped.\n this.serviceOrContainerContext = new Container(storageClientContext);\n } else {\n this.serviceOrContainerContext = new Service(storageClientContext);\n }\n }\n\n /**\n * Creates a {@link BlobBatch}.\n * A BlobBatch represents an aggregated set of operations on blobs.\n */\n public createBatch(): BlobBatch {\n return new BlobBatch();\n }\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operations will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resources to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlobs(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation(subrequest) will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs to delete.\n * @param options -\n */\n public async deleteBlobs(\n blobClients: BlobClient[],\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n public async deleteBlobs(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as TokenCredential, options);\n } else {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as BlobDeleteOptions);\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs which should have a new tier set.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n blobClients: BlobClient[],\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobsAccessTier(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as TokenCredential,\n tierOrOptions as AccessTier,\n options\n );\n } else {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as AccessTier,\n tierOrOptions as BlobSetTierOptions\n );\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Submit batch request which consists of multiple subrequests.\n *\n * Get `blobBatchClient` and other details before running the snippets.\n * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`\n *\n * Example usage:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.deleteBlob(urlInString0, credential0);\n * await batchRequest.deleteBlob(urlInString1, credential1, {\n * deleteSnapshots: \"include\"\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * Example using a lease:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.setBlobAccessTier(blockBlobClient0, \"Cool\");\n * await batchRequest.setBlobAccessTier(blockBlobClient1, \"Cool\", {\n * conditions: { leaseId: leaseId }\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @param batchRequest - A set of Delete or SetTier operations.\n * @param options -\n */\n public async submitBatch(\n batchRequest: BlobBatch,\n options: BlobBatchSubmitBatchOptionalParams = {}\n ): Promise {\n if (!batchRequest 
|| batchRequest.getSubRequests().size === 0) {\n throw new RangeError(\"Batch request should contain one or more sub requests.\");\n }\n\n const { span, updatedOptions } = createSpan(\"BlobBatchClient-submitBatch\", options);\n try {\n const batchRequestBody = batchRequest.getHttpRequestBody();\n\n // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now.\n const rawBatchResponse: ServiceSubmitBatchResponseModel =\n await this.serviceOrContainerContext.submitBatch(\n utf8ByteLength(batchRequestBody),\n batchRequest.getMultiPartContentType(),\n batchRequestBody,\n {\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202).\n const batchResponseParser = new BatchResponseParser(\n rawBatchResponse,\n batchRequest.getSubRequests()\n );\n const responseSummary = await batchResponseParser.parseBatchResponse();\n\n const res: BlobBatchSubmitBatchResponse = {\n _response: rawBatchResponse._response,\n contentType: rawBatchResponse.contentType,\n errorCode: rawBatchResponse.errorCode,\n requestId: rawBatchResponse.requestId,\n clientRequestId: rawBatchResponse.clientRequestId,\n version: rawBatchResponse.version,\n subResponses: responseSummary.subResponses,\n subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,\n subResponsesFailedCount: responseSummary.subResponsesFailedCount,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js new file mode 100644 index 00000000..567242bd --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// This file is used as a shim of "BlobDownloadResponse" for some browser bundlers +// when trying to bundle "BlobDownloadResponse" +// "BlobDownloadResponse" class is only available in Node.js runtime +export const BlobDownloadResponse = 1; +//# sourceMappingURL=BlobDownloadResponse.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map new file mode 100644 index 00000000..f1a82af1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobDownloadResponse.browser.js","sourceRoot":"","sources":["../../../src/BlobDownloadResponse.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,kFAAkF;AAClF,+CAA+C;AAC/C,oEAAoE;AACpE,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// This file is used as a shim of \"BlobDownloadResponse\" for some browser bundlers\n// when trying to bundle \"BlobDownloadResponse\"\n// \"BlobDownloadResponse\" class is only available in Node.js runtime\nexport const BlobDownloadResponse = 1;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js new file mode 100644 index 00000000..858c3b6e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js @@ -0,0 +1,455 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode } from "@azure/core-http"; +import { RetriableReadableStream, } from "./utils/RetriableReadableStream"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) + * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. + */ +export class BlobDownloadResponse { + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. 
+ * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. 
+ * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. 
+ * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. + * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. 
+ */ + get _response() { + return this.originalResponse._response; + } +} +//# sourceMappingURL=BlobDownloadResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js.map new file mode 100644 index 00000000..d5a70d40 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobDownloadResponse.js","sourceRoot":"","sources":["../../../src/BlobDownloadResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAgB,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAYxD,OAAO,EAEL,uBAAuB,GAExB,MAAM,iCAAiC,CAAC;AAEzC;;;;;;;;;GASG;AACH,MAAM,OAAO,oBAAoB;IA6d/B;;;;;;;;OAQG;IACH,YACE,gBAA4C,EAC5C,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,UAA0C,EAAE;QAE5C,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,IAAI,uBAAuB,CACnD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,MAAM,EACN,MAAM,EACN,KAAK,EACL,OAAO,CACR,CAAC;IACJ,CAAC;IApfD;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;;;;;;;OAWG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,qBAAqB;QAC9B,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;;OAKG;IACH,IAAW,uBAAuB;QAChC,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;IACvD,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;IACjD,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,cAAc;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAA
C,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;IACvC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,IAAW,gBAAgB;QACzB,OAAO,IAAI,CAAC,gBAAgB,CAAC,gBAAgB,CAAC;IAChD,CAAC;IAED;;;;;OAKG;IACH,IAAW,mBAAmB;QAC5B,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,IAAW,oCAAoC;QAC7C,OAAO,IAAI,CAAC,gBAAgB,CAAC,oCAAoC,CAAC;IACpE,CAAC;IAED;;;;OAIG;IACH,IAAW,iCAAiC;QAC1C,OAAO,IAAI,CAAC,gBAAgB,CAAC,iCAAiC,CAAC;IACjE,CAAC;IAED;;;;OAIG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,IAAW,2BAA2B;QACpC,OAAO,IAAI,CAAC,gBAAgB,CAAC,2BAA2B,CAAC;IAC3D,CAAC;IAED;;;;OAIG;IACH,IAAW,sBAAsB;QAC/B,OAAO,IAAI,CAAC,gBAAgB,CAAC,sBAAsB,CAAC;IACtD,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,kBAAkB;QAC3B,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,CAAC;IACtD,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAGlB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;CA8BF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { HttpResponse, isNode } from \"@azure/core-http\";\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\n\nimport {\n BlobDownloadHeaders,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n} from \"./generatedModels\";\nimport { BlobDownloadResponseParsed, Metadata, ObjectReplicationPolicy } from \"./models\";\nimport {\n ReadableStreamGetter,\n RetriableReadableStream,\n RetriableReadableStreamOptions,\n} from \"./utils/RetriableReadableStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will\n * automatically retry when internal read stream unexpected ends. 
(This kind of unexpected ends cannot\n * trigger retries defined in pipeline retry policy.)\n *\n * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js\n * Readable stream.\n */\nexport class BlobDownloadResponse implements BlobDownloadResponseParsed {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. 
This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return this.originalResponse.copyCompletedOn;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. 
This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The number of tags associated with the blob\n *\n * @readonly\n */\n public get tagCount(): number | undefined {\n return this.originalResponse.tagCount;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * Returns the UTC date and time generated by the service that indicates the time at which the blob was\n * last read or written to.\n *\n * @readonly\n */\n public get lastAccessed(): Date | undefined {\n return this.originalResponse.lastAccessed;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the Blob service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * Indicates the versionId of the downloaded blob version.\n *\n * @readonly\n */\n public get versionId(): string | undefined {\n return this.originalResponse.versionId;\n }\n\n /**\n * Indicates whether version of this blob is a current version.\n *\n * @readonly\n */\n public get isCurrentVersion(): boolean | undefined {\n return this.originalResponse.isCurrentVersion;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. 
This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * Object Replication Policy Id of the destination blob.\n *\n * @readonly\n */\n public get objectReplicationDestinationPolicyId(): string | undefined {\n return this.originalResponse.objectReplicationDestinationPolicyId;\n }\n\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n *\n * @readonly\n */\n public get objectReplicationSourceProperties(): ObjectReplicationPolicy[] | undefined {\n return this.originalResponse.objectReplicationSourceProperties;\n }\n\n /**\n * If this blob has been sealed.\n *\n * @readonly\n */\n public get isSealed(): boolean | undefined {\n return this.originalResponse.isSealed;\n }\n\n /**\n * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire.\n *\n * @readonly\n */\n public get immutabilityPolicyExpiresOn(): Date | undefined {\n return this.originalResponse.immutabilityPolicyExpiresOn;\n }\n\n /**\n * Indicates immutability policy mode.\n *\n * @readonly\n */\n public get immutabilityPolicyMode(): BlobImmutabilityPolicyMode | undefined {\n return this.originalResponse.immutabilityPolicyMode;\n }\n\n /**\n * Indicates if a legal hold is present on the blob.\n *\n * @readonly\n */\n public get legalHold(): boolean | undefined {\n return this.originalResponse.legalHold;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get contentAsBlob(): Promise | undefined {\n return this.originalResponse.blobBody;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will automatically retry when internal read stream unexpected ends.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? 
this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobDownloadResponseParsed;\n private blobDownloadStream?: RetriableReadableStream;\n\n /**\n * Creates an instance of BlobDownloadResponse.\n *\n * @param originalResponse -\n * @param getter -\n * @param offset -\n * @param count -\n * @param options -\n */\n public constructor(\n originalResponse: BlobDownloadResponseParsed,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new RetriableReadableStream(\n this.originalResponse.readableStreamBody!,\n getter,\n offset,\n count,\n options\n );\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js new file mode 100644 index 00000000..796c15b7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js @@ -0,0 +1,221 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { generateUuid } from "@azure/core-http"; +import { StorageClientContext } from "./generated/src/index"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { Blob as StorageBlob, Container } from "./generated/src/operations"; +import { ETagNone } from "./utils/constants"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +export class BlobLeaseClient { + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId) { + const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = new Container(clientContext); + } + else { + this._isContainer = false; + this._containerOrBlobOperation = new StorageBlob(clientContext); + } + if (!leaseId) { + leaseId = generateUuid(); + } + this._leaseId = leaseId; + } + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + this._leaseId = proposedLeaseId; + return response; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. 
+ */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. 
+ */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return await this._containerOrBlobOperation.breakLease(operationOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +//# sourceMappingURL=BlobLeaseClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map new file mode 100644 index 00000000..6ab89f40 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map @@ -0,0 +1 @@ 
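The `BlobLeaseClient.js` added above is compiled output vendored from the Azure Storage Blob SDK, not code written for this action. As a hedged illustration of the API that file implements, the sketch below shows the usual way a lease client is obtained and used; the connection-string source, container name, and blob name are placeholders, and the snippet is not part of the vendored files or of this repository.

```typescript
// Illustrative only: a minimal sketch of using the BlobLeaseClient shipped in the
// vendored @azure/storage-blob package above. The connection string, container and
// blob names below are placeholders, not values from this repository.
import { BlobServiceClient } from "@azure/storage-blob";

async function withLease(): Promise<void> {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? "" // placeholder credential source
  );
  const blobClient = serviceClient
    .getContainerClient("my-container") // placeholder container
    .getBlobClient("my-blob.txt");      // placeholder blob

  // getBlobLeaseClient() wraps the BlobLeaseClient constructor shown above.
  const leaseClient = blobClient.getBlobLeaseClient();

  // Lease duration must be 15-60 seconds, or -1 for an infinite lease.
  await leaseClient.acquireLease(30);
  try {
    // Writes and deletes against the blob now require this lease id.
    console.log(`Acquired lease ${leaseClient.leaseId} on ${leaseClient.url}`);
  } finally {
    await leaseClient.releaseLease();
  }
}

withLease().catch((err) => console.error(err));
```

As the constructor in the vendored source shows, the same class serves both containers and blobs, routing to the Container or Blob operation group depending on whether the wrapped client exposes a `name`.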
+{"version":3,"file":"BlobLeaseClient.js","sourceRoot":"","sources":["../../../src/BlobLeaseClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,YAAY,EAAgB,MAAM,kBAAkB,CAAC;AAC9D,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AAG7D,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,IAAI,IAAI,WAAW,EAAE,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAG5E,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAqFjF;;GAEG;AACH,MAAM,OAAO,eAAe;IAwB1B;;;;OAIG;IACH,YAAY,MAAoC,EAAE,OAAgB;QAChE,MAAM,aAAa,GAAG,IAAI,oBAAoB,CAC5C,MAAM,CAAC,GAAG,EACT,MAAc,CAAC,QAAQ,CAAC,sBAAsB,EAAE,CAClD,CAAC;QACF,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC;QAEvB,IAAK,MAAqB,CAAC,IAAI,KAAK,SAAS,EAAE;YAC7C,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;YACzB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,aAAa,CAAC,CAAC;SAC/D;aAAM;YACL,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;YAC1B,IAAI,CAAC,yBAAyB,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,CAAC;SACjE;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,YAAY,EAAE,CAAC;SAC1B;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;IAC1B,CAAC;IA1CD;;;;OAIG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,QAAQ,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IA4BD;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,YAAY,CACvB,QAAgB,EAChB,UAAiC,EAAE;;QAEnC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,iBACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EACR,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,eAAe,EAAE,IAAI,CAAC,QAAQ,IAC3B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,WAAW,CACtB,eAAuB,EACvB,UAAiC,EAAE;;QAEnC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC/D,IAAI,CAAC,QAAQ,EACb,eAAe,kBAEb,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;YACF,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC;YAChC,OAAO,QAAQ,CAAC;SACjB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,YAAY,CAAC,UAAiC,EAAE;;QAC3D,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CA
C1J,CAAC;SACH;QAED,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,kBACpE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,UAAU,CAAC,UAAiC,EAAE;;QACzD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,kBAClE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,UAAU,CACrB,WAAmB,EACnB,UAAiC,EAAE;;QAEnC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,MAAM,gBAAgB,mBACpB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EACX,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,CACtD,CAAC;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;SAC1E;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { generateUuid, HttpResponse } from \"@azure/core-http\";\nimport { StorageClientContext } from \"./generated/src/index\";\nimport { ContainerBreakLeaseOptionalParams } from \"./generatedModels\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Blob as StorageBlob, Container } from \"./generated/src/operations\";\nimport { ModifiedAccessConditions } from \"./models\";\nimport { CommonOptions } from \"./StorageClient\";\nimport { ETagNone } from \"./utils/constants\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobClient } from \"./Clients\";\nimport { ContainerClient } from \"./ContainerClient\";\n\n/**\n * The details for a specific lease.\n */\nexport interface Lease {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally. If the request version is 2011-08-18 or\n * newer, the ETag value will be in quotes.\n */\n etag?: string;\n /**\n * Returns the date and time the container was\n * last modified. 
Any operation that modifies the blob, including an update\n * of the blob's metadata or properties, changes the last-modified time of\n * the blob.\n */\n lastModified?: Date;\n /**\n * Uniquely identifies a container's lease\n */\n leaseId?: string;\n /**\n * Approximate time remaining in the lease\n * period, in seconds.\n */\n leaseTime?: number;\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n */\n requestId?: string;\n /**\n * Indicates the version of the Blob service used\n * to execute the request. This header is returned for requests made against\n * version 2009-09-19 and above.\n */\n version?: string;\n /**\n * UTC date/time value generated by the service that\n * indicates the time at which the response was initiated\n */\n date?: Date;\n /**\n * Error code if any associated with the response that returned\n * the Lease information.\n */\n errorCode?: string;\n}\n\n/**\n * Contains the response data for operations that create, modify, or delete a lease.\n *\n * See {@link BlobLeaseClient}.\n */\nexport type LeaseOperationResponse = Lease & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: Lease;\n };\n};\n\n/**\n * Configures lease operations.\n */\nexport interface LeaseOperationOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.\n */\nexport class BlobLeaseClient {\n private _leaseId: string;\n private _url: string;\n private _containerOrBlobOperation: Container | StorageBlob;\n private _isContainer: boolean;\n\n /**\n * Gets the lease Id.\n *\n * @readonly\n */\n public get leaseId(): string {\n return this._leaseId;\n }\n\n /**\n * Gets the url.\n *\n * @readonly\n */\n public get url(): string {\n return this._url;\n }\n\n /**\n * Creates an instance of BlobLeaseClient.\n * @param client - The client to make the lease operation requests.\n * @param leaseId - Initial proposed lease id.\n */\n constructor(client: ContainerClient | BlobClient, leaseId?: string) {\n const clientContext = new StorageClientContext(\n client.url,\n (client as any).pipeline.toServiceClientOptions()\n );\n this._url = client.url;\n\n if ((client as BlobClient).name === undefined) {\n this._isContainer = true;\n this._containerOrBlobOperation = new Container(clientContext);\n } else {\n this._isContainer = false;\n this._containerOrBlobOperation = new StorageBlob(clientContext);\n }\n\n if (!leaseId) {\n leaseId = generateUuid();\n }\n this._leaseId = leaseId;\n }\n\n /**\n * Establishes and manages a lock on a container for delete operations, or on a blob\n * for write and delete operations.\n * The lock duration can be 15 to 60 seconds, or can be infinite.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param duration - Must be between 15 to 60 seconds, or infinite (-1)\n * @param options - option to configure lease management operations.\n * @returns Response data for acquire lease operation.\n */\n public 
async acquireLease(\n duration: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-acquireLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.acquireLease({\n abortSignal: options.abortSignal,\n duration,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n proposedLeaseId: this._leaseId,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To change the ID of the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param proposedLeaseId - the proposed new lease Id.\n * @param options - option to configure lease management operations.\n * @returns Response data for change lease operation.\n */\n public async changeLease(\n proposedLeaseId: string,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-changeLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const response = await this._containerOrBlobOperation.changeLease(\n this._leaseId,\n proposedLeaseId,\n {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n this._leaseId = proposedLeaseId;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To free the lease if it is no longer needed so that another client may\n * immediately acquire a lease against the container or the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - option to configure lease management operations.\n * @returns Response data for release lease operation.\n */\n public async releaseLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-releaseLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.releaseLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To renew the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - Optional option to configure lease management operations.\n * @returns Response data for renew lease operation.\n */\n public async renewLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-renewLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.renewLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To end the lease but ensure that another client cannot acquire a new lease\n * until the current lease period has expired.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param breakPeriod - Break period\n * @param options - Optional options to configure lease management operations.\n * @returns Response data for break lease operation.\n */\n public async breakLease(\n breakPeriod: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-breakLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const operationOptions: ContainerBreakLeaseOptionalParams = {\n abortSignal: options.abortSignal,\n breakPeriod,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n };\n return await this._containerOrBlobOperation.breakLease(operationOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js new file mode 100644 index 00000000..f78c5b50 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js @@ -0,0 +1,362 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN BROWSER RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in browser runtime it will + * parse avor data returned by blob query. + */ +export class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, _options = {}) { + this.originalResponse = originalResponse; + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. 
+ * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. 
+ * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. 
+ * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + throw Error(`Quick query in browser is not supported yet.`); + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * @readonly + */ + get readableStreamBody() { + return undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} +//# sourceMappingURL=BlobQueryResponse.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map new file mode 100644 index 00000000..6d4069e7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobQueryResponse.browser.js","sourceRoot":"","sources":["../../../src/BlobQueryResponse.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAiBlC;;;;;GAKG;AACH,MAAM,OAAO,iBAAiB;IA+X5B;;;;;OAKG;IACH,YACE,gBAAwC,EACxC,WAAwC,EAAE;QAE1C,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;IAC3C,CAAC;IAzYD;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;;;;;;;OAWG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,eAAe;QACxB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,qBAAqB;QAC9B,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;;OAKG;IACH,IAAW,uBAAuB;QACh
C,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;IACvD,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;IACjD,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,cAAc;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;IACvC,CAAC;IAED;;;;;OAKG;IACH,IAAW,mBAAmB;QAC5B,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,MAAM,KAAK,CAAC,8CAA8C,CAAC,CAAC;IAC9D,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAGlB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;CAgBF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\n\nimport {\n BlobDownloadResponseModel,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n BlobDownloadHeaders,\n BlobQueryResponseModel,\n} from \"./generatedModels\";\nimport { Metadata } from \"./models\";\nimport { BlobQuickQueryStreamOptions } from \"./utils/BlobQuickQueryStream\";\n\n/**\n * ONLY AVAILABLE IN BROWSER RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in browser runtime it will\n * parse avor data returned by blob query.\n */\nexport class BlobQueryResponse implements BlobDownloadResponseModel {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. 
Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return undefined;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. 
Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. 
This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get blobBody(): Promise | undefined {\n throw Error(`Quick query in browser is not supported yet.`);\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobQueryResponseModel;\n\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n public constructor(\n originalResponse: BlobQueryResponseModel,\n _options: BlobQuickQueryStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js new file mode 100644 index 00000000..91dac643 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js @@ -0,0 +1,367 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode } from "@azure/core-http"; +import { BlobQuickQueryStream } from "./utils/BlobQuickQueryStream"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +export class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. 
+ * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. 
Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. 
+ * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return undefined; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} +//# sourceMappingURL=BlobQueryResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map new file mode 100644 index 00000000..44f63e60 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobQueryResponse.js","sourceRoot":"","sources":["../../../src/BlobQueryResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAgB,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAaxD,OAAO,EAAE,oBAAoB,EAA+B,MAAM,8BAA8B,CAAC;AAEjG;;;;;GAKG;AACH,MAAM,OAAO,iBAAiB;IAkY5B;;;;;OAKG;IACH,YACE,gBAAwC,EACxC,UAAuC,EAAE;QAEzC,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,IAAI,oBAAoB,CAChD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,OAAO,CACR,CAAC;IACJ,CAAC;IAhZD;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;;;;;;;OAWG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,eAAe;QACxB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,qBAAqB;QAC9B,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;OAKG;IA
CH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;;OAKG;IACH,IAAW,uBAAuB;QAChC,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;IACvD,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;IACjD,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,cAAc;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;IACvC,CAAC;IAED;;;;;OAKG;IACH,IAAW,mBAAmB;QAC5B,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,kBAAkB;QAC3B,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,CAAC;IACtD,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAGlB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;CAqBF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse, isNode } from \"@azure/core-http\";\n\nimport {\n BlobDownloadResponseModel,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n BlobDownloadHeaders,\n BlobQueryResponseModel,\n} from \"./generatedModels\";\nimport { Metadata } from \"./models\";\nimport { BlobQuickQueryStream, BlobQuickQueryStreamOptions } from \"./utils/BlobQuickQueryStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will\n * parse avor data returned by blob query.\n */\nexport class BlobQueryResponse implements BlobDownloadResponseModel {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. 
This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return undefined;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. 
Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get blobBody(): Promise | undefined {\n return undefined;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will parse avor data returned by blob query.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobQueryResponseModel;\n private blobDownloadStream?: BlobQuickQueryStream;\n\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n public constructor(\n originalResponse: BlobQueryResponseModel,\n options: BlobQuickQueryStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new BlobQuickQueryStream(\n this.originalResponse.readableStreamBody!,\n options\n );\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js new file mode 100644 index 00000000..327cf23d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js @@ -0,0 +1,786 @@ +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import { isTokenCredential, isNode, getDefaultProxySettings, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { Container, Service } from "./generated/src/operations"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { ContainerClient, } from "./ContainerClient"; +import { appendToURLPath, appendToURLQuery, extractConnectionStringParts, toTags, } from "./utils/utils.common"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import "@azure/core-paging"; +import { truncatedISO8061Date } from "./utils/utils.common"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { BlobBatchClient } from "./BlobBatchClient"; +import { StorageClient } from "./StorageClient"; +import { AccountSASPermissions } from "./sas/AccountSASPermissions"; +import { generateAccountSASQueryParameters } from "./sas/AccountSASSignatureValues"; +import { AccountSASServices } from "./sas/AccountSASServices"; +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +export class BlobServiceClient extends StorageClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = new Service(this.storageClientContext); + } + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + return await containerClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. 
+ */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); + try { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, + deletedContainerVersion }, updatedOptions)); + return { containerClient, containerUndeleteResponse }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); + try { + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); + try { + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. 
+ */ + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); + try { + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); + try { + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); + try { + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); + try { + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? 
[options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); + try { + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. 
+ * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield __await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield __await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const segment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker, options = {}) { + return __asyncGenerator(this, arguments, function* listSegments_1() { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield __await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield __await(yield __await(listContainersSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems(options = {}) { + return __asyncGenerator(this, arguments, function* listItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const segment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(segment.containerItems))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. 
+ * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); + try { + const response = await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. 
The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } +} +//# sourceMappingURL=BlobServiceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map new file mode 100644 index 00000000..16008dfd --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobServiceClient.js","sourceRoot":"","sources":["../../../src/BlobServiceClient.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAEL,iBAAiB,EACjB,MAAM,EAEN,uBAAuB,GACxB,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAsBrD,OAAO,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,4BAA4B,CAAC;AAChE,OAAO,EAAE,WAAW,EAAwC,cAAc,EAAE,MAAM,YAAY,CAAC;AAC/F,OAAO,EACL,eAAe,GAGhB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,eAAe,EACf,gBAAgB,EAChB,4BAA4B,EAC5B,MAAM,GACP,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,oBAAoB,CAAC;AAE5B,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAC;AAC5D,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACjF,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAiB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAC/D,OAAO,EAAE,qBAAqB,EAAE,MAAM,6BAA6B,CAAC;AAGpE,OAAO,EAAE,iCAAiC,EAAE,MAAM,iCAAiC,CAAC;AACpF,OAAO,EAAE,kBAAkB,EAAE,MAAM,0BAA0B,CAAC;AA4S9D;;;GAGG;AACH,MAAM,OAAO,iBAAkB,SAAQ,aAAa;IAuGlD,YACE,GAAW,EACX,oBAIgB;IAChB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,IAAI,QAAsB,CAAC;QAC3B,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;SACjC;aAAM,IACL,CAAC,MAAM,IAAI,oBAAoB,YAAY,0BAA0B,CAAC;YACtE,oBAAoB,YAAY,mBAAmB;YACnD,iBAAiB,CAAC,oBAAoB,CAAC,EACvC;YACA,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;SACvD;aAAM;YACL,8DAA8D;YAC9D,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,cAAc,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAC/D,CAAC;IA3HD;;;;;;;;;;;OAWG;IACI,MAAM,CAAC,oBAAoB,CAChC,gBAAwB;IACxB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,MAAM,cAAc,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;QACtE,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;YAC/C,IAAI,MAAM,EAAE;gBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;gBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;oBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;iBACzE;gBAED,MAAM,QAAQ,GAAG,WAAW,CAA
C,mBAAmB,EAAE,OAAO,CAAC,CAAC;gBAC3D,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;aACvF;SACF;aAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;YAClD,MAAM,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;YACjE,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,GAAG,GAAG,GAAG,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;SAC9F;aAAM;YACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;SACH;IACH,CAAC;IAiFD;;;;;;;;;;;OAWG;IACI,kBAAkB,CAAC,aAAqB;QAC7C,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EAC5D,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,eAAe,CAC1B,aAAqB,EACrB,UAAkC,EAAE;QAKpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAC/D,MAAM,uBAAuB,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC7E,OAAO;gBACL,eAAe;gBACf,uBAAuB;aACxB,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,eAAe,CAC1B,aAAqB,EACrB,UAAwC,EAAE;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAC/D,OAAO,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;SACrD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,iBAAiB,CAC5B,oBAA4B,EAC5B,uBAA+B,EAC/B,UAA2C,EAAE;QAK7C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAC7C,OAAO,CAAC,wBAAwB,IAAI,oBAAoB,CACzD,CAAC;YACF,qCAAqC;YACrC,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;YAChF,MAAM,yBAAyB,GAAG,MAAM,gBAAgB,CAAC,OAAO,iBAC9D,oBAAoB;gBACpB,uBAAuB,IACpB,cAAc,EACjB,CAAC;YACH,OAAO,EAAE,eAAe,EAAE,yBAAyB,EAAE,CAAC;SACvD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACH,gEAAgE;IAChE,8HAA8H;IACtH,KAAK,CAAC,eAAe,CAC3B,mBAA2B,EAC3B,wBAAgC,EAChC,UAAyC,EAAE;;QAK3C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,wBAAwB,CAAC,CAAC;YAC1E,qCAAqC;YACrC,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;YAChF,MAAM,uBAAuB,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,mBAAmB,kCAC5E,cAAc,KACjB,aAAa,EAAE,MAAA,OAAO,CAAC,eAAe,0CAAE,OAAO,IAC/C,CAAC;YACH,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,CAAC;SACrD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,aAAa,CACxB,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,aAAa,CACxB,UAAiC,EACjC,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,i
CAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,CAAC,UAAU,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,aAAa,CACxB,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,cAAc,CACzB,UAAwC,EAAE;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,cAAc,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;OAaG;IACK,KAAK,CAAC,qBAAqB,CACjC,MAAe,EACf,UAA+C,EAAE;QAEjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAEhG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,qBAAqB,6CACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,IACH,OAAO,KACV,OAAO,EAAE,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,KAC/E,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;OAiBG;IACK,KAAK,CAAC,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE;QAElD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QAEF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,WAAW,iBACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,MAAA,IAAI,CAAC,IAAI,0CAAE,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;qBAC1C;oBACD,uCAAY,IAAI,KAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,IAAG;gBACxD,CAAC,CAAC,GACH,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACY,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE;;YAElD,IAAI,QAAQ,CAAC;YACb,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,QAAQ,GAAG,cAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;oBACtC,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,oBAAM,QAAQ,CAAA,CAAC;iBAChB,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;;;OAQG;IACY,oBAAoB,CACjC,sBAA8B,EAC9B,UAAgD,EAAE;;;YAElD,IAAI,MAA0B,CAAC;;gBAC/B,KAA4B,IAAA,KAAA,cAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,OAA
O,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;iBACtB;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA8EG;IACI,eAAe,CACpB,sBAA8B,EAC9B,UAAwC,EAAE;QAE1C,8CAA8C;QAC9C,MAAM,kBAAkB,qBACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,kBACpF,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,kBAAkB,EACrB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACY,YAAY,CACzB,MAAe,EACf,UAA+C,EAAE;;YAEjD,IAAI,6BAA6B,CAAC;YAClC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,6BAA6B,GAAG,cAAM,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBAClF,6BAA6B,CAAC,cAAc;wBAC1C,6BAA6B,CAAC,cAAc,IAAI,EAAE,CAAC;oBACrD,MAAM,GAAG,6BAA6B,CAAC,iBAAiB,CAAC;oBACzD,oBAAM,cAAM,6BAA6B,CAAA,CAAA,CAAC;iBAC3C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;OAIG;IACY,SAAS,CACtB,UAA+C,EAAE;;;YAEjD,IAAI,MAA0B,CAAC;;gBAC/B,KAA4B,IAAA,KAAA,cAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,IAAA;oBAAnD,MAAM,OAAO,WAAA,CAAA;oBACtB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,OAAO,CAAC,cAAc,CAAA,CAAA,CAAA,CAAC;iBAC/B;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAyEG;IACI,cAAc,CACnB,UAAwC,EAAE;QAE1C,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;YACzB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;SAC5B;QAED,MAAM,OAAO,GAAgC,EAAE,CAAC;QAChD,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SACzB;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;SACxB;QAED,mDAAmD;QACnD,MAAM,kBAAkB,mCACnB,OAAO,GACP,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAC3C,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC;QAChD,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,kBACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,kBAAkB,EACrB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,oBAAoB,CAC/B,QAAc,EACd,SAAe,EACf,UAA8C,EAAE;QAEhD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAC/F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,oBAAoB,CAC7D;gBACE,QAAQ,EAAE,oBAAoB,CAAC,QAAQ,EAAE,KAAK,CAAC;gBAC/C,SAAS,EAAE,oBAAoB,CAAC,SAAS,EAAE,KAAK,CAAC;aAClD,kBAEC,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;YAEF,MAAM,iBAAiB,GAAG;gBACxB,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,cAAc,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC;gBACjD,eAAe,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC;gBACnD,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,KAAK,EAAE,QAAQ,CAAC,KAAK;aACtB,CAAC;YAEF,MAAM,GAAG,mBACP,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe,EACzC,OAAO,EAAE,QAAQ,CAAC,OAAO,EACzB,IAAI,EAAE,QAAQ,CAAC,IAAI,EACnB,SAAS,EAAE,QAAQ,CAAC,SAAS,IAC1B,iBAAiB,CACrB,CAAC;YAEF,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,kBAAkB;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,E
AAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;;;;;;;;;OAaG;IACI,qBAAqB,CAC1B,SAAgB,EAChB,cAAqC,qBAAqB,CAAC,KAAK,CAAC,GAAG,CAAC,EACrE,gBAAwB,KAAK,EAC7B,UAA+C,EAAE;QAEjD,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;YAC5D,MAAM,UAAU,CACd,+FAA+F,CAChG,CAAC;SACH;QAED,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;YACvB,SAAS,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC;SACnD;QAED,MAAM,GAAG,GAAG,iCAAiC,iBAEzC,WAAW;YACX,SAAS;YACT,aAAa,EACb,QAAQ,EAAE,kBAAkB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,IAC/C,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;QAEb,OAAO,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IACzC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport {\n TokenCredential,\n isTokenCredential,\n isNode,\n HttpResponse,\n getDefaultProxySettings,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n ServiceGetUserDelegationKeyHeaders,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ServiceGetPropertiesResponse,\n BlobServiceProperties,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentResponse,\n ContainerItem,\n UserDelegationKeyModel,\n ContainerUndeleteResponse,\n FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n ContainerRenameResponse,\n LeaseAccessConditions,\n FilterBlobSegment,\n FilterBlobItem,\n} from \"./generatedModels\";\nimport { Container, Service } from \"./generated/src/operations\";\nimport { newPipeline, StoragePipelineOptions, PipelineLike, isPipelineLike } from \"./Pipeline\";\nimport {\n ContainerClient,\n ContainerCreateOptions,\n ContainerDeleteMethodOptions,\n} from \"./ContainerClient\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n toTags,\n} from \"./utils/utils.common\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport \"@azure/core-paging\";\nimport { PageSettings, PagedAsyncIterableIterator } from \"@azure/core-paging\";\nimport { truncatedISO8061Date } from \"./utils/utils.common\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { AccountSASPermissions } from \"./sas/AccountSASPermissions\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateAccountSASQueryParameters } from \"./sas/AccountSASSignatureValues\";\nimport { AccountSASServices } from \"./sas/AccountSASServices\";\nimport { ListContainersIncludeType } from \"./generated/src\";\n\n/**\n * Options to configure the {@link BlobServiceClient.getProperties} operation.\n */\nexport interface ServiceGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.setProperties} operation.\n */\nexport interface ServiceSetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal 
the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getAccountInfo} operation.\n */\nexport interface ServiceGetAccountInfoOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getStatistics} operation.\n */\nexport interface ServiceGetStatisticsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the Service - Get User Delegation Key.\n */\nexport interface ServiceGetUserDelegationKeyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainerSegment} operation.\n */\ninterface ServiceListContainersSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify that the container's metadata be returned as part of the response\n * body. 
Possible values include: 'metadata'\n */\n include?: ListContainersIncludeType | ListContainersIncludeType[];\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainers} operation.\n */\nexport interface ServiceListContainersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies whether the container's metadata\n * should be returned as part of the response body.\n */\n includeMetadata?: boolean;\n\n /**\n * Specifies whether soft deleted containers should be included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether system containers should be included in the response.\n */\n includeSystem?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTagsSegment} operation.\n */\ninterface ServiceFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ServiceFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ServiceFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A user delegation key.\n */\nexport interface UserDelegationKey {\n /**\n * The Azure Active Directory object ID in GUID format.\n */\n signedObjectId: string;\n /**\n * The Azure Active Directory tenant ID in GUID format.\n */\n signedTenantId: string;\n /**\n * The date-time the key is active.\n */\n signedStartsOn: Date;\n /**\n * The date-time the key expires.\n */\n signedExpiresOn: Date;\n /**\n * Abbreviation of the Azure Storage service that accepts the key.\n */\n signedService: string;\n /**\n * The service version that created the key.\n */\n signedVersion: string;\n /**\n * The key as a base64 string.\n */\n value: string;\n}\n\n/**\n * 
Contains response data for the {@link getUserDelegationKey} operation.\n */\nexport declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey &\n ServiceGetUserDelegationKeyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceGetUserDelegationKeyHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: UserDelegationKeyModel;\n };\n };\n\n/**\n * Options to configure {@link BlobServiceClient.undeleteContainer} operation.\n */\nexport interface ServiceUndeleteContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies the new name of the restored container.\n * Will use its original name if this is not specified.\n * @deprecated Restore container to a different name is not supported by service anymore.\n */\n destinationContainerName?: string;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.renameContainer} operation.\n */\nexport interface ServiceRenameContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Condition to meet for the source container.\n */\n sourceCondition?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation.\n */\nexport interface ServiceGenerateAccountSasUrlOptions {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n /**\n * Optional. IP range allowed.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you\n * to manipulate blob containers.\n */\nexport class BlobServiceClient extends StorageClient {\n /**\n * serviceContext provided by protocol layer.\n */\n private serviceContext: Service;\n\n /**\n *\n * Creates an instance of BlobServiceClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n public static fromConnectionString(\n connectionString: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ): BlobServiceClient {\n options = options || {};\n const extractedCreds = extractConnectionStringParts(connectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n const pipeline = newPipeline(sharedKeyCredential, options);\n return new BlobServiceClient(extractedCreds.url, pipeline);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n const pipeline = newPipeline(new AnonymousCredential(), options);\n return new BlobServiceClient(extractedCreds.url + \"?\" + extractedCreds.accountSas, pipeline);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n }\n\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n *\n * Example using DefaultAzureCredential from `@azure/identity`:\n *\n * ```js\n * const account = \"\";\n *\n * const defaultAzureCredential = new DefaultAzureCredential();\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * defaultAzureCredential\n * );\n * ```\n *\n * Example using an account name/key:\n *\n * ```js\n * const account = \"\"\n * const sharedKeyCredential = new StorageSharedKeyCredential(account, \"\");\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * sharedKeyCredential\n * );\n * ```\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". 
You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (\n (isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||\n credentialOrPipeline instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipeline)\n ) {\n pipeline = newPipeline(credentialOrPipeline, options);\n } else {\n // The second parameter is undefined. Use anonymous credential\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n super(url, pipeline);\n this.serviceContext = new Service(this.storageClientContext);\n }\n\n /**\n * Creates a {@link ContainerClient} object\n *\n * @param containerName - A container name\n * @returns A new ContainerClient object for the given container name.\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * ```\n */\n public getContainerClient(containerName: string): ContainerClient {\n return new ContainerClient(\n appendToURLPath(this.url, encodeURIComponent(containerName)),\n this.pipeline\n );\n }\n\n /**\n * Create a Blob container.\n *\n * @param containerName - Name of the container to create.\n * @param options - Options to configure Container Create operation.\n * @returns Container creation response and the corresponding container client.\n */\n public async createContainer(\n containerName: string,\n options: ContainerCreateOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerCreateResponse: ContainerCreateResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-createContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n const containerCreateResponse = await containerClient.create(updatedOptions);\n return {\n containerClient,\n containerCreateResponse,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Deletes a Blob container.\n *\n * @param containerName - Name of the container to delete.\n * @param options - Options to configure Container Delete operation.\n * @returns Container deletion response.\n */\n public async deleteContainer(\n containerName: string,\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-deleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n return await containerClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restore a previously deleted Blob container.\n * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container.\n *\n * @param deletedContainerName - Name 
of the previously deleted container.\n * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container.\n * @param options - Options to configure Container Restore operation.\n * @returns Container deletion response.\n */\n public async undeleteContainer(\n deletedContainerName: string,\n deletedContainerVersion: string,\n options: ServiceUndeleteContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerUndeleteResponse: ContainerUndeleteResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-undeleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(\n options.destinationContainerName || deletedContainerName\n );\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerUndeleteResponse = await containerContext.restore({\n deletedContainerName,\n deletedContainerVersion,\n ...updatedOptions,\n });\n return { containerClient, containerUndeleteResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Rename an existing Blob Container.\n *\n * @param sourceContainerName - The name of the source container.\n * @param destinationContainerName - The new name of the container.\n * @param options - Options to configure Container Rename operation.\n */\n /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */\n // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready.\n private async renameContainer(\n sourceContainerName: string,\n destinationContainerName: string,\n options: ServiceRenameContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerRenameResponse: ContainerRenameResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-renameContainer\", options);\n try {\n const containerClient = this.getContainerClient(destinationContainerName);\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerRenameResponse = await containerContext.rename(sourceContainerName, {\n ...updatedOptions,\n sourceLeaseId: options.sourceCondition?.leaseId,\n });\n return { containerClient, containerRenameResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the properties of a storage account’s Blob service, including properties\n * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * @param options - Options to the Service Get Properties operation.\n * @returns Response data for the Service Get Properties operation.\n */\n public async getProperties(\n options: ServiceGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getProperties\", options);\n try {\n return await this.serviceContext.getProperties({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets properties for a 
storage account’s Blob service endpoint, including properties\n * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties\n *\n * @param properties -\n * @param options - Options to the Service Set Properties operation.\n * @returns Response data for the Service Set Properties operation.\n */\n public async setProperties(\n properties: BlobServiceProperties,\n options: ServiceSetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-setProperties\", options);\n try {\n return await this.serviceContext.setProperties(properties, {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only\n * available on the secondary location endpoint when read-access geo-redundant\n * replication is enabled for the storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats\n *\n * @param options - Options to the Service Get Statistics operation.\n * @returns Response data for the Service Get Statistics operation.\n */\n public async getStatistics(\n options: ServiceGetStatisticsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getStatistics\", options);\n try {\n return await this.serviceContext.getStatistics({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Get Account Information operation returns the sku name and account kind\n * for the specified account.\n * The Get Account Information operation is available on service versions beginning\n * with version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information\n *\n * @param options - Options to the Service Get Account Info operation.\n * @returns Response data for the Service Get Account Info operation.\n */\n public async getAccountInfo(\n options: ServiceGetAccountInfoOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getAccountInfo\", options);\n try {\n return await this.serviceContext.getAccountInfo({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns a list of the containers under the specified account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to the Service List Container Segment operation.\n * @returns Response data for the Service List Container Segment operation.\n */\n private async listContainersSegment(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-listContainersSegment\", options);\n\n try {\n return await this.serviceContext.listContainersSegment({\n abortSignal: options.abortSignal,\n marker,\n ...options,\n include: typeof options.include === \"string\" ? [options.include] : options.include,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags\n * match a given search expression. Filter blobs searches across all containers within a\n * storage account but can be scoped within the expression to a single container.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"BlobServiceClient-findBlobsByTagsSegment\",\n options\n );\n\n try {\n const response = await this.serviceContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ServiceFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter 
enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ServiceFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list containers operation.\n */\n private async *listSegments(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let listContainersSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listContainersSegmentResponse = await this.listContainersSegment(marker, options);\n listContainersSegmentResponse.containerItems =\n listContainersSegmentResponse.containerItems || [];\n marker = listContainersSegmentResponse.continuationToken;\n yield await listContainersSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for Container Items\n *\n * @param options - Options to list containers operation.\n */\n private async *listItems(\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.listSegments(marker, options)) {\n yield* segment.containerItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the containers\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the containers in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const container of blobServiceClient.listContainers()) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.listContainers();\n * let containerItem = await iter.next();\n * while (!containerItem.done) {\n * console.log(`Container ${i++}: ${containerItem.value.name}`);\n * containerItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .listContainers()\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * ```\n *\n * @param options - Options to list containers.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listContainers(\n options: ServiceListContainersOptions = {}\n ): PagedAsyncIterableIterator {\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const include: ListContainersIncludeType[] = [];\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSystem) {\n 
include.push(\"system\");\n }\n\n // AsyncIterableIterator to iterate over containers\n const listSegmentOptions: ServiceListContainersSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include } : {}),\n };\n\n const iter = this.listItems(listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).\n *\n * Retrieves a user delegation key for the Blob service. This is only a valid operation when using\n * bearer token authentication.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key\n *\n * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time\n * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time\n */\n public async getUserDelegationKey(\n startsOn: Date,\n expiresOn: Date,\n options: ServiceGetUserDelegationKeyOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getUserDelegationKey\", options);\n try {\n const response = await this.serviceContext.getUserDelegationKey(\n {\n startsOn: truncatedISO8061Date(startsOn, false),\n expiresOn: truncatedISO8061Date(expiresOn, false),\n },\n {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n const userDelegationKey = {\n signedObjectId: response.signedObjectId,\n signedTenantId: response.signedTenantId,\n signedStartsOn: new Date(response.signedStartsOn),\n signedExpiresOn: new Date(response.signedExpiresOn),\n signedService: response.signedService,\n signedVersion: response.signedVersion,\n value: response.value,\n };\n\n const res: ServiceGetUserDelegationKeyResponse = {\n _response: response._response,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n version: response.version,\n date: response.date,\n errorCode: response.errorCode,\n ...userDelegationKey,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this service.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n\n /**\n * Only available for BlobServiceClient constructed with a shared key credential.\n *\n * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas\n *\n * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. 
Default to an hour later if not provided.\n * @param permissions - Specifies the list of permissions to be associated with the SAS.\n * @param resourceTypes - Specifies the resource types associated with the shared access signature.\n * @param options - Optional parameters.\n * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateAccountSasUrl(\n expiresOn?: Date,\n permissions: AccountSASPermissions = AccountSASPermissions.parse(\"r\"),\n resourceTypes: string = \"sco\",\n options: ServiceGenerateAccountSasUrlOptions = {}\n ): string {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw RangeError(\n \"Can only generate the account SAS when the client is initialized with a shared key credential\"\n );\n }\n\n if (expiresOn === undefined) {\n const now = new Date();\n expiresOn = new Date(now.getTime() + 3600 * 1000);\n }\n\n const sas = generateAccountSASQueryParameters(\n {\n permissions,\n expiresOn,\n resourceTypes,\n services: AccountSASServices.parse(\"b\").toString(),\n ...options,\n },\n this.credential\n ).toString();\n\n return appendToURLQuery(this.url, sas);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js new file mode 100644 index 00000000..438cb337 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js @@ -0,0 +1,2751 @@ +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +import { generateUuid, getDefaultProxySettings, isNode, isTokenCredential, URLBuilder, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { BlobDownloadResponse } from "./BlobDownloadResponse"; +import { BlobQueryResponse } from "./BlobQueryResponse"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from "./generated/src/operations"; +import { ensureCpkIfSpecified, toAccessTier, } from "./models"; +import { rangeResponseFromModel, } from "./PageBlobRangeResponse"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { BlobBeginCopyFromUrlPoller, } from "./pollers/BlobStartCopyFromUrlPoller"; +import { rangeToString } from "./Range"; +import { StorageClient } from "./StorageClient"; +import { Batch } from "./utils/Batch"; +import { BufferScheduler } from "../../storage-common/src"; +import { BlobDoesNotUseCustomerSpecifiedEncryption, BlobUsesCustomerSpecifiedEncryptionMsg, BLOCK_BLOB_MAX_BLOCKS, BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES, BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES, DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES, DEFAULT_BLOCK_BUFFER_SIZE_BYTES, DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS, ETagAny, URLConstants, } from "./utils/constants"; +import { createSpan, convertTracingToRequestOptionsBase } from "./utils/tracing"; +import { appendToURLPath, appendToURLQuery, extractConnectionStringParts, ExtractPageRangeInfoItems, generateBlockID, getURLParameter, httpAuthorizationToString, isIpEndpointStyle, parseObjectReplicationRecord, setURLParameter, toBlobTags, toBlobTagsString, toQuerySerialization, toTags, } from "./utils/utils.common"; +import { fsCreateReadStream, fsStat, readStreamToLocalFile, streamToBuffer, } from "./utils/utils.node"; +import { 
generateBlobSASQueryParameters } from "./sas/BlobSASSignatureValues"; +import { BlobLeaseClient } from "./BlobLeaseClient"; +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +export class BlobClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" 
+ + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = new StorageBlob(this.storageClientContext); + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlobClient-download", options); + try { + const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-exists", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + async getProperties(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); + try { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-delete", options); + options.conditions = options.conditions || {}; + try { + return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. 
+ */ + async undelete(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-undelete", options); + try { + return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. 
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setTags", options); + try { + return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getTags", options); + try { + const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + async createSnapshot(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. 
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args), + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options, + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + await poller.poll(); + return poller; + } + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. 
+ */ + async abortCopyFromURL(copyId, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + try { + return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. 
+ */ + async setAccessTier(tier, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + try { + return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + try { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + // Customer doesn't specify length, get it + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + } + } + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + options.blockSize) { + batch.addOperation(async () => { + // Exclusive chunk end position + let chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + }); + const stream = response.readableStreamBody; + await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + try { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined. 
+ response.blobDownloadStream = undefined; + return response; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
+ */ + async startCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options) { + const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + try { + return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set immutablility policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. 
+ */ + async setImmutabilityPolicy(immutabilityPolicy, options) { + const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + try { + return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options) { + const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + try { + return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +export class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential. 
+ pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = new AppendBlob(this.storageClientContext); + } + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); + const conditions = { ifNoneMatch: ETagAny }; + try { + const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + async seal(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + options.conditions = options.conditions || {}; + try { + return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. 
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +export class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = new BlockBlob(this.storageClientContext); + this._blobContext = new StorageBlob(this.storageClientContext); + } + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + async query(query, options = {}) { + var _a; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + try { + if (!isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + async upload(body, contentLength, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + async syncUploadFromURL(sourceURL, options = {}) { + var _a, _b, _c, _d, _e; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. 
+ */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + async commitBlockList(blocks, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. 
+ */ + async getBlockList(listType, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); + try { + const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); + try { + if (isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. 
+ */ + async uploadBrowserData(browserData, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); + try { + const browserBlob = new Blob([browserData]); + return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || + options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); + try { + if (size <= options.maxSingleShotSize) { + return await this.upload(bodyFactory(0, size), size, updatedOptions); + } + const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = generateUuid(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = options.blockSize * i; + const end = i === numBlocks - 1 ? 
size : start + options.blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadFile(filePath, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); + try { + const size = (await fsStat(filePath)).size; + return await this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. 
+ */ + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + try { + let blockNum = 0; + const blockIDPrefix = generateUuid(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +export class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.pageBlobContext = new PageBlob(this.storageClientContext); + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + async uploadPages(body, offset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. 
+ */ + async clearPages(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + try { + return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + async getPageRanges(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + try { + return await this.pageBlobContext + .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); + try { + return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + return __asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield __await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield __await(yield __await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItems(offset = 0, count, options = {}) { + return __asyncGenerator(this, arguments, function* listPageRangeItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = __asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeItems(offset, count, options); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. 
+ */ + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); + try { + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + offset: offset, + count: count, + }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. 
+ * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + return __asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield __await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield __await(yield __await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return __asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = __asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRangesDiff(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
+ * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + async resize(size, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); + try { + return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); + try { + return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. 
+ * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + async startCopyIncremental(copySource, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); + try { + return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +//# sourceMappingURL=Clients.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map new file mode 100644 index 00000000..0f8b3e7a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"Clients.js","sourceRoot":"","sources":["../../../src/Clients.ts"],"names":[],"mappings":";AAGA,OAAO,EACL,YAAY,EACZ,uBAAuB,EAGvB,MAAM,EACN,iBAAiB,EAGjB,UAAU,GACX,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAGrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,UAAU,EAAE,IAAI,IAAI,WAAW,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,4BAA4B,CAAC;AA8ClG,OAAO,EAKL,oBAAoB,EAMpB,YAAY,GAQb,MAAM,UAAU,CAAC;AAClB,OAAO,EAGL,sBAAsB,GACvB,MAAM,yBAAyB,CAAC;AACjC,OAAO,EAAE,WAAW,EAAgB,cAAc,EAA0B,MAAM,YAAY,CAAC;AAC/F,OAAO,EACL,0BAA0B,GAG3B,MAAM,sCAAsC,CAAC;AAC9C,OAAO,EAAS,aAAa,EAAE,MAAM,SAAS,CAAC;AAC/C,OAAO,EAAiB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAC/D,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAC3D,OAAO,EACL,yCAAyC,EACzC,sCAAsC,EACtC,qBAAqB,EACrB,gCAAgC,EAChC,gCAAgC,EAChC,iCAAiC,EACjC,+BAA+B,EAC/B,mCAAmC,EACnC,OAAO,EACP,YAAY,GACb,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,UAAU,EAAE,kCAAkC,EAAE,MAAM,iBAAiB,CAAC;AACjF,OAAO,EACL,eAAe,EACf,gBAAgB,EAChB,4BAA4B,EAC5B,yBAAyB,EACzB,eAAe,EACf,eAAe,EACf,yBAAyB,EACzB,iBAAiB,EACjB,4BAA4B,EAC5B,eAAe,EACf,UAAU,EACV,gBAAgB,EAChB,oBAAoB,EACpB,MAAM,GACP,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EACL,kBAAkB,EAClB,MAAM,EACN,qBAAqB,EACrB,cAAc,GACf,MAAM,oBAAoB,CAAC;AAG5B,OAAO,EAAE,8BAA8B,EAAE,MAAM,8BAA8B,CAAC;AAE9E,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAssBpD;;;GAGG;AACH,MAAM,OAAO,UAAW,SAAQ,aAAa;IAqF3C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QAED,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,aAAa,EAAE,IAAI,CAAC,cAAc,EAAE;YAC3D,IAAI,CAAC,+BAA+B,EAAE,CAAC,CAAC;QAC1C,IAAI,CAAC,WAAW,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAE9D,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CA
AC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAW,CAAC;QACvF,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,CAAW,CAAC;IAC3F,CAAC;IArKD;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;OAEG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;IAC7B,CAAC;IA2JD;;;;;;OAMG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACI,WAAW,CAAC,SAAiB;QAClC,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,SAAS,EACjC,SAAS,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAC/C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;OAGG;IACI,mBAAmB;QACxB,OAAO,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACI,kBAAkB;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;OAGG;IACI,iBAAiB;QACtB,OAAO,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACrD,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA0DG;IACI,KAAK,CAAC,QAAQ,CACnB,SAAiB,CAAC,EAClB,KAAc,EACd,UAA+B,EAAE;;QAEjC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEhE,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAE5E,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBACzC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,kBAAkB,EAAE,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,+DAA+D;iBAC7H,EACD,KAAK,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAC5E,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB,EAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB,IACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,UAAU,mCACX,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,GAC5F,CAAC;YACF,sCAAsC;YACtC,IAAI,CAAC,MAAM,EAAE;gBACX,OAAO,UAAU,CAAC;aACnB;YAED,8EAA8E;YAC9E,uEAAuE;YACvE,uEAAuE;YACvE,sGAAsG;YACtG,gDAAgD;YAChD,IAAI,OAAO,CAAC,gBAAgB,KAAK,SAAS,IAAI,OAAO,CAAC,gBAAgB,GAAG,CAAC,EAAE;gBAC1E,uDAAuD;gBACvD,OAAO,CAAC,gBAAgB,GAAG,mCAAmC,CAAC;aAChE;YAED,IAAI,GAAG,CAAC,aAAa,KAAK,SAAS,EAAE;gBACnC,MAAM,IAAI,UAAU,CAAC,oEAAoE,CAAC,CAAC;aAC5F;YAED,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE;gBACb,MAAM,IAAI,UAAU,CAAC,0DAA0D,CAAC,CAAC;aAClF;YAED,OAAO,IAAI,oBAAoB,CAC7B,UAAU,EACV,KAAK,EAAE,KAAa,EAAkC,EAAE;;gBACtD,MAAM,sBAAsB,GAA+B;oBACzD,qBAAqB,EAAE,OAAO,CAAC,UAAU;oBACzC,wBAAwB,EAAE;wBACxB,OAAO,EAAE,OAAO,CAAC,UAAW,CAAC,OAAO,IAAI,GAAG,CAAC,IAAI;wBAChD,eAAe,EAAE,OAAO,CAAC,UAAW,CAAC,eAAe;wBACpD,WAAW,EAAE,OAAO,CAAC,UAAW,CAAC,WAAW;wBAC5C,iBAAiB,EAAE,OAAO,CAAC,UAAW,CAAC,iBAAiB;wBACxD,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa;qBAC1C;oBACD,KAAK,EAAE,aAAa,CAAC;wBACnB,KAAK,EAAE,MAAM,GAAG,GAAG,CAAC,aAAc,GAAG,KAAK;wBAC1C,MAAM,EAAE,KAAK;qBACd,CAAC;oBACF,kBAAkB,EAAE,OAAO,CAAC,kBAAkB;oBAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB;oBAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ;oBAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB;iBACrC,CAAC;gBAEF,qBAAqB;gBACrB,eAAe;gBACf,0CAA0C;gBAC1C,2BAA2B;gBAC3B,mDAAmD;gBACnD,KAAK;gBAEL,OAAO,CACL,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBAC7B,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,sBAAsB,EACzB,CACH,CAAC,kBAAmB,CAAC;YACxB,CAAC,EACD,MAAM,EACN,GAAG,CAAC,aAAc,
EAClB;gBACE,gBAAgB,EAAE,OAAO,CAAC,gBAAgB;gBAC1C,UAAU,EAAE,OAAO,CAAC,UAAU;aAC/B,CACF,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CAAC,UAA6B,EAAE;QACjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;gBAChD,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,cAAc,EAAE,cAAc,CAAC,cAAc;aAC9C,CAAC,CAAC;YACH,OAAO,IAAI,CAAC;SACb;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;gBACxB,kDAAkD;gBAClD,OAAO,KAAK,CAAC;aACd;iBAAM,IACL,CAAC,CAAC,UAAU,KAAK,GAAG;gBACpB,CAAC,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,sCAAsC;oBAC7D,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,yCAAyC,CAAC,EACpE;gBACA,kDAAkD;gBAClD,OAAO,IAAI,CAAC;aACb;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,aAAa,CACxB,UAAoC,EAAE;;QAEtC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;YAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,aAAa,iBAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,IACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,uCACK,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,IAC3F;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CAAC,UAA6B,EAAE;;QACjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,MAAM,iBAClC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,cAAc,CACzB,UAA6B,EAAE;;QAE/B,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,cAAc,EAAE;gBAC3C,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,wEAAwE;iBAClF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,QAAQ,CAAC,UAA+B,EAAE;QACrD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAA
C,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAC5E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBACpC,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACI,KAAK,CAAC,cAAc,CACzB,eAAiC,EACjC,UAAqC,EAAE;;QAEvC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,iBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,eAAe,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAGxC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,WAAW,CACtB,QAAmB,EACnB,UAAkC,EAAE;;QAEpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,iBACvC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,OAAO,CAAC,IAAU,EAAE,UAA8B,EAAE;;QAC/D,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,+BACnC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,KACrD,IAAI,EAAE,UAAU,CAAC,IAAI,CAAC,IACtB,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,OAAO,CAAC,UAA8B,EAAE;;QACnD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YACH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,IAAI,EAAE,MAAM,CAAC,EAAE,UAAU,EAAE,QAAQ,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE,GACxD,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACI,kBAAkB,CAAC,cAAuB;QAC/C,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACI,KAAK,CAAC,cAAc,CACzB,UAAqC,EAAE;;QAEvC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OA
AO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,iBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAuEG;IACI,KAAK,CAAC,gBAAgB,CAC3B,UAAkB,EAClB,UAAuC,EAAE;QAIzC,MAAM,MAAM,GAAyB;YACnC,gBAAgB,EAAE,CAAC,GAAG,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;YAC7D,aAAa,EAAE,CAAC,GAAG,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;YACvD,gBAAgB,EAAE,CAAC,GAAG,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;SAC9D,CAAC;QACF,MAAM,MAAM,GAAG,IAAI,0BAA0B,CAAC;YAC5C,UAAU,EAAE,MAAM;YAClB,UAAU;YACV,YAAY,EAAE,OAAO,CAAC,YAAY;YAClC,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,uBAAuB,EAAE,OAAO;SACjC,CAAC,CAAC;QAEH,qDAAqD;QACrD,8DAA8D;QAC9D,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QAEpB,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,gBAAgB,CAC3B,MAAc,EACd,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,MAAM,kBACnD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,IACtC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,eAAe,CAC1B,UAAkB,EAClB,UAAsC,EAAE;;QAExC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,kBAClD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,cAAc,EAAE,OAAO,CAAC,cAAc,IACnC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,aAAa,CACxB,IAAkD,EAClD,UAA8B,EAAE;;QAEhC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,YAAY,CAAC,IAAI,CAAE,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI
,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IA8CM,KAAK,CAAC,gBAAgB,CAC3B,MAAwB,EACxB,MAAe,EACf,MAA6C,EAC7C,SAAsC,EAAE;QAExC,IAAI,MAA0B,CAAC;QAC/B,IAAI,MAAM,GAAG,CAAC,CAAC;QACf,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,OAAO,GAAG,MAAM,CAAC;QACrB,IAAI,MAAM,YAAY,MAAM,EAAE;YAC5B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,IAAI,CAAC,CAAC;YACrB,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;SACjD;aAAM;YACL,MAAM,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;YACjD,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;YAChD,OAAO,GAAI,MAAsC,IAAI,EAAE,CAAC;SACzD;QACD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;YACF,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;gBACtB,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;aACvB;YACD,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,EAAE;gBACzB,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC;aACvD;YACD,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;gBAC3B,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;aACvD;YAED,IAAI,MAAM,GAAG,CAAC,EAAE;gBACd,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;aACpD;YAED,IAAI,KAAK,IAAI,KAAK,IAAI,CAAC,EAAE;gBACvB,MAAM,IAAI,UAAU,CAAC,qCAAqC,CAAC,CAAC;aAC7D;YAED,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;gBACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;aACzB;YAED,0CAA0C;YAC1C,IAAI,CAAC,KAAK,EAAE;gBACV,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,iCACpC,OAAO,KACV,cAAc,kCACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;gBACH,KAAK,GAAG,QAAQ,CAAC,aAAc,GAAG,MAAM,CAAC;gBACzC,IAAI,KAAK,GAAG,CAAC,EAAE;oBACb,MAAM,IAAI,UAAU,CAClB,UAAU,MAAM,uCAAuC,QAAQ,CAAC,aAAc,EAAE,CACjF,CAAC;iBACH;aACF;YAED,oEAAoE;YACpE,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI;oBACF,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;iBAC9B;gBAAC,OAAO,KAAU,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,0CAA0C,KAAK,qJAAqJ,KAAK,CAAC,OAAO,EAAE,CACpN,CAAC;iBACH;aACF;YAED,IAAI,MAAM,CAAC,MAAM,GAAG,KAAK,EAAE;gBACzB,MAAM,IAAI,UAAU,CAClB,mFAAmF,KAAK,EAAE,CAC3F,CAAC;aACH;YAED,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;YAC7C,KAAK,IAAI,GAAG,GAAG,MAAM,EAAE,GAAG,GAAG,MAAM,GAAG,KAAK,EAAE,GAAG,GAAG,GAAG,GAAG,OAAO,CAAC,SAAS,EAAE;gBAC1E,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;oBAC5B,+BAA+B;oBAC/B,IAAI,QAAQ,GAAG,MAAM,GAAG,KAAM,CAAC;oBAC/B,IAAI,GAAG,GAAG,OAAO,CAAC,SAAU,GAAG,QAAQ,EAAE;wBACvC,QAAQ,GAAG,GAAG,GAAG,OAAO,CAAC,SAAU,CAAC;qBACrC;oBACD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,QAAQ,GAAG,GAAG,EAAE;wBACxD,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,gBAAgB,EAAE,OAAO,CAAC,wBAAwB;wBAClD,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;wBAChD,cAAc,kCACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,CACtD;qBACF,CAAC,CAAC;oBACH,MAAM,MAAM,GAAG,QAAQ,CAAC,kBAAmB,CAAC;oBAC5C,MAAM,cAAc,CAAC,MAAM,EAAE,MAAO,EAAE,GAAG,GAAG,MAAM,EAAE,QAAQ,GAAG,MAAM,CAAC,CAAC;oBACvE,qEAAqE;oBACrE,sEAAsE;oBACtE,oDAAoD;oBACpD,gBAAgB,IAAI,QAAQ,GAAG,GAAG,CAAC;oBACnC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;qBACvD;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;YACjB,OAAO,MAAM,CAAC;SACf;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACI,KAAK,CAAC,cAAc,CACzB,QAAgB,EAChB,SAAiB,CAAC,EAClB,KAAc,EACd,UAA+B,EAAE;QAEjC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,KAAK,kCAC7C,OAAO,KACV,cAAc,kCACT,OA
AO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;YACH,IAAI,QAAQ,CAAC,kBAAkB,EAAE;gBAC/B,MAAM,qBAAqB,CAAC,QAAQ,CAAC,kBAAkB,EAAE,QAAQ,CAAC,CAAC;aACpE;YAED,iEAAiE;YAChE,QAAgB,CAAC,kBAAkB,GAAG,SAAS,CAAC;YACjD,OAAO,QAAQ,CAAC;SACjB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAEO,+BAA+B;QACrC,IAAI,aAAa,CAAC;QAClB,IAAI,QAAQ,CAAC;QACb,IAAI;YACF,mCAAmC;YACnC,wEAAwE;YACxE,8DAA8D;YAC9D,8EAA8E;YAC9E,oEAAoE;YACpE,oGAAoG;YACpG,6DAA6D;YAE7D,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAE7C,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;gBACjD,gEAAgE;gBAChE,oCAAoC;gBACpC,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;gBACtE,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;gBACnC,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;aAC/B;iBAAM,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;gBACvC,mGAAmG;gBACnG,6HAA6H;gBAC7H,qDAAqD;gBACrD,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;gBAC9E,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;gBACnC,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;aAC/B;iBAAM;gBACL,iDAAiD;gBACjD,oCAAoC;gBACpC,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;gBACtE,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;gBACnC,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;aAC/B;YAED,+GAA+G;YAC/G,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAClD,QAAQ,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;YAExC,mEAAmE;YACnE,0GAA0G;YAC1G,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;YAExC,IAAI,CAAC,aAAa,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;YAED,OAAO,EAAE,QAAQ,EAAE,aAAa,EAAE,CAAC;SACpC;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;SAC5F;IACH,CAAC;IAED;;;;;;;;;;;;OAYG;IACK,KAAK,CAAC,gBAAgB,CAC5B,UAAkB,EAClB,UAAuC,EAAE;;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,UAAU,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;oBACnE,YAAY,EAAE,OAAO,CAAC,gBAAgB,CAAC,aAAa;iBACrD,EACD,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,EAC5C,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,QAAQ,EAAE,OAAO,CAAC,QAAQ,IACvB,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,cAAc,CAAC,OAAkC;QACtD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;YAC7B,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;gBAC5D,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;aACH;YAED,MAAM,GAAG,GAAG,8BAA8B,iBAEtC,aAAa,EAAE,IAAI,CAAC,cAAc,EAClC,QAAQ,EAAE,IAAI,CAAC,KAAK,EACpB,YAAY,EAAE,IAAI,CAAC,SAAS,EAC5B,SAAS,EAAE,IAAI,CAAC,UAAU,IACvB,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;IACL,CAAC;IA
ED;;;;OAIG;IACI,KAAK,CAAC,wBAAwB,CACnC,OAA6C;QAE7C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,wBAAwB,iBACpD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,IAC9B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,qBAAqB,CAChC,kBAA0C,EAC1C,OAA0C;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,qBAAqB,iBACjD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACjC,wBAAwB,EAAE,kBAAkB,CAAC,UAAU,EACvD,sBAAsB,EAAE,kBAAkB,CAAC,UAAU,EACrD,wBAAwB,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,uBAAuB,IACvD,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,YAAY,CACvB,gBAAyB,EACzB,OAAiC;QAEjC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,gBAAgB,kBACzD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,IAC9B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AA4ND;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,UAAU;IAsE9C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,kHAAkH;QAClH,wFAAwF;QACxF,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,qKAAqK;YACrK,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,+DAA+D;YAC/D,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,iBAAiB,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,oBAAoB,CAA
C,CAAC;IACrE,CAAC;IAED;;;;;;;OAOG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,gBAAgB,CACzB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;OAaG;IACI,KAAK,CAAC,MAAM,CAAC,UAAmC,EAAE;;QACvD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC,kBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,iBAAiB,CAC5B,UAA8C,EAAE;;QAEhD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oCAAoC,EAAE,OAAO,CAAC,CAAC;QAC3F,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;QAC5C,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,iCACxB,cAAc,KACjB,UAAU,IACV,CAAC;YACH,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,4EAA4E;iBACtF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,IAAI,CAAC,UAAiC,EAAE;;QACnD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,iBACtC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;OAuBG;IACI,KAAK,CAAC,WAAW,CACtB,IAAqB,EACrB,aAAqB,EACrB,UAAwC,EAAE;;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,WAAW,CAAC,aAAa,EAAE,IAAI,kBACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IA
AI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;OAaG;IACI,KAAK,CAAC,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,KAAa,EACb,UAA+C,EAAE;;QAEjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAC1D,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,kBAAkB,CAAC,SAAS,EAAE,CAAC,kBACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC3D,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AAmkBD;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,UAAU;IA8E7C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,kHAAkH;QAClH,wFAAwF;QACxF,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACjE,CAAC;IAED;;;;;;
;OAOG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,eAAe,CACxB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA6BG;IACI,KAAK,CAAC,KAAK,CAChB,KAAa,EACb,UAAiC,EAAE;;QAEnC,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEhE,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAE9E,IAAI;YACF,IAAI,CAAC,MAAM,EAAE;gBACX,MAAM,IAAI,KAAK,CAAC,wDAAwD,CAAC,CAAC;aAC3E;YACD,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,KAAK,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,YAAY,EAAE;oBACZ,SAAS,EAAE,KAAK;oBAChB,UAAU,EAAE,KAAK;oBACjB,kBAAkB,EAAE,oBAAoB,CAAC,OAAO,CAAC,sBAAsB,CAAC;oBACxE,mBAAmB,EAAE,oBAAoB,CAAC,OAAO,CAAC,uBAAuB,CAAC;iBAC3E,EACD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,IACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YACH,OAAO,IAAI,iBAAiB,CAAC,QAAQ,EAAE;gBACrC,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,OAAO,EAAE,OAAO,CAAC,OAAO;aACzB,CAAC,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACI,KAAK,CAAC,MAAM,CACjB,IAAqB,EACrB,aAAqB,EACrB,UAAkC,EAAE;;QAEpC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,kBAC3D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;OAiBG;IAEI,KAAK,CAAC,iBAAiB,CAC5B,SAAiB,EACjB,UAA6C,EAAE;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EAAE,SAAS,gDACzD,OAAO,KACV,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,OAAO,CAAC,UAAU,CAAC,aAAa,KAE1C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,OAAO;oBAChD,qBAAqB,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,eAAe;oBAChE,iBAAiB,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,WAAW;oBACxD,uBAAuB,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,iBAAiB;oBACpE,YAAY,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,aAAa;iBACtD,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,cAAc,EAAE,OAAO,CA
AC,cAAc,KACnC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,UAAU,CACrB,OAAe,EACf,IAAqB,EACrB,aAAqB,EACrB,UAAsC,EAAE;QAExC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,EAAE,aAAa,EAAE,IAAI,kBACxE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;OAoBG;IACI,KAAK,CAAC,iBAAiB,CAC5B,OAAe,EACf,SAAiB,EACjB,SAAiB,CAAC,EAClB,KAAc,EACd,UAA6C,EAAE;QAE/C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,OAAO,EAAE,CAAC,EAAE,SAAS,kBACxE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,WAAW,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAClF,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,IAC5E,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,eAAe,CAC1B,MAAgB,EAChB,UAA2C,EAAE;;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAChD,EAAE,MAAM,EAAE,MAAM,EAAE,kBAEhB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,YAAY,CACvB,QAAuB,EACvB,UAAwC,EAAE;;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,QAAQ,kBAC3D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,IAAI,CAAC,GAAG,CAAC,
eAAe,EAAE;gBACxB,GAAG,CAAC,eAAe,GAAG,EAAE,CAAC;aAC1B;YAED,IAAI,CAAC,GAAG,CAAC,iBAAiB,EAAE;gBAC1B,GAAG,CAAC,iBAAiB,GAAG,EAAE,CAAC;aAC5B;YAED,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED,uBAAuB;IAEvB;;;;;;;;;;;;;;OAcG;IACI,KAAK,CAAC,UAAU,CACrB,IAAmD,EACnD,UAA0C,EAAE;QAE5C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,IAAI,MAAM,EAAE;gBACV,IAAI,MAAc,CAAC;gBACnB,IAAI,IAAI,YAAY,MAAM,EAAE;oBAC1B,MAAM,GAAG,IAAI,CAAC;iBACf;qBAAM,IAAI,IAAI,YAAY,WAAW,EAAE;oBACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBAC5B;qBAAM;oBACL,IAAI,GAAG,IAAuB,CAAC;oBAC/B,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;iBACrE;gBAED,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,EAAU,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAC7E,MAAM,CAAC,UAAU,EACjB,cAAc,CACf,CAAC;aACH;iBAAM;gBACL,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;gBACrC,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,EAAQ,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;aACH;SACF;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;OAkBG;IACI,KAAK,CAAC,iBAAiB,CAC5B,WAAiD,EACjD,UAA0C,EAAE;QAE5C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;YAC5C,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAc,EAAE,IAAY,EAAQ,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACK,KAAK,CAAC,sBAAsB,CAClC,WAA8D,EAC9D,IAAY,EACZ,UAA0C,EAAE;QAE5C,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;YACtB,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,IAAI,OAAO,CAAC,SAAS,GAAG,gCAAgC,EAAE;YACjF,MAAM,IAAI,UAAU,CAClB,wCAAwC,gCAAgC,EAAE,CAC3E,CAAC;SACH;QAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;YACjE,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,CAAC;SAC9D;QACD,IACE,OAAO,CAAC,iBAAiB,GAAG,CAAC;YAC7B,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,EAC5D;YACA,MAAM,IAAI,UAAU,CAClB,gDAAgD,gCAAgC,EAAE,CACnF,CAAC;SACH;QAED,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;YAC3B,IAAI,IAAI,GAAG,gCAAgC,GAAG,qBAAqB,EAAE;gBACnE,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,2CAA2C,CAAC,CAAC;aAC1E;YACD,IAAI,IAAI,GAAG,OAAO,CAAC,iBAAiB,EAAE;gBACpC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC,CAAC;gBAC5D,IAAI,OAAO,CAAC,SAAS,GAAG,iCAAiC,EAAE;oBACzD,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;iBACvD;aACF;SACF;QACD,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;YAC5B,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;SAC9B;QACD,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;YACF,IAAI,IAAI,IAAI,OAAO,CAAC,iBAAiB,EAAE;gBACrC,OAAO,MAAM,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,CAAC,CAAC;aACtE;YAED,MAAM,SAAS,GAAW,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;YACzE,IAAI,SAAS,GAAG,qBAAqB,EAAE;gBACrC,MAAM,IAAI,UAAU,CAClB,6DAA6D;oBAC3D,m
CAAmC,qBAAqB,EAAE,CAC7D,CAAC;aACH;YAED,MAAM,SAAS,GAAa,EAAE,CAAC;YAC/B,MAAM,aAAa,GAAG,YAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YAEjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;YAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;gBAClC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAkB,EAAE;oBAC1C,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC;oBAClD,MAAM,KAAK,GAAG,OAAO,CAAC,SAAU,GAAG,CAAC,CAAC;oBACrC,MAAM,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,GAAG,OAAO,CAAC,SAAU,CAAC;oBACpE,MAAM,aAAa,GAAG,GAAG,GAAG,KAAK,CAAC;oBAClC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;oBACxB,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,CAAC,EAAE,aAAa,EAAE;wBAC/E,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;wBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;qBAC9C,CAAC,CAAC;oBACH,0FAA0F;oBAC1F,kEAAkE;oBAClE,gBAAgB,IAAI,aAAa,CAAC;oBAClC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAW,CAAC;4BAClB,WAAW,EAAE,gBAAgB;yBAC9B,CAAC,CAAC;qBACJ;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;YAEjB,OAAO,IAAI,CAAC,eAAe,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;SACxD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;OAYG;IACI,KAAK,CAAC,UAAU,CACrB,QAAgB,EAChB,UAA0C,EAAE;QAE5C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,MAAM,IAAI,GAAG,CAAC,MAAM,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;YAC3C,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE;gBAChB,OAAO,GAAG,EAAE,CACV,kBAAkB,CAAC,QAAQ,EAAE;oBAC3B,SAAS,EAAE,IAAI;oBACf,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ;oBAC1C,KAAK,EAAE,MAAM;iBACd,CAAC,CAAC;YACP,CAAC,EACD,IAAI,kCAEC,OAAO,KACV,cAAc,kCACT,OAAQ,CAAC,cAAc,GACvB,kCAAkC,CAAC,cAAc,CAAC,KAG1D,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACI,KAAK,CAAC,YAAY,CACvB,MAAgB,EAChB,aAAqB,+BAA+B,EACpD,iBAAyB,CAAC,EAC1B,UAAwC,EAAE;QAE1C,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;YAC5B,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;SAC9B;QACD,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IAAI;YACF,IAAI,QAAQ,GAAG,CAAC,CAAC;YACjB,MAAM,aAAa,GAAG,YAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,SAAS,GAAa,EAAE,CAAC;YAE/B,MAAM,SAAS,GAAG,IAAI,eAAe,CACnC,MAAM,EACN,UAAU,EACV,cAAc,EACd,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE;gBACrB,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;gBACzD,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gBACxB,QAAQ,EAAE,CAAC;gBAEX,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE;oBAC3C,UAAU,EAAE,OAAO,CAAC,UAAU;oBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;oBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;iBAC9C,CAAC,CAAC;gBAEH,0FAA0F;gBAC1F,gBAAgB,IAAI,MAAM,CAAC;gBAC3B,IAAI,OAAO,CAAC,UAAU,EAAE;oBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;iBACvD;YACH,CAAC;YACD,kFAAkF;YAClF,2EAA2E;YAC3E,iDAAiD;YACjD,qCAAqC;YACrC,IAAI,CAAC,IAAI,CAAC,CAAC,cAAc,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CACpC,CAAC;YACF,MAAM,SAAS,CAAC,EAAE,EAAE,CAAC;YAErB,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,SAAS,kCACtC,OAAO,KACV,cAAc,kCACT,OAAQ,CAAC,cAAc,GACvB,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OA
AO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AAoaD;;GAEG;AACH,MAAM,OAAO,cAAe,SAAQ,UAAU;IA8D5C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,kHAAkH;QAClH,wFAAwF;QACxF,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,eAAe,GAAG,IAAI,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACjE,CAAC;IAED;;;;;;;OAOG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,cAAc,CACvB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,kBAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,iBAAiB,CAC5B,IAAY,EACZ,UAA4C,EAAE;;QAE9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC
,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;YAC5C,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,kCAC7B,OAAO,KACV,UAAU,EACV,cAAc,EAAE,cAAc,CAAC,cAAc,IAC7C,CAAC;YACH,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,4EAA4E;iBACtF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,WAAW,CACtB,IAAqB,EACrB,MAAc,EACd,KAAa,EACb,UAAsC,EAAE;;QAExC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,KAAK,EAAE,IAAI,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,UAAkB,EAClB,KAAa,EACb,UAA6C,EAAE;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAC1D,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAClD,SAAS,EACT,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC9C,CAAC,EACD,aAAa,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC,kBAE1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,IAC5E,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,UAAU,CACrB,SAAiB,CAAC,EAClB,KAAc,EACd,UAAqC,EAAE;;QAEvC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,kBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aA
Aa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,aAAa,CACxB,SAAiB,CAAC,EAClB,KAAc,EACd,UAAwC,EAAE;;QAE1C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,aAAa,iBACZ,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,EACrD;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;SACjC;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACK,KAAK,CAAC,qBAAqB,CACjC,SAAiB,CAAC,EAClB,KAAc,EACd,MAAe,EACf,UAAgD,EAAE;;QAElD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,aAAa,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IACD;;;;;;;;;;;;;OAaG;IACY,yBAAyB,CACtC,SAAiB,CAAC,EAClB,KAAc,EACd,MAAe,EACf,UAAgD,EAAE;;YAElD,IAAI,gCAAgC,CAAC;YACrC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,gCAAgC,GAAG,cAAM,IAAI,CAAC,qBAAqB,CACjE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,CAAC;oBACF,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;oBAC5D,oBAAM,cAAM,gCAAgC,CAAA,CAAA,CAAC;iBAC9C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;OAMG;IACY,kBAAkB,CAC/B,SAAiB,CAAC,EAClB,KAAc,EACd,UAAgD,EAAE;;;YAElD,IAAI,MAA0B,CAAC;;gBAC/B,KAAyC,IAAA,KAAA,cAAA,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBALU,MAAM,oBAAoB,WAAA,CAAA;oBAMnC,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;iBACxD;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAsEG;IACI,cAAc,CACnB,SAAiB,CAAC,EAClB,KAAc,EACd,UAAyC,EAAE;QAE3C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,8CAA8C;QAC9C,MAAM,IAAI,GAAG,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;QAC7D,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,CAAC,iBAAiB,kBAC7E,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,OAAO,EACV,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,iBAAiB,CAC5B,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,UAA4C,EAAE;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,iBAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,
qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,YAAY,EAAE,YAAY,EAC1B,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,EACrD;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;SACjC;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;OAaG;IACK,KAAK,CAAC,yBAAyB,CACrC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD;;QAElD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAChG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,iBAAiB,iBACjD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACjC,qBAAqB,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,EAC1C,wBAAwB,kCACnB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,KACtB,MAAM,EAAE,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,0CAAE,aAAa,KAE5C,YAAY,EAAE,iBAAiB,EAC/B,KAAK,EAAE,aAAa,CAAC;oBACnB,MAAM,EAAE,MAAM;oBACd,KAAK,EAAE,KAAK;iBACb,CAAC,EACF,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,IAC9B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IACD;;;;;;;;;;;;;;;OAeG;IACY,6BAA6B,CAC1C,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD;;YAElD,IAAI,gCAAgC,CAAC;YACrC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,gCAAgC,GAAG,cAAM,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,CAAC;oBACF,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;oBAC5D,oBAAM,cAAM,gCAAgC,CAAA,CAAA,CAAC;iBAC9C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;;OAOG;IACY,sBAAsB,CACnC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,OAAkD;;;YAElD,IAAI,MAA0B,CAAC;;gBAC/B,KAAyC,IAAA,KAAA,cAAA,IAAI,CAAC,6BAA6B,CACzE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBANU,MAAM,oBAAoB,WAAA,CAAA;oBAOnC,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;iBACxD;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAuEG;IACI,kBAAkB,CACvB,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,UAA6C,EAAE;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAE9C,8CAA8C;QAC9C,MAAM,IAAI,GAAG,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,KAAK,EAAE,YAAY,oBAC/D,OAAO,EACV,CAAC;QACH,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,6BAA6B,CACvC,MAAM,EACN,KAAK,EACL,YAAY,EACZ,QAAQ,CAAC,iBAAiB,kBAExB,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,OAAO,EAEb,CAAC;YACJ,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,gCAAgC,CAC3C,MAAc,EACd,KAAa,EACb,eAAuB,EACvB,UAA4C,EAAE;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,iDAAiD,EACjD,OAAO,CACR,CAAC;QAEF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,iBAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,eAAe,EACf,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,EACrD;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;SACjC;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IAC
I,KAAK,CAAC,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,IAAI,kBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,oBAAoB,CAC/B,oBAA8C,EAC9C,cAAuB,EACvB,UAA+C,EAAE;;QAEjD,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,oBAAoB,CAAC,oBAAoB,kBACzE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,kBAAkB,EAAE,cAAc,EAClC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;OAYG;IACI,KAAK,CAAC,oBAAoB,CAC/B,UAAkB,EAClB,UAA+C,EAAE;;QAEjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,UAAU,kBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n generateUuid,\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n TransferProgressEvent,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PollerLike, PollOperationState } from \"@azure/core-lro\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Readable } from \"stream\";\n\nimport { BlobDownloadResponse } from \"./BlobDownloadResponse\";\nimport { BlobQueryResponse } from \"./BlobQueryResponse\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from \"./generated/src/operations\";\nimport {\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobCreateResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobCreateSnapshotResponse,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobDownloadResponseModel,\n BlobGetPropertiesResponseModel,\n BlobGetTagsHeaders,\n BlobSetHTTPHeadersResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobSetTierResponse,\n BlobStartCopyFromURLResponse,\n BlobTags,\n BlobUndeleteResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobStageBlockResponse,\n BlockBlobUploadHeaders,\n 
BlockBlobUploadResponse,\n BlockListType,\n CpkInfo,\n DeleteSnapshotsOptionType,\n LeaseAccessConditions,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalResponse,\n PageBlobCreateResponse,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobUploadPagesResponse,\n RehydratePriority,\n SequenceNumberActionType,\n BlockBlobPutBlobFromUrlResponse,\n BlobHTTPHeaders,\n PageBlobGetPageRangesResponseModel,\n PageRangeInfo,\n PageBlobGetPageRangesDiffResponseModel,\n BlobCopySourceTags,\n} from \"./generatedModels\";\nimport {\n AppendBlobRequestConditions,\n BlobDownloadResponseParsed,\n BlobRequestConditions,\n BlockBlobTier,\n ensureCpkIfSpecified,\n Metadata,\n ObjectReplicationPolicy,\n PageBlobRequestConditions,\n PremiumPageBlobTier,\n Tags,\n toAccessTier,\n TagConditions,\n MatchConditions,\n ModificationConditions,\n ModifiedAccessConditions,\n BlobQueryArrowField,\n BlobImmutabilityPolicy,\n HttpAuthorization,\n} from \"./models\";\nimport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n rangeResponseFromModel,\n} from \"./PageBlobRangeResponse\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from \"./Pipeline\";\nimport {\n BlobBeginCopyFromUrlPoller,\n BlobBeginCopyFromUrlPollState,\n CopyPollerBlobClient,\n} from \"./pollers/BlobStartCopyFromUrlPoller\";\nimport { Range, rangeToString } from \"./Range\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { Batch } from \"./utils/Batch\";\nimport { BufferScheduler } from \"../../storage-common/src\";\nimport {\n BlobDoesNotUseCustomerSpecifiedEncryption,\n BlobUsesCustomerSpecifiedEncryptionMsg,\n BLOCK_BLOB_MAX_BLOCKS,\n BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES,\n BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES,\n DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES,\n DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS,\n ETagAny,\n URLConstants,\n} from \"./utils/constants\";\nimport { createSpan, convertTracingToRequestOptionsBase } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n ExtractPageRangeInfoItems,\n generateBlockID,\n getURLParameter,\n httpAuthorizationToString,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n setURLParameter,\n toBlobTags,\n toBlobTagsString,\n toQuerySerialization,\n toTags,\n} from \"./utils/utils.common\";\nimport {\n fsCreateReadStream,\n fsStat,\n readStreamToLocalFile,\n streamToBuffer,\n} from \"./utils/utils.node\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobSASPermissions } from \"./sas/BlobSASPermissions\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldResponse,\n} from \"./generatedModels\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions {\n /**\n * The amount of time in milliseconds the poller should wait between\n * calls to the service to determine the status of the Blob copy.\n * Defaults to 15 seconds.\n */\n intervalInMs?: number;\n /**\n * Callback to receive the state of the copy progress.\n */\n onProgress?: (state: 
BlobBeginCopyFromUrlPollState) => void;\n /**\n * Serialized poller state that can be used to resume polling from.\n * This may be useful when starting a copy on one process or thread\n * and you wish to continue polling on another process or thread.\n *\n * To get serialized poller state, call `poller.toString()` on an existing\n * poller.\n */\n resumeFrom?: string;\n}\n\n/**\n * Contains response data for the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse {}\n\n/**\n * Options to configure the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve.\n */\n snapshot?: string;\n /**\n * When this is set to true and download range of blob, the service returns the MD5 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentMD5?: boolean;\n /**\n * When this is set to true and download range of blob, the service returns the CRC64 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentCrc64?: boolean;\n /**\n * Conditions to meet when downloading blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Call back to receive events on the progress of download operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. 
ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original body download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional `FileClient.download()` request will be made\n * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached.\n *\n * Default value is 5, please set a larger value when loading large files in poor network.\n */\n maxRetryRequests?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.exists} operation.\n */\nexport interface BlobExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Conditions to meet.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting blob properties.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.delete} operation.\n */\nexport interface BlobDeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies options to delete blobs that have associated snapshots.\n * - `include`: Delete the base blob and all of its snapshots.\n * - `only`: Delete only the blob's snapshots and not the blob itself.\n */\n deleteSnapshots?: DeleteSnapshotsOptionType;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.undelete} operation.\n */\nexport interface BlobUndeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setHTTPHeaders} operation.\n */\nexport interface BlobSetHTTPHeadersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob HTTP headers.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: 
CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setMetadata} operation.\n */\nexport interface BlobSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob metadata.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.setTags} operation.\n */\nexport interface BlobSetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getTags} operation.\n */\nexport interface BlobGetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getTags} operation.\n */\nexport type BlobGetTagsResponse = { tags: Tags } & BlobGetTagsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: BlobGetTagsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: BlobTags;\n };\n };\n\n/**\n * Options to configure Blob - Acquire Lease operation.\n */\nexport interface BlobAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Release Lease operation.\n */\nexport interface BlobReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Renew Lease operation.\n */\nexport interface BlobRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the 
request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Change Lease operation.\n */\nexport interface BlobChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Break Lease operation.\n */\nexport interface BlobBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.createSnapshot} operation.\n */\nexport interface BlobCreateSnapshotOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet when creating blob snapshots.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobStartCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the blob that are being copied.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | PremiumPageBlobTier | string;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n /**\n * Optional. 
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Overrides the sealed state of the destination blob. Default true.\n */\n sealBlob?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobClient.abortCopyFromURL} operation.\n */\nexport interface BlobAbortCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.syncCopyFromURL} operation.\n */\nexport interface BlobSyncCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Default 'REPLACE'. 
Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Options to configure the {@link BlobClient.setAccessTier} operation.\n */\nexport interface BlobSetTierOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n}\n\n/**\n * Option interface for the {@link BlobClient.downloadToBuffer} operation.\n */\nexport interface BlobDownloadToBufferOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * blockSize is the data every request trying to download.\n * Must be greater than or equal to 0.\n * If set to 0 or undefined, blockSize will automatically calculated according to the blob size.\n */\n blockSize?: number;\n\n /**\n * Optional. ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original block download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional FileClient.download() request will be made\n * from the broken point, until the requested block has been successfully downloaded or\n * maxRetryRequestsPerBlock is reached.\n *\n * Default value is 5, please set a larger value when in poor network.\n */\n maxRetryRequestsPerBlock?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel download.\n */\n concurrency?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Contains response data for the {@link BlobClient.deleteIfExists} operation.\n */\nexport interface BlobDeleteIfExistsResponse extends BlobDeleteResponse {\n /**\n * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}.\n */\nexport interface CommonGenerateSasUrlOptions {\n /**\n * The version of the service this SAS will target. 
If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n}\n\n/**\n * Options to configure {@link BlobClient.generateSasUrl} operation.\n */\nexport interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: BlobSASPermissions;\n}\n\n/**\n * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation.\n */\nexport interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation.\n */\nexport interface BlobSetImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n modifiedAccessCondition?: ModificationConditions;\n}\n\n/**\n * Options for setting legal hold {@link BlobClient.setLegalHold} operation.\n */\nexport interface BlobSetLegalHoldOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,\n * append blob, or page blob.\n */\nexport class BlobClient extends StorageClient {\n /**\n * blobContext provided by protocol layer.\n */\n private blobContext: StorageBlob;\n\n private _name: string;\n private _containerName: string;\n\n private _versionId?: string;\n private _snapshot?: string;\n\n /**\n * The name of the blob.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * The name of the storage container the blob is associated with.\n */\n public get containerName(): string {\n return 
this._containerName;\n }\n\n /**\n *\n * Creates an instance of BlobClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n options = options || {};\n let pipeline: PipelineLike;\n let url: string;\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n\n super(url, pipeline);\n ({ blobName: this._name, containerName: this._containerName } =\n this.getBlobAndContainerNamesFromUrl());\n this.blobContext = new StorageBlob(this.storageClientContext);\n\n this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT) as string;\n this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID) as string;\n }\n\n /**\n * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp\n */\n public withSnapshot(snapshot: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a new BlobClient object pointing to a version of this blob.\n * Provide \"\" will remove the versionId and return a Client to the base blob.\n *\n * @param versionId - The versionId.\n * @returns A new BlobClient object pointing to the version of this blob.\n */\n public withVersion(versionId: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.VERSIONID,\n versionId.length === 0 ? 
undefined : versionId\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a AppendBlobClient object.\n *\n */\n public getAppendBlobClient(): AppendBlobClient {\n return new AppendBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a BlockBlobClient object.\n *\n */\n public getBlockBlobClient(): BlockBlobClient {\n return new BlockBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a PageBlobClient object.\n *\n */\n public getPageBlobClient(): PageBlobClient {\n return new PageBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Reads or downloads a blob from the system, including its metadata and properties.\n * You can also call Get Blob to read a snapshot.\n *\n * * In Node.js, data returns in a Readable stream readableStreamBody\n * * In browsers, data returns in a promise blobBody\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob\n *\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Optional options to Blob Download operation.\n *\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);\n * console.log(\"Downloaded blob content:\", downloaded.toString());\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * Example usage (browser):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);\n * console.log(\n * \"Downloaded blob content\",\n * downloaded\n * );\n *\n * async function blobToString(blob: Blob): Promise {\n * const fileReader = new FileReader();\n * return new Promise((resolve, reject) => {\n * fileReader.onloadend = (ev: any) => {\n * resolve(ev.target!.result);\n * };\n * fileReader.onerror = reject;\n * fileReader.readAsText(blob);\n * });\n * }\n * ```\n */\n public async download(\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlobClient-download\", options);\n\n try {\n const res = await this.blobContext.download({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream\n },\n range: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedRes = {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n // Return browser response immediately\n if (!isNode) {\n return wrappedRes;\n }\n\n // We support retrying when download stream unexpected ends in Node.js runtime\n // Following code shouldn't be bundled into browser build, however some\n // bundlers may try to bundle following code and \"FileReadResponse.ts\".\n // In this case, \"FileDownloadResponse.browser.ts\" will be used as a shim of \"FileDownloadResponse.ts\"\n // The config is in package.json \"browser\" field\n if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {\n // TODO: Default value or make it a required parameter?\n options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;\n }\n\n if (res.contentLength === undefined) {\n throw new RangeError(`File download response doesn't contain valid content length header`);\n }\n\n if (!res.etag) {\n throw new RangeError(`File download response doesn't contain valid etag header`);\n }\n\n return new BlobDownloadResponse(\n wrappedRes,\n async (start: number): Promise => {\n const updatedDownloadOptions: BlobDownloadOptionalParams = {\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ifMatch: options.conditions!.ifMatch || res.etag,\n ifModifiedSince: options.conditions!.ifModifiedSince,\n ifNoneMatch: options.conditions!.ifNoneMatch,\n ifUnmodifiedSince: options.conditions!.ifUnmodifiedSince,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({\n count: offset + res.contentLength! - start,\n offset: start,\n }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n };\n\n // Debug purpose only\n // console.log(\n // `Read from internal stream, range: ${\n // updatedOptions.range\n // }, options: ${JSON.stringify(updatedOptions)}`\n // );\n\n return (\n await this.blobContext.download({\n abortSignal: options.abortSignal,\n ...updatedDownloadOptions,\n })\n ).readableStreamBody!;\n },\n offset,\n res.contentLength!,\n {\n maxRetryRequests: options.maxRetryRequests,\n onProgress: options.onProgress,\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure blob resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing blob might be deleted by other clients or\n * applications. 
Vice versa new blobs might be added by other clients or applications after this\n * function completes.\n *\n * @param options - options to Exists operation.\n */\n public async exists(options: BlobExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-exists\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n await this.getProperties({\n abortSignal: options.abortSignal,\n customerProvidedKey: options.customerProvidedKey,\n conditions: options.conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n // Expected exception when checking blob existence\n return false;\n } else if (\n e.statusCode === 409 &&\n (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg ||\n e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)\n ) {\n // Expected exception when checking blob existence\n return true;\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns all user-defined metadata, standard HTTP properties, and system properties\n * for the blob. It does not return the content of the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Optional options to Get Properties operation.\n */\n public async getProperties(\n options: BlobGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getProperties\", options);\n try {\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const res = await this.blobContext.getProperties({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n return {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async delete(options: BlobDeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-delete\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.blobContext.delete({\n abortSignal: options.abortSignal,\n deleteSnapshots: options.deleteSnapshots,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async deleteIfExists(\n options: BlobDeleteOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteIfExists\", options);\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a blob or snapshot only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restores the contents and metadata of soft deleted blob and any associated\n * soft deleted snapshots. 
Undelete Blob is supported only on version 2017-07-29\n * or later.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob\n *\n * @param options - Optional options to Blob Undelete operation.\n */\n public async undelete(options: BlobUndeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-undelete\", options);\n try {\n return await this.blobContext.undelete({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets system properties on the blob.\n *\n * If no value provided, or no value provided for the specified blob HTTP headers,\n * these blob HTTP headers without a value will be cleared.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param blobHTTPHeaders - If no value provided, or no value provided for\n * the specified blob HTTP headers, these blob HTTP\n * headers without a value will be cleared.\n * A common header to set is `blobContentType`\n * enabling the browser to provide functionality\n * based on file type.\n * @param options - Optional options to Blob Set HTTP Headers operation.\n */\n public async setHTTPHeaders(\n blobHTTPHeaders?: BlobHTTPHeaders,\n options: BlobSetHTTPHeadersOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setHTTPHeaders\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setHttpHeaders({\n abortSignal: options.abortSignal,\n blobHttpHeaders: blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger.\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets user-defined metadata for the specified blob as one or more name-value pairs.\n *\n * If no option provided, or no metadata defined in the parameter, the blob\n * metadata will be removed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Optional options to Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: BlobSetMetadataOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setMetadata\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: 
SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets tags on the underlying blob.\n * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.\n * Valid tag key and value characters include lower and upper case letters, digits (0-9),\n * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').\n *\n * @param tags -\n * @param options -\n */\n public async setTags(tags: Tags, options: BlobSetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setTags\", options);\n try {\n return await this.blobContext.setTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n tags: toBlobTags(tags),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the tags associated with the underlying blob.\n *\n * @param options -\n */\n public async getTags(options: BlobGetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getTags\", options);\n try {\n const response = await this.blobContext.getTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n const wrappedResponse: BlobGetTagsResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n tags: toTags({ blobTagSet: response.blobTagSet }) || {},\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the blob.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the blob.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a read-only snapshot of a blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob\n *\n * @param options - Optional options to the Blob Create Snapshot operation.\n */\n public async createSnapshot(\n options: BlobCreateSnapshotOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-createSnapshot\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.createSnapshot({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Asynchronously copies a blob to a 
destination within the storage account.\n * This method returns a long running operation poller that allows you to wait\n * indefinitely until the copy is completed.\n * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.\n * Note that the onProgress callback will not be invoked if the operation completes in the first\n * request, and attempting to cancel a completed copy will result in an error being thrown.\n *\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * Example using automatic polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using manual polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * while (!poller.isDone()) {\n * await poller.poll();\n * }\n * const result = copyPoller.getResult();\n * ```\n *\n * Example using progress updates:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * onProgress(state) {\n * console.log(`Progress: ${state.copyProgress}`);\n * }\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using a changing polling interval (default 15 seconds):\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * intervalInMs: 1000 // poll blob every 1 second for copy progress\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using copy cancellation:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * // cancel operation after starting it.\n * try {\n * await copyPoller.cancelOperation();\n * // calls to get the result now throw PollerCancelledError\n * await copyPoller.getResult();\n * } catch (err) {\n * if (err.name === 'PollerCancelledError') {\n * console.log('The copy was cancelled.');\n * }\n * }\n * ```\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n public async beginCopyFromURL(\n copySource: string,\n options: BlobBeginCopyFromURLOptions = {}\n ): Promise<\n PollerLike, BlobBeginCopyFromURLResponse>\n > {\n const client: CopyPollerBlobClient = {\n abortCopyFromURL: (...args) => this.abortCopyFromURL(...args),\n getProperties: (...args) => this.getProperties(...args),\n startCopyFromURL: (...args) => this.startCopyFromURL(...args),\n };\n const poller = new BlobBeginCopyFromUrlPoller({\n blobClient: client,\n copySource,\n intervalInMs: options.intervalInMs,\n onProgress: options.onProgress,\n resumeFrom: options.resumeFrom,\n startCopyFromURLOptions: options,\n });\n\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n await poller.poll();\n\n return poller;\n }\n\n /**\n * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero\n * length and full metadata. 
Version 2012-02-12 and newer.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob\n *\n * @param copyId - Id of the Copy From URL operation.\n * @param options - Optional options to the Blob Abort Copy From URL operation.\n */\n public async abortCopyFromURL(\n copyId: string,\n options: BlobAbortCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-abortCopyFromURL\", options);\n try {\n return await this.blobContext.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not\n * return a response until the copy is complete.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url\n *\n * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication\n * @param options -\n */\n public async syncCopyFromURL(\n copySource: string,\n options: BlobSyncCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-syncCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.copyFromURL(copySource, {\n abortSignal: options.abortSignal,\n metadata: options.metadata,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n sourceContentMD5: options.sourceContentMD5,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n blobTagsString: toBlobTagsString(options.tags),\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n encryptionScope: options.encryptionScope,\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier\n *\n * @param tier - The tier to be set on the blob. 
Valid values are Hot, Cool, or Archive.\n * @param options - Optional options to the Blob Set Tier operation.\n */\n public async setAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string,\n options: BlobSetTierOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setAccessTier\", options);\n try {\n return await this.blobContext.setTier(toAccessTier(tier)!, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n rehydratePriority: options.rehydratePriority,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level function\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param buffer - Buffer to be fill, must have length larger than count\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n buffer: Buffer,\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n public async downloadToBuffer(\n param1?: Buffer | number,\n param2?: number,\n param3?: BlobDownloadToBufferOptions | number,\n param4: BlobDownloadToBufferOptions = {}\n ): Promise {\n let buffer: Buffer | undefined;\n let offset = 0;\n let count = 0;\n let options = param4;\n if (param1 instanceof Buffer) {\n buffer = param1;\n offset = param2 || 0;\n count = typeof param3 === \"number\" ? param3 : 0;\n } else {\n offset = typeof param1 === \"number\" ? param1 : 0;\n count = typeof param2 === \"number\" ? 
param2 : 0;\n options = (param3 as BlobDownloadToBufferOptions) || {};\n }\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToBuffer\", options);\n\n try {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0) {\n throw new RangeError(\"blockSize option must be >= 0\");\n }\n if (options.blockSize === 0) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n\n if (offset < 0) {\n throw new RangeError(\"offset option must be >= 0\");\n }\n\n if (count && count <= 0) {\n throw new RangeError(\"count option must be greater than 0\");\n }\n\n if (!options.conditions) {\n options.conditions = {};\n }\n\n // Customer doesn't specify length, get it\n if (!count) {\n const response = await this.getProperties({\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n count = response.contentLength! - offset;\n if (count < 0) {\n throw new RangeError(\n `offset ${offset} shouldn't be larger than blob size ${response.contentLength!}`\n );\n }\n }\n\n // Allocate the buffer of size = count if the buffer is not provided\n if (!buffer) {\n try {\n buffer = Buffer.alloc(count);\n } catch (error: any) {\n throw new Error(\n `Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\\t ${error.message}`\n );\n }\n }\n\n if (buffer.length < count) {\n throw new RangeError(\n `The buffer's size should be equal to or larger than the request count of bytes: ${count}`\n );\n }\n\n let transferProgress: number = 0;\n const batch = new Batch(options.concurrency);\n for (let off = offset; off < offset + count; off = off + options.blockSize) {\n batch.addOperation(async () => {\n // Exclusive chunk end position\n let chunkEnd = offset + count!;\n if (off + options.blockSize! < chunkEnd) {\n chunkEnd = off + options.blockSize!;\n }\n const response = await this.download(off, chunkEnd - off, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n maxRetryRequests: options.maxRetryRequestsPerBlock,\n customerProvidedKey: options.customerProvidedKey,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n const stream = response.readableStreamBody!;\n await streamToBuffer(stream, buffer!, off - offset, chunkEnd - offset);\n // Update progress after block is downloaded, in case of block trying\n // Could provide finer grained progress updating inside HTTP requests,\n // only if convenience layer download try is enabled\n transferProgress += chunkEnd - off;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n });\n }\n await batch.do();\n return buffer;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob to a local file.\n * Fails if the the given file path already exits.\n * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.\n *\n * @param filePath -\n * @param offset - From which position of the block blob to download.\n * @param count - How much data to be downloaded. 
Will download to the end when passing undefined.\n * @param options - Options to Blob download options.\n * @returns The response data for blob download operation,\n * but with readableStreamBody set to undefined since its\n * content is already read and written into a local file\n * at the specified path.\n */\n public async downloadToFile(\n filePath: string,\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToFile\", options);\n try {\n const response = await this.download(offset, count, {\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n if (response.readableStreamBody) {\n await readStreamToLocalFile(response.readableStreamBody, filePath);\n }\n\n // The stream is no longer accessible so setting it to undefined.\n (response as any).blobDownloadStream = undefined;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n private getBlobAndContainerNamesFromUrl(): { blobName: string; containerName: string } {\n let containerName;\n let blobName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`\n // http://localhost:10001/devstoreaccount1/containername/blob\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob\n // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/containername/blob\n // .getPath() -> /devstoreaccount1/containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)/([^/]*)(/(.*))?\");\n containerName = pathComponents![2];\n blobName = pathComponents![4];\n } else {\n // \"https://customdomain.com/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n }\n\n // decode the encoded blobName, containerName - to get all the special characters that might be present in them\n containerName = decodeURIComponent(containerName);\n blobName = decodeURIComponent(blobName);\n\n // Azure Storage Server will replace \"\\\" with \"/\" in the blob names\n // doing the same in the SDK side so that the user doesn't have to replace \"\\\" instances in the blobName\n blobName = blobName.replace(/\\\\/g, \"/\");\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return { blobName, containerName };\n } catch (error: any) {\n throw new Error(\"Unable to extract blobName and containerName with provided information.\");\n }\n }\n\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n private async startCopyFromURL(\n copySource: string,\n options: BlobStartCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-startCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.startCopyFromURL(copySource, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions.tagConditions,\n },\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n rehydratePriority: options.rehydratePriority,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n sealBlob: options.sealBlob,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Only available for BlobClient constructed with a shared key credential.\n *\n * Generates a Blob Service Shared 
Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: BlobGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n blobName: this._name,\n snapshotTime: this._snapshot,\n versionId: this._versionId,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Delete the immutablility policy on the blob.\n *\n * @param options - Optional options to delete immutability policy on the blob.\n */\n public async deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteImmutabilityPolicy\", options);\n try {\n return await this.blobContext.deleteImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set immutablility policy on the blob.\n *\n * @param options - Optional options to set immutability policy on the blob.\n */\n public async setImmutabilityPolicy(\n immutabilityPolicy: BlobImmutabilityPolicy,\n options?: BlobSetImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setImmutabilityPolicy\", options);\n try {\n return await this.blobContext.setImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn,\n immutabilityPolicyMode: immutabilityPolicy.policyMode,\n modifiedAccessConditions: options?.modifiedAccessCondition,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set legal hold on the blob.\n *\n * @param options - Optional options to set legal hold on the blob.\n */\n public async setLegalHold(\n legalHoldEnabled: boolean,\n options?: BlobSetLegalHoldOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setLegalHold\", options);\n try {\n return await this.blobContext.setLegalHold(legalHoldEnabled, {\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link AppendBlobClient.create} operation.\n */\nexport interface AppendBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * 
Conditions to meet when creating append blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when creating append blobs. A common header\n * to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * HTTP headers to set when creating append blobs. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.seal} operation.\n */\nexport interface AppendBlobSealOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet.\n */\n conditions?: AppendBlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlock} operation.\n */\nexport interface AppendBlobAppendBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Callback to receive events on the progress of append block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. 
For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation.\n */\nexport interface AppendBlobAppendBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link appendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * AppendBlobClient defines a set of operations applicable to append blobs.\n */\nexport class AppendBlobClient extends BlobClient {\n /**\n * appendBlobsContext provided by protocol layer.\n */\n private appendBlobContext: AppendBlob;\n\n /**\n *\n * Creates an instance of AppendBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString;\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n // The second parameter is undefined. 
Use anonymous credential.\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.appendBlobContext = new AppendBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new AppendBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): AppendBlobClient {\n return new AppendBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a 0-length append blob. 
Call AppendBlock to append data to an append blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options - Options to the Append Block Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const appendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await appendBlobClient.create();\n * ```\n */\n public async create(options: AppendBlobCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-create\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.create(0, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.\n * If the blob with the same name already exists, the content of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options -\n */\n public async createIfNotExists(\n options: AppendBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-createIfNotExists\", options);\n const conditions = { ifNoneMatch: ETagAny };\n try {\n const res = await this.create({\n ...updatedOptions,\n conditions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Seals the append blob, making it read only.\n *\n * @param options -\n */\n public async seal(options: AppendBlobSealOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-seal\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.appendBlobContext.seal({\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Commits a new block of data to the end of the existing append blob.\n * @see 
https://docs.microsoft.com/rest/api/storageservices/append-block\n *\n * @param body - Data to be appended.\n * @param contentLength - Length of the body in bytes.\n * @param options - Options to the Append Block operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello World!\";\n *\n * // Create a new append blob and append data to the blob.\n * const newAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await newAppendBlobClient.create();\n * await newAppendBlobClient.appendBlock(content, content.length);\n *\n * // Append data to an existing append blob.\n * const existingAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await existingAppendBlobClient.appendBlock(content, content.length);\n * ```\n */\n public async appendBlock(\n body: HttpRequestBody,\n contentLength: number,\n options: AppendBlobAppendBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlock\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlock(contentLength, body, {\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob\n * where the contents are read from a source url.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url\n *\n * @param sourceURL -\n * The url to the blob that will be the source of the copy. A source blob in the same storage account can\n * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob\n * must either be public or must be authenticated via a shared access signature. 
If the source blob is\n * public, no authentication is required to perform the operation.\n * @param sourceOffset - Offset in source to be appended\n * @param count - Number of bytes to be appended as a block\n * @param options -\n */\n public async appendBlockFromURL(\n sourceURL: string,\n sourceOffset: number,\n count: number,\n options: AppendBlobAppendBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlockFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, {\n abortSignal: options.abortSignal,\n sourceRange: rangeToString({ offset: sourceOffset, count }),\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n appendPositionAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link BlockBlobClient.upload} operation.\n */\nexport interface BlockBlobUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when uploading to a block blob. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when uploading to a block blob.\n */\n metadata?: Metadata;\n /**\n * Callback to receive events on the progress of upload operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Optional. 
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation.\n */\nexport interface BlockBlobSyncUploadFromURLOptions extends CommonOptions {\n /**\n * Server timeout in seconds.\n * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations\n */\n timeoutInSeconds?: number;\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value\n * pairs are specified, the operation will copy the metadata from the source blob or file to the\n * destination blob. If one or more name-value pairs are specified, the destination blob is\n * created with the specified metadata, and metadata is not copied from the source blob or file.\n * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules\n * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more\n * information.\n */\n metadata?: Metadata;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Optional, default is true. Indicates if properties from the source blob should be copied.\n */\n copySourceBlobProperties?: boolean;\n /**\n * HTTP headers to set when uploading to a block blob.\n *\n * A common header to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * Conditions to meet for the destination Azure Blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Conditions to meet for the source Azure Blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Blob query error type.\n */\nexport interface BlobQueryError {\n /**\n * Whether error is fatal. 
Fatal error will stop query.\n */\n isFatal: boolean;\n /**\n * Error name.\n */\n name: string;\n /**\n * Position in bytes of the query.\n */\n position: number;\n /**\n * Error description.\n */\n description: string;\n}\n\n/**\n * Options to query blob with JSON format.\n */\nexport interface BlobQueryJsonTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a JSON format blob.\n */\n kind: \"json\";\n}\n\n/**\n * Options to query blob with CSV format.\n */\nexport interface BlobQueryCsvTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a CSV format blob.\n */\n kind: \"csv\";\n /**\n * Column separator. Default is \",\".\n */\n columnSeparator?: string;\n /**\n * Field quote.\n */\n fieldQuote?: string;\n /**\n * Escape character.\n */\n escapeCharacter?: string;\n /**\n * Has headers. Default is false.\n */\n hasHeaders?: boolean;\n}\n\n/**\n * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}.\n */\nexport interface BlobQueryArrowConfiguration {\n /**\n * Kind.\n */\n kind: \"arrow\";\n\n /**\n * List of {@link BlobQueryArrowField} describing the schema of the data.\n */\n schema: BlobQueryArrowField[];\n}\n\n/**\n * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}.\n */\nexport interface BlobQueryParquetConfiguration {\n /**\n * Kind.\n */\n kind: \"parquet\";\n}\n\n/**\n * Options to configure {@link BlockBlobClient.query} operation.\n */\nexport interface BlockBlobQueryOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Configurations for the query input.\n */\n inputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryParquetConfiguration;\n /**\n * Configurations for the query output.\n */\n outputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration;\n /**\n * Callback to receive events on the progress of query operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlock} operation.\n */\nexport interface BlockBlobStageBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * Callback to receive events on the progress of stage block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. 
This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n\n /**\n * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation.\n */\nexport interface BlockBlobStageBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the bytes of the source Blob/File to upload.\n * If not specified, the entire content is uploaded as a single block.\n */\n range?: Range;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. 
Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.commitBlockList} operation.\n */\nexport interface BlockBlobCommitBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when committing the block list.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when committing block list.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when committing block list.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.getBlockList} operation.\n */\nexport interface BlockBlobGetBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n}\n\n/**\n * Option interface for the {@link BlockBlobClient.uploadStream} operation.\n */\nexport interface BlockBlobUploadStreamOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Blob HTTP Headers.\n *\n * A common header to set is `blobContentType`, enabling the\n * browser to provide functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n/**\n * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}.\n */\nexport interface BlockBlobParallelUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Destination block blob size in bytes.\n */\n blockSize?: number;\n\n /**\n * Blob size threshold in bytes to start concurrency uploading.\n * Default value is 256MB, blob size less than this option will\n * be uploaded via one I/O operation without concurrency.\n * You can customize a value less equal than the default value.\n */\n maxSingleShotSize?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Blob HTTP Headers. A common header to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel uploading. Must be greater than or equal to 0.\n */\n concurrency?: number;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n\n/**\n * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and\n * {@link BlockBlobClient.uploadBrowserDate}.\n */\nexport type BlobUploadCommonResponse = BlockBlobUploadHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse;\n};\n\n/**\n * BlockBlobClient defines a set of operations applicable to block blobs.\n */\nexport class BlockBlobClient extends BlobClient {\n /**\n * blobContext provided by protocol layer.\n *\n * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API\n * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient.\n */\n private _blobContext: StorageBlob;\n\n /**\n * blockBlobContext provided by protocol layer.\n */\n private blockBlobContext: BlockBlob;\n\n /**\n *\n * Creates an instance of BlockBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.blockBlobContext = new BlockBlob(this.storageClientContext);\n this._blobContext = new StorageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new BlockBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a URL to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): BlockBlobClient {\n return new BlockBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Quick query for a JSON or CSV formatted blob.\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Query and convert a blob to a string\n * const queryBlockBlobResponse = await blockBlobClient.query(\"select * from BlobStorage\");\n * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();\n * console.log(\"Query blob content:\", downloaded);\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * @param query -\n * @param options -\n */\n public async query(\n query: string,\n options: BlockBlobQueryOptions = {}\n ): Promise {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-query\", options);\n\n try {\n if (!isNode) {\n throw new Error(\"This operation currently is only supported in Node.js.\");\n }\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const response = await this._blobContext.query({\n abortSignal: options.abortSignal,\n queryRequest: {\n queryType: \"SQL\",\n expression: query,\n inputSerialization: toQuerySerialization(options.inputTextConfiguration),\n outputSerialization: toQuerySerialization(options.outputTextConfiguration),\n },\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n return new BlobQueryResponse(response, {\n abortSignal: options.abortSignal,\n onProgress: options.onProgress,\n onError: options.onError,\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link stageBlock} and {@link commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link uploadFile},\n * {@link uploadStream} or {@link uploadBrowserData} for better performance\n * with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. 
Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to the Block Blob Upload operation.\n * @returns Response data for the Block Blob Upload operation.\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public async upload(\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-upload\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.upload(contentLength, body, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new Block Blob where the contents of the blob are read from a given URL.\n * This API is supported beginning with the 2020-04-08 version. Partial updates\n * are not supported with Put Blob from URL; the content of an existing blob is overwritten with\n * the content of the new blob. To perform partial updates to a block blob’s contents using a\n * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.\n *\n * @param sourceURL - Specifies the URL of the blob. The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. 
Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Optional parameters.\n */\n\n public async syncUploadFromURL(\n sourceURL: string,\n options: BlockBlobSyncUploadFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-syncUploadFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, {\n ...options,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions?.ifMatch,\n sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Uploads the specified block to the block blob's \"staging area\" to be later\n * committed by a call to commitBlockList.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param body - Data to upload to the staging area.\n * @param contentLength - Number of bytes to upload.\n * @param options - Options to the Block Blob Stage Block operation.\n * @returns Response data for the Block Blob Stage Block operation.\n */\n public async stageBlock(\n blockId: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobStageBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlock\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlock(blockId, contentLength, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Stage Block From URL operation creates a new block to be committed as part\n * of a blob where the contents are read from a URL.\n * This API is available starting in version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param sourceURL - Specifies the URL of the blob. 
The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Options to the Block Blob Stage Block From URL operation.\n * @returns Response data for the Block Blob Stage Block From URL operation.\n */\n public async stageBlockFromURL(\n blockId: string,\n sourceURL: string,\n offset: number = 0,\n count?: number,\n options: BlockBlobStageBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlockFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes a blob by specifying the list of block IDs that make up the blob.\n * In order to be written as part of a blob, a block must have been successfully written\n * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to\n * update a blob by uploading only those blocks that have changed, then committing the new and existing\n * blocks together. 
Any blocks not specified in the block list and permanently deleted.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list\n *\n * @param blocks - Array of 64-byte value that is base64-encoded\n * @param options - Options to the Block Blob Commit Block List operation.\n * @returns Response data for the Block Blob Commit Block List operation.\n */\n public async commitBlockList(\n blocks: string[],\n options: BlockBlobCommitBlockListOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-commitBlockList\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.commitBlockList(\n { latest: blocks },\n {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of blocks that have been uploaded as part of a block blob\n * using the specified block list filter.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list\n *\n * @param listType - Specifies whether to return the list of committed blocks,\n * the list of uncommitted blocks, or both lists together.\n * @param options - Options to the Block Blob Get Block List operation.\n * @returns Response data for the Block Blob Get Block List operation.\n */\n public async getBlockList(\n listType: BlockListType,\n options: BlockBlobGetBlockListOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-getBlockList\", options);\n try {\n const res = await this.blockBlobContext.getBlockList(listType, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n if (!res.committedBlocks) {\n res.committedBlocks = [];\n }\n\n if (!res.uncommittedBlocks) {\n res.uncommittedBlocks = [];\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level functions\n\n /**\n * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.\n *\n * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the 
browser to provide\n * functionality based on file type.\n *\n * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView\n * @param options -\n */\n public async uploadData(\n data: Buffer | Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadData\", options);\n try {\n if (isNode) {\n let buffer: Buffer;\n if (data instanceof Buffer) {\n buffer = data;\n } else if (data instanceof ArrayBuffer) {\n buffer = Buffer.from(data);\n } else {\n data = data as ArrayBufferView;\n buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength);\n }\n\n return this.uploadSeekableInternal(\n (offset: number, size: number): Buffer => buffer.slice(offset, offset + size),\n buffer.byteLength,\n updatedOptions\n );\n } else {\n const browserBlob = new Blob([data]);\n return this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n }\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN BROWSERS.\n *\n * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.\n *\n * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call\n * {@link commitBlockList} to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n * @deprecated Use {@link uploadData} instead.\n *\n * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView\n * @param options - Options to upload browser data.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadBrowserData(\n browserData: Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadBrowserData\", options);\n try {\n const browserBlob = new Blob([browserData]);\n return await this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n *\n * Uploads data to block blob. 
Requires a bodyFactory as the data source,\n * which need to return a {@link HttpRequestBody} object with the offset and size provided.\n *\n * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * @param bodyFactory -\n * @param size - size of the data to upload.\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n private async uploadSeekableInternal(\n bodyFactory: (offset: number, size: number) => HttpRequestBody,\n size: number,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {\n throw new RangeError(\n `blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`\n );\n }\n\n if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {\n options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;\n }\n if (\n options.maxSingleShotSize < 0 ||\n options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES\n ) {\n throw new RangeError(\n `maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`\n );\n }\n\n if (options.blockSize === 0) {\n if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(`${size} is too larger to upload to a block blob.`);\n }\n if (size > options.maxSingleShotSize) {\n options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);\n if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n }\n }\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadSeekableInternal\", options);\n\n try {\n if (size <= options.maxSingleShotSize) {\n return await this.upload(bodyFactory(0, size), size, updatedOptions);\n }\n\n const numBlocks: number = Math.floor((size - 1) / options.blockSize) + 1;\n if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(\n `The buffer's size is too big or the BlockSize is too small;` +\n `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`\n );\n }\n\n const blockList: string[] = [];\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n\n const batch = new Batch(options.concurrency);\n for (let i = 0; i < numBlocks; i++) {\n batch.addOperation(async (): Promise => {\n const blockID = generateBlockID(blockIDPrefix, i);\n const start = options.blockSize! * i;\n const end = i === numBlocks - 1 ? 
size : start + options.blockSize!;\n const contentLength = end - start;\n blockList.push(blockID);\n await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n // Update progress after block is successfully uploaded to server, in case of block trying\n // TODO: Hook with convenience layer progress event in finer level\n transferProgress += contentLength;\n if (options.onProgress) {\n options.onProgress!({\n loadedBytes: transferProgress,\n });\n }\n });\n }\n await batch.do();\n\n return this.commitBlockList(blockList, updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a local file in blocks to a block blob.\n *\n * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList\n * to commit the block list.\n *\n * @param filePath - Full path of local file\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadFile(\n filePath: string,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadFile\", options);\n try {\n const size = (await fsStat(filePath)).size;\n return await this.uploadSeekableInternal(\n (offset, count) => {\n return () =>\n fsCreateReadStream(filePath, {\n autoClose: true,\n end: count ? offset + count - 1 : Infinity,\n start: offset,\n });\n },\n size,\n {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a Node.js Readable stream into block blob.\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * * Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n *\n * @param stream - Node.js Readable stream\n * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB\n * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,\n * positive correlation with max uploading concurrency. 
Default value is 5\n * @param options - Options to Upload Stream to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadStream(\n stream: Readable,\n bufferSize: number = DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n maxConcurrency: number = 5,\n options: BlockBlobUploadStreamOptions = {}\n ): Promise {\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadStream\", options);\n\n try {\n let blockNum = 0;\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n const blockList: string[] = [];\n\n const scheduler = new BufferScheduler(\n stream,\n bufferSize,\n maxConcurrency,\n async (body, length) => {\n const blockID = generateBlockID(blockIDPrefix, blockNum);\n blockList.push(blockID);\n blockNum++;\n\n await this.stageBlock(blockID, body, length, {\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n\n // Update progress after block is successfully uploaded to server, in case of block trying\n transferProgress += length;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n },\n // concurrency should set a smaller value than maxConcurrency, which is helpful to\n // reduce the possibility when a outgoing handler waits for stream data, in\n // this situation, outgoing handlers are blocked.\n // Outgoing queue shouldn't be empty.\n Math.ceil((maxConcurrency / 4) * 3)\n );\n await scheduler.do();\n\n return await this.commitBlockList(blockList, {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure the {@link PageBlobClient.create} operation.\n */\nexport interface PageBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when creating a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.uploadPages} operation.\n */\nexport interface PageBlobUploadPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Callback to receive events on the progress of upload pages operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content. 
This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.clearPages} operation.\n */\nexport interface PageBlobClearPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when clearing pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure page blob - get page ranges segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesSegment}\n * - {@link PageBlobClient.listPageRangeItemSegments}\n * - {@link PageBlobClient.listPageRangeItems}\n */\ninterface PageBlobListPageRangesSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRanges} operation.\n */\nexport interface PageBlobListPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n /**\n * (unused)\n */\n range?: string;\n}\n\n/**\n * Options to configure page blob - get page ranges diff segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesDiffSegment}\n * - {@link PageBlobClient.listPageRangeDiffItemSegments}\n * - {@link PageBlobClient.listPageRangeDiffItems}\n */\ninterface PageBlobListPageRangesDiffSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation.\n */\nexport interface PageBlobListPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.resize} operation.\n */\nexport interface PageBlobResizeOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when resizing a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link PageBlobClient.updateSequenceNumber} operation.\n */\nexport interface PageBlobUpdateSequenceNumberOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.startCopyIncremental} operation.\n */\nexport interface PageBlobStartCopyIncrementalOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when starting a copy incremental operation.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation.\n */\nexport interface PageBlobUploadPagesFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. 
Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * PageBlobClient defines a set of operations applicable to page blobs.\n */\nexport class PageBlobClient extends BlobClient {\n /**\n * pageBlobsContext provided by protocol layer.\n */\n private pageBlobContext: PageBlob;\n\n /**\n *\n * Creates an instance of PageBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n *\n * @param url - A URL string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.pageBlobContext = new PageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new PageBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): PageBlobClient {\n return new PageBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a page blob of the specified length. 
Call uploadPages to upload data\n * data to a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options - Options to the Page Blob Create operation.\n * @returns Response data for the Page Blob Create operation.\n */\n public async create(\n size: number,\n options: PageBlobCreateOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-create\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.create(0, size, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n blobSequenceNumber: options.blobSequenceNumber,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a page blob of the specified length. Call uploadPages to upload data\n * data to a page blob. If the blob with the same name already exists, the content\n * of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options -\n */\n public async createIfNotExists(\n size: number,\n options: PageBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-createIfNotExists\", options);\n try {\n const conditions = { ifNoneMatch: ETagAny };\n const res = await this.create(size, {\n ...options,\n conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes 1 or more pages to the page blob. 
The start and end offsets must be a multiple of 512.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param body - Data to upload\n * @param offset - Offset of destination page blob\n * @param count - Content length of the body, also number of bytes to be uploaded\n * @param options - Options to the Page Blob Upload Pages operation.\n * @returns Response data for the Page Blob Upload Pages operation.\n */\n public async uploadPages(\n body: HttpRequestBody,\n offset: number,\n count: number,\n options: PageBlobUploadPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPages\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPages(count, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the\n * contents are read from a URL.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url\n *\n * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication\n * @param sourceOffset - The source offset to copy from. 
Pass 0 to copy from the beginning of source page blob\n * @param destOffset - Offset of destination page blob\n * @param count - Number of bytes to be uploaded from source page blob\n * @param options -\n */\n public async uploadPagesFromURL(\n sourceURL: string,\n sourceOffset: number,\n destOffset: number,\n count: number,\n options: PageBlobUploadPagesFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPagesFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPagesFromURL(\n sourceURL,\n rangeToString({ offset: sourceOffset, count }),\n 0,\n rangeToString({ offset: destOffset, count }),\n {\n abortSignal: options.abortSignal,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n sequenceNumberAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Frees the specified pages from the page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param offset - Starting byte position of the pages to clear.\n * @param count - Number of bytes to clear.\n * @param options - Options to the Page Blob Clear Pages operation.\n * @returns Response data for the Page Blob Clear Pages operation.\n */\n public async clearPages(\n offset: number = 0,\n count?: number,\n options: PageBlobClearPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-clearPages\", options);\n try {\n return await this.pageBlobContext.clearPages(0, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of valid page ranges for a page blob or snapshot of a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns Response data for the Page Blob Get Ranges operation.\n */\n 
public async getPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobGetPageRangesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRanges\", options);\n try {\n return await this.pageBlobContext\n .getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesSegment returns a single segment of page ranges starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to PageBlob Get Page Ranges Segment operation.\n */\n private async listPageRangesSegment(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesSegment\", options);\n try {\n return await this.pageBlobContext.getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n marker: marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. 
The marker value is opaque to the client.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItemSegments(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(\n offset,\n count,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItems(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeItemSegments(\n offset,\n count,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges for a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges for a page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRanges()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRanges();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * 
@param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeItems(offset, count, options);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeItemSegments(offset, count, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...options,\n });\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page Range Diff operation.\n */\n public async getPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesDiff\", options);\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshot,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesDiffSegment returns a single segment of page ranges starting from the\n * specified Marker for difference between previous snapshot and the target page blob.\n * Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesDiffSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async listPageRangesDiffSegment(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): Promise {\n const { span, updatedOptions } = 
createSpan(\"PageBlobClient-getPageRangesDiffSegment\", options);\n try {\n return await this.pageBlobContext.getPageRangesDiff({\n abortSignal: options?.abortSignal,\n leaseAccessConditions: options?.conditions,\n modifiedAccessConditions: {\n ...options?.conditions,\n ifTags: options?.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshotOrUrl,\n range: rangeToString({\n offset: offset,\n count: count,\n }),\n marker: marker,\n maxPageSize: options?.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel}\n *\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. The marker value is opaque to the client.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItemSegments(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItems(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from 
`blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRangesDiff();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobListPageRangesDiffOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, {\n ...options,\n });\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshot,\n settings.continuationToken,\n {\n maxPageSize: settings.maxPageSize,\n ...options,\n }\n );\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page 
Range Diff operation.\n */\n public async getPageRangesDiffForManagedDisks(\n offset: number,\n count: number,\n prevSnapshotUrl: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\n \"PageBlobClient-GetPageRangesDiffForManagedDisks\",\n options\n );\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevSnapshotUrl,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Resizes the page blob to the specified size (which must be a multiple of 512).\n * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties\n *\n * @param size - Target size\n * @param options - Options to the Page Blob Resize operation.\n * @returns Response data for the Page Blob Resize operation.\n */\n public async resize(\n size: number,\n options: PageBlobResizeOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-resize\", options);\n try {\n return await this.pageBlobContext.resize(size, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets a page blob's sequence number.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.\n * @param sequenceNumber - Required if sequenceNumberAction is max or update\n * @param options - Options to the Page Blob Update Sequence Number operation.\n * @returns Response data for the Page Blob Update Sequence Number operation.\n */\n public async updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n sequenceNumber?: number,\n options: PageBlobUpdateSequenceNumberOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-updateSequenceNumber\", options);\n try {\n return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {\n abortSignal: options.abortSignal,\n blobSequenceNumber: sequenceNumber,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.\n * The snapshot is copied such that only the differential changes between the previously\n * copied snapshot are transferred to 
the destination.\n * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.\n * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob\n * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots\n *\n * @param copySource - Specifies the name of the source page blob snapshot. For example,\n * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Options to the Page Blob Copy Incremental operation.\n * @returns Response data for the Page Blob Copy Incremental operation.\n */\n public async startCopyIncremental(\n copySource: string,\n options: PageBlobStartCopyIncrementalOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-startCopyIncremental\", options);\n try {\n return await this.pageBlobContext.copyIncremental(copySource, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js new file mode 100644 index 00000000..efc10f80 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js @@ -0,0 +1,1273 @@ +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +import { getDefaultProxySettings, isNode, isTokenCredential, URLBuilder, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { Container } from "./generated/src/operations"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { StorageClient } from "./StorageClient"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { appendToURLPath, appendToURLQuery, BlobNameToString, ConvertInternalResponseOfListBlobFlat, ConvertInternalResponseOfListBlobHierarchy, extractConnectionStringParts, isIpEndpointStyle, parseObjectReplicationRecord, ProcessBlobItems, ProcessBlobPrefixes, toTags, truncatedISO8061Date, } from "./utils/utils.common"; +import { generateBlobSASQueryParameters } from "./sas/BlobSASSignatureValues"; +import { BlobLeaseClient } from "./BlobLeaseClient"; +import { AppendBlobClient, BlobClient, BlockBlobClient, PageBlobClient, } from "./Clients"; +import { BlobBatchClient } from "./BlobBatchClient"; +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +export class ContainerClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = new Container(this.storageClientContext); + } + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - Options to Container Create operation. 
+ * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + async create(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-create", options); + try { + // Spread operator in destructuring assignments, + // this will filter out unwanted properties from the response object into result object + return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-exists", options); + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when checking container existence", + }); + return false; + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. 
+ */ + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); + try { + return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-delete", options); + try { + return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. 
+ */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); + try { + return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. 
+ */ + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); + try { + const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); + try { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "", + }, + id: identifier.id, + }); + } + return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. 
+ * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); + try { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response, + }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + async deleteBlob(blobName, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); + try { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return await blobClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. 
+ * @param options - Options to Container List Blob Flat Segment operation. + */ + async listBlobFlatSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); + try { + const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. + */ + async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); + try { + const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + response.segment.blobPrefixes = []; + if (response.segment["BlobPrefix"] !== undefined) { + response.segment.blobPrefixes = ProcessBlobPrefixes(response.segment["BlobPrefix"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listSegments(marker, options = {}) { + return __asyncGenerator(this, arguments, function* listSegments_1() { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsFlatSegmentResponse = yield __await(this.listBlobFlatSegment(marker, options)); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield yield __await(yield __await(listBlobsFlatSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. + */ + listItems(options = {}) { + return __asyncGenerator(this, arguments, function* listItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const listBlobsFlatSegmentResponse = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blobs + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listHierarchySegments(delimiter, marker, options = {}) { + return __asyncGenerator(this, arguments, function* listHierarchySegments_1() { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsHierarchySegmentResponse = yield __await(this.listBlobHierarchySegment(delimiter, marker, options)); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield yield __await(yield __await(listBlobsHierarchySegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listItemsByHierarchy(delimiter, options = {}) { + return __asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { + var e_2, _a; + let marker; + try { + for (var _b = __asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const listBlobsHierarchySegmentResponse = _c.value; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield yield __await(Object.assign({ kind: "prefix" }, prefix)); + } + } + for (const blob of segment.blobItems) { + yield yield __await(Object.assign({ kind: "blob" }, blob)); + } + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blob prefixes and blobs + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield __await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield __await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const segment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + getContainerNameFromUrl() { + let containerName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". 
+ // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.getPath().split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } + catch (error) { + throw new Error("Unable to extract containerName with provided information."); + } + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. 
+ */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } +} +//# sourceMappingURL=ContainerClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map new file mode 100644 index 00000000..c38c3448 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ContainerClient.js","sourceRoot":"","sources":["../../../src/ContainerClient.ts"],"names":[],"mappings":";AAGA,OAAO,EACL,uBAAuB,EAGvB,MAAM,EACN,iBAAiB,EAEjB,UAAU,GACX,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAgCvD,OAAO,EAAE,WAAW,EAAgB,cAAc,EAA0B,MAAM,YAAY,CAAC;AAC/F,OAAO,EAAiB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAC/D,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACjF,OAAO,EACL,eAAe,EACf,gBAAgB,EAChB,gBAAgB,EAChB,qCAAqC,EACrC,0CAA0C,EAC1C,4BAA4B,EAC5B,iBAAiB,EACjB,4BAA4B,EAC5B,gBAAgB,EAChB,mBAAmB,EACnB,MAAM,EACN,oBAAoB,GACrB,MAAM,sBAAsB,CAAC;AAE9B,OAAO,EAAE,8BAA8B,EAAE,MAAM,8BAA8B,CAAC;AAC9E,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EACL,gBAAgB,EAChB,UAAU,EAEV,eAAe,EAGf,cAAc,GACf,MAAM,WAAW,CAAC;AACnB,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAyiBpD;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,aAAa;IAgEhD,YACE,qBAA6B,EAC7B,mCAKgB;IAChB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAE1D,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,CAAC;oBAE7E,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC;wBACtE,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAC;SAC5E;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,uBAAuB,EAAE,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACnE,CAAC;IApID;;OAEG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;IAC7B,CAAC;IAiID;;;;;;;;;;;;;;;OAeG;IACI,KAAK,CAAC,MAAM,CAAC,UAAkC,EAAE;QACtD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,gDAAgD;YAChD,uFAAuF;YACvF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,iCACpC,OAAO,GACP,kCAAkC,CAAC
,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,iBAAiB,CAC5B,UAAkC,EAAE;;QAEpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,wBAAwB,EAAE;gBACrD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EACL,iFAAiF;iBACpF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CAAC,UAAkC,EAAE;QACtD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,cAAc,EAAE,cAAc,CAAC,cAAc;aAC9C,CAAC,CAAC;YACH,OAAO,IAAI,CAAC;SACb;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;gBACxB,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,sDAAsD;iBAChE,CAAC,CAAC;gBACH,OAAO,KAAK,CAAC;aACd;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACI,aAAa,CAAC,QAAgB;QACnC,OAAO,IAAI,UAAU,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IAChG,CAAC;IAED;;;;OAIG;IACI,mBAAmB,CAAC,QAAgB;QACzC,OAAO,IAAI,gBAAgB,CACzB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACI,kBAAkB,CAAC,QAAgB;QACxC,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,QAAgB;QACvC,OAAO,IAAI,cAAc,CACvB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,aAAa,CACxB,UAAyC,EAAE;QAE3C,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,+BAA+B,EAAE,OAAO,CAAC,CAAC;QACtF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,+BAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,OAAO,CAAC,UAAU,GAClB,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,MAAM,CACjB,UAAwC,EAAE;QAE1C,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,iBACvC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,cAAc,CACzB,UAAwC,EAAE;;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,gCAAgC,EAAE,OAA
O,CAAC,CAAC;QAEvF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,iEAAiE;iBAC3E,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,WAAW,CACtB,QAAmB,EACnB,UAAuC,EAAE;QAEzC,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,IAAI,OAAO,CAAC,UAAU,CAAC,iBAAiB,EAAE;YACxC,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;SACH;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,eAAe,CAC1B,UAA2C,EAAE;QAE7C,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QAExF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,iBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,IACtC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,GAAG,GAAqC;gBAC5C,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,gBAAgB,EAAE,QAAQ,CAAC,gBAAgB;gBAC3C,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,YAAY,EAAE,QAAQ,CAAC,YAAY;gBACnC,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe;gBACzC,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,QAAQ,CAAC,OAAO;aAC1B,CAAC;YAEF,KAAK,MAAM,UAAU,IAAI,QAAQ,EAAE;gBACjC,IAAI,YAAY,GAAQ,SAAS,CAAC;gBAClC,IAAI,UAAU,CAAC,YAAY,EAAE;oBAC3B,YAAY,GAAG;wBACb,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;qBACjD,CAAC;oBAEF,IAAI,UAAU,CAAC,YAAY,CAAC,SAAS,EAAE;wBACrC,YAAY,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;qBACtE;oBAED,IAAI,UAAU,CAAC,YAAY,CAAC,QAAQ,EAAE;wBACpC,YAAY,CAAC,QAAQ,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;qBACpE;iBACF;gBAED,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC;oBACzB,YAAY;oBACZ,EAAE,EAAE,UAAU,CAAC,EAAE;iBAClB,CAAC,CAAC;aACJ;YAED,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;OAgBG;IACI,KAAK,CAAC,eAAe,CAC1B,MAAyB,EACzB,YAAiC,EACjC,UAA2C,EAAE;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,GAAG,GAA4B,EAAE,CAAC;YACxC,KAAK,MAAM,UAAU,IAAI,YAAY,IAAI,EAAE,EAAE;gBAC3C,GAAG,CAAC,IAAI,CAAC;oBACP,YAAY,EAAE;wBACZ,SAAS,EAAE,UAAU,CAAC,YAAY,CAAC,SAAS;4BAC1C,CAAC,CAAC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC;4BACzD,CAAC,CAAC,EAAE;wBACN,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;wBAChD,QAAQ,EAAE,UAAU,CAAC,YAAY,CAAC,QAAQ;4BACxC,CAAC,CAAC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;4BACxD,CAAC,CAAC,EAAE;qBACP;oBACD,EAAE,EAAE,UAAU,CAAC,EAAE;iBAClB,CAAC,CAAC;aACJ;YAED,OAAO,MAAM,IAAI,CAAC,gBAAg
B,CAAC,eAAe,iBAChD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,EACN,YAAY,EAAE,GAAG,EACjB,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACI,kBAAkB,CAAC,cAAuB;QAC/C,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACnD,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;OAqBG;IACI,KAAK,CAAC,eAAe,CAC1B,QAAgB,EAChB,IAAqB,EACrB,aAAqB,EACrB,UAAkC,EAAE;QAEpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,CAAC;YAC1D,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,IAAI,EAAE,aAAa,EAAE,cAAc,CAAC,CAAC;YACnF,OAAO;gBACL,eAAe;gBACf,QAAQ;aACT,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,UAAU,CACrB,QAAgB,EAChB,UAAsC,EAAE;QAExC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,IAAI,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;YAC9C,IAAI,OAAO,CAAC,SAAS,EAAE;gBACrB,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;aACxD;YACD,OAAO,MAAM,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;SAChD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACK,KAAK,CAAC,mBAAmB,CAC/B,MAAe,EACf,UAA4C,EAAE;QAE9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,+BAC9D,MAAM,IACH,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,EAAE,CAAC;YAChC,IAAK,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,KAAK,SAAS,EAAE;gBACnD,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,gBAAgB,CAAE,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,CAAC,CAAC;aAClF;YAED,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE,qCAAqC,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,KAElF,OAAO,kCACF,QAAQ,CAAC,OAAO,KACnB,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;wBAC5D,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;wBACF,OAAO,QAAQ,CAAC;oBAClB,CAAC,CAAC,MAEL,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACK,KAAK,CAAC,wBAAwB,CACpC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE;;QAE9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QACF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,wBAAwB,CAAC,SAAS,gCAC7E,MAAM,IACH,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,EAAE,CAAC;YAChC,IAAK,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,KAAK,SAAS,EAAE;gBACnD,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,gBAAgB,CAAE,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,CAAC,CAAC;aAClF;YAED,QAAQ,CAAC,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;YACnC,IAAK,QAAQ,CAAC,OAAe,CAAC,YAAY,CAAC,KAAK,SAAS,EAAE;gBACzD,QAAQ,CAAC,OAAO,CAAC,YAAY,GAAG,mBAAmB,CAChD,QAAQ,CAAC,OAAe,CAAC,YAAY,CAAC,CACxC,CAAC;aACH;YAED,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE,0CAA0C,CAAC,Q
AAQ,CAAC,SAAS,CAAC,UAAU,CAAC,KAEvF,OAAO,kCACF,QAAQ,CAAC,OAAO,KACnB,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;wBAC5D,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;wBACF,OAAO,QAAQ,CAAC;oBAClB,CAAC,CAAC,EACF,YAAY,EAAE,MAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,0CAAE,GAAG,CAAC,CAAC,kBAAkB,EAAE,EAAE;wBACtE,MAAM,UAAU,GAAe;4BAC7B,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC;yBAChD,CAAC;wBACF,OAAO,UAAU,CAAC;oBACpB,CAAC,CAAC,MAEL,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACY,YAAY,CACzB,MAAe,EACf,UAA4C,EAAE;;YAE9C,IAAI,4BAA4B,CAAC;YACjC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,4BAA4B,GAAG,cAAM,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBAC/E,MAAM,GAAG,4BAA4B,CAAC,iBAAiB,CAAC;oBACxD,oBAAM,cAAM,4BAA4B,CAAA,CAAA,CAAC;iBAC1C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;OAIG;IACY,SAAS,CACtB,UAA4C,EAAE;;;YAE9C,IAAI,MAA0B,CAAC;;gBAC/B,KAAiD,IAAA,KAAA,cAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,IAAA;oBAAxE,MAAM,4BAA4B,WAAA,CAAA;oBAC3C,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,4BAA4B,CAAC,OAAO,CAAC,SAAS,CAAA,CAAA,CAAA,CAAC;iBACvD;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAqEG;IACI,aAAa,CAClB,UAAqC,EAAE;QAEvC,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SACzB;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;YAClC,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;SAClC;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;SACrC;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;YACrC,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;SACpC;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;YACzB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;SAC5B;QAED,MAAM,cAAc,mCACf,OAAO,GACP,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CACpD,CAAC;QAEF,8CAA8C;QAC9C,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;QAC5C,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,kBACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,cAAc,EACjB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;OAYG;IACY,qBAAqB,CAClC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE;;YAE9C,IAAI,iCAAiC,CAAC;YACtC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,iCAAiC,GAAG,cAAM,IAAI,CAAC,wBAAwB,CACrE,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,CAAC;oBACF,MAAM,GAAG,iCAAiC,CAAC,iBAAiB,CAAC;oBAC7D,oBAAM,cAAM,iCAAiC,CAAA,CAAA,CAAC;iBAC/C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;OAKG;IACY,oBAAoB,CACjC,SAAiB,EACjB,UAA4C,EAAE;;;YAE9C,IAAI,MAA0B,CAAC;;gBAC/B,KAAsD,IAAA,KAAA,cAAA,IAAI,CAAC,qBAAqB,CAC9E,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBAJU,MAAM,iCAAiC,WAAA,CAAA;oBAKhD
,MAAM,OAAO,GAAG,iCAAiC,CAAC,OAAO,CAAC;oBAC1D,IAAI,OAAO,CAAC,YAAY,EAAE;wBACxB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,YAAY,EAAE;4BACzC,oCACE,IAAI,EAAE,QAAQ,IACX,MAAM,EACV,CAAC;yBACH;qBACF;oBACD,KAAK,MAAM,IAAI,IAAI,OAAO,CAAC,SAAS,EAAE;wBACpC,oCAAQ,IAAI,EAAE,MAAM,IAAK,IAAI,EAAE,CAAC;qBACjC;iBACF;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA6EG;IACI,oBAAoB,CACzB,SAAiB,EACjB,UAAqC,EAAE;QAKvC,IAAI,SAAS,KAAK,EAAE,EAAE;YACpB,MAAM,IAAI,UAAU,CAAC,iDAAiD,CAAC,CAAC;SACzE;QAED,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SACzB;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;YAClC,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;SAClC;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;SACrC;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;YACrC,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;SACpC;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;YACzB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;SAC5B;QAED,MAAM,cAAc,mCACf,OAAO,GACP,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CACpD,CAAC;QACF,gEAAgE;QAChE,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;QAClE,OAAO;YACL;;eAEG;YACH,KAAK,CAAC,IAAI;gBACR,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,qBAAqB,CAAC,SAAS,EAAE,QAAQ,CAAC,iBAAiB,kBACrE,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,cAAc,EACjB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;OAgBG;IACK,KAAK,CAAC,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE;QAEpD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,iBACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,MAAA,IAAI,CAAC,IAAI,0CAAE,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;qBAC1C;oBACD,uCAAY,IAAI,KAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,IAAG;gBACxD,CAAC,CAAC,GACH,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACY,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE;;YAEpD,IAAI,QAAQ,CAAC;YACb,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,QAAQ,GAAG,cAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;oBACtC,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,oBAAM,QAAQ,CAAA,CAAC;iBAChB,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;;;OAQG;IACY,oBAAoB,CACjC,sBAA8B,EAC9B,UAAkD,EAAE;;;YAEpD,IAAI,MAA0B,CAAC;;gBAC/B,KAA4B,IAAA,KAAA,cAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MA
AM,EACN,OAAO,CACR,CAAA,IAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;iBACtB;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA4EG;IACI,eAAe,CACpB,sBAA8B,EAC9B,UAA0C,EAAE;QAE5C,8CAA8C;QAC9C,MAAM,kBAAkB,qBACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,kBACpF,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,kBAAkB,EACrB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAEO,uBAAuB;QAC7B,IAAI,aAAa,CAAC;QAClB,IAAI;YACF,mCAAmC;YACnC,mEAAmE;YACnE,yDAAyD;YACzD,+FAA+F;YAC/F,wDAAwD;YAExD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAE7C,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;gBACjD,2DAA2D;gBAC3D,4CAA4C;gBAC5C,+BAA+B;gBAC/B,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;aACpD;iBAAM,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;gBACvC,8FAA8F;gBAC9F,wHAAwH;gBACxH,gDAAgD;gBAChD,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;aACpD;iBAAM;gBACL,4CAA4C;gBAC5C,+BAA+B;gBAC/B,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;aACpD;YAED,mGAAmG;YACnG,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAElD,IAAI,CAAC,aAAa,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;YAED,OAAO,aAAa,CAAC;SACtB;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,cAAc,CAAC,OAAuC;QAC3D,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;YAC7B,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;gBAC5D,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;aACH;YAED,MAAM,GAAG,GAAG,8BAA8B,iBAEtC,aAAa,EAAE,IAAI,CAAC,cAAc,IAC/B,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;;;OAMG;IACI,kBAAkB;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { Container } from \"./generated/src/operations\";\nimport {\n BlobDeleteResponse,\n BlobPrefix,\n BlobProperties,\n BlockBlobUploadResponse,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ContainerEncryptionScope,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesResponse,\n ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataResponse,\n FilterBlobItem,\n FilterBlobSegment,\n FilterBlobSegmentModel,\n LeaseAccessConditions,\n ListBlobsFlatSegmentResponseModel,\n ListBlobsHierarchySegmentResponseModel,\n PublicAccessType,\n SignedIdentifierModel,\n} from 
\"./generatedModels\";\nimport {\n Metadata,\n ObjectReplicationPolicy,\n Tags,\n ContainerRequestConditions,\n ModifiedAccessConditions,\n} from \"./models\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from \"./Pipeline\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n BlobNameToString,\n ConvertInternalResponseOfListBlobFlat,\n ConvertInternalResponseOfListBlobHierarchy,\n extractConnectionStringParts,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n ProcessBlobItems,\n ProcessBlobPrefixes,\n toTags,\n truncatedISO8061Date,\n} from \"./utils/utils.common\";\nimport { ContainerSASPermissions } from \"./sas/ContainerSASPermissions\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n AppendBlobClient,\n BlobClient,\n BlobDeleteOptions,\n BlockBlobClient,\n BlockBlobUploadOptions,\n CommonGenerateSasUrlOptions,\n PageBlobClient,\n} from \"./Clients\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { ListBlobsIncludeItem } from \"./generated/src\";\n\n/**\n * Options to configure {@link ContainerClient.create} operation.\n */\nexport interface ContainerCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the container.\n */\n metadata?: Metadata;\n /**\n * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include:\n * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account.\n * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. 
Clients cannot enumerate blobs within the container via anonymous request.\n */\n access?: PublicAccessType;\n /**\n * Container encryption scope info.\n */\n containerEncryptionScope?: ContainerEncryptionScope;\n}\n\n/**\n * Options to configure {@link ContainerClient.getProperties} operation.\n */\nexport interface ContainerGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.delete} operation.\n */\nexport interface ContainerDeleteMethodOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting the container.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.exists} operation.\n */\nexport interface ContainerExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure {@link ContainerClient.setMetadata} operation.\n */\nexport interface ContainerSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.getAccessPolicy} operation.\n */\nexport interface ContainerGetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Signed identifier.\n */\nexport interface SignedIdentifier {\n /**\n * a unique id\n */\n id: string;\n /**\n * Access Policy\n */\n accessPolicy: {\n /**\n * Optional. The date-time the policy is active\n */\n startsOn?: Date;\n /**\n * Optional. 
The date-time the policy expires\n */\n expiresOn?: Date;\n /**\n * The permissions for the acl policy\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n */\n permissions?: string;\n };\n}\n\n/**\n * Contains response data for the {@link ContainerClient.getAccessPolicy} operation.\n */\nexport declare type ContainerGetAccessPolicyResponse = {\n signedIdentifiers: SignedIdentifier[];\n} & ContainerGetAccessPolicyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerGetAccessPolicyHeaders;\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: SignedIdentifierModel[];\n };\n };\n\n/**\n * Options to configure {@link ContainerClient.setAccessPolicy} operation.\n */\nexport interface ContainerSetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting the access policy.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure Container - Acquire Lease operation.\n */\nexport interface ContainerAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Release Lease operation.\n */\nexport interface ContainerReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Renew Lease operation.\n */\nexport interface ContainerRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Break Lease operation.\n */\nexport interface ContainerBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Change Lease operation.\n */\nexport interface ContainerChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n 
abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.deleteBlob} operation.\n */\nexport interface ContainerDeleteBlobOptions extends BlobDeleteOptions {\n /**\n * An opaque DateTime value that, when present, specifies the version\n * of the blob to delete. It's for service version 2019-10-10 and newer.\n */\n versionId?: string;\n}\n\n/**\n * Options to configure Container - List Segment operations.\n *\n * See:\n * - {@link ContainerClient.listSegments}\n * - {@link ContainerClient.listBlobFlatSegment}\n * - {@link ContainerClient.listBlobHierarchySegment}\n * - {@link ContainerClient.listHierarchySegments}\n * - {@link ContainerClient.listItemsByHierarchy}\n */\ninterface ContainerListBlobsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify one or more datasets to include in the response.\n */\n include?: ListBlobsIncludeItem[];\n}\n\n/**\n * An interface representing BlobHierarchyListSegment.\n */\nexport interface BlobHierarchyListSegment {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface ListBlobsHierarchySegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobHierarchySegment operation.\n */\nexport type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse &\n ContainerListBlobHierarchySegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobHierarchySegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsHierarchySegmentResponseModel;\n };\n };\n\n/**\n * An Azure Storage blob\n */\nexport interface BlobItem {\n name: string;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n properties: BlobProperties;\n metadata?: { [propertyName: string]: string };\n tags?: Tags;\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n hasVersionsOnly?: boolean;\n}\n\n/**\n * An interface representing BlobFlatListSegment.\n */\nexport interface BlobFlatListSegment {\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface 
ListBlobsFlatSegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobFlatSegment operation.\n */\nexport type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse &\n ContainerListBlobFlatSegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobFlatSegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsFlatSegmentResponseModel;\n };\n };\n\n/**\n * Options to configure Container - List Blobs operations.\n *\n * See:\n * - {@link ContainerClient.listBlobsFlat}\n * - {@link ContainerClient.listBlobsByHierarchy}\n */\nexport interface ContainerListBlobsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n\n /**\n * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response.\n */\n includeCopy?: boolean;\n /**\n * Specifies whether soft deleted blobs should be included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether blob metadata be returned in the response.\n */\n includeMetadata?: boolean;\n /**\n * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response.\n */\n includeSnapshots?: boolean;\n /**\n * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response.\n */\n includeVersions?: boolean;\n /**\n * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response.\n */\n includeUncommitedBlobs?: boolean;\n /**\n * Specifies whether blob tags be returned in the response.\n */\n includeTags?: boolean;\n /**\n * Specifies whether deleted blob with versions be returned in the response.\n */\n includeDeletedWithVersions?: boolean;\n /**\n * Specifies whether blob immutability policy be returned in the response.\n */\n includeImmutabilityPolicy?: boolean;\n /**\n * Specifies whether blob legal hold be returned in the response.\n */\n includeLegalHold?: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.createIfNotExists} operation.\n */\nexport interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse {\n /**\n * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.deleteIfExists} operation.\n */\nexport interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse {\n /**\n * Indicate whether the container is successfully deleted. 
Is false if the container does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Options to configure {@link ContainerClient.generateSasUrl} operation.\n */\nexport interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: ContainerSASPermissions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.findBlobsByTagsSegment} operation.\n */\ninterface ContainerFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ContainerFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ContainerFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.\n */\nexport class ContainerClient extends StorageClient {\n /**\n * containerContext provided by protocol layer.\n */\n private containerContext: Container;\n\n private _containerName: string;\n\n /**\n * The name of the container.\n */\n public get containerName(): string {\n return this._containerName;\n }\n /**\n *\n * Creates an instance of ContainerClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions);\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName parameter\");\n }\n super(url, pipeline);\n this._containerName = this.getContainerNameFromUrl();\n this.containerContext = new Container(this.storageClientContext);\n }\n\n /**\n * Creates a new container under the specified account. 
If the container with\n * the same name already exists, the operation fails.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param options - Options to Container Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * const createContainerResponse = await containerClient.create();\n * console.log(\"Container was created successfully\", createContainerResponse.requestId);\n * ```\n */\n public async create(options: ContainerCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-create\", options);\n try {\n // Spread operator in destructuring assignments,\n // this will filter out unwanted properties from the response object into result object\n return await this.containerContext.create({\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new container under the specified account. If the container with\n * the same name already exists, it is not changed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param options -\n */\n public async createIfNotExists(\n options: ContainerCreateOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-createIfNotExists\", options);\n try {\n const res = await this.create(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message:\n \"Expected exception when creating a container only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure container resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing container might be deleted by other clients or\n * applications. 
Vice versa new containers with the same name might be added by other clients or\n * applications after this function completes.\n *\n * @param options -\n */\n public async exists(options: ContainerExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-exists\", options);\n try {\n await this.getProperties({\n abortSignal: options.abortSignal,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when checking container existence\",\n });\n return false;\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a {@link BlobClient}\n *\n * @param blobName - A blob name\n * @returns A new BlobClient object for the given blob name.\n */\n public getBlobClient(blobName: string): BlobClient {\n return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n }\n\n /**\n * Creates an {@link AppendBlobClient}\n *\n * @param blobName - An append blob name\n */\n public getAppendBlobClient(blobName: string): AppendBlobClient {\n return new AppendBlobClient(\n appendToURLPath(this.url, encodeURIComponent(blobName)),\n this.pipeline\n );\n }\n\n /**\n * Creates a {@link BlockBlobClient}\n *\n * @param blobName - A block blob name\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n *\n * const blockBlobClient = containerClient.getBlockBlobClient(\"\");\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public getBlockBlobClient(blobName: string): BlockBlobClient {\n return new BlockBlobClient(\n appendToURLPath(this.url, encodeURIComponent(blobName)),\n this.pipeline\n );\n }\n\n /**\n * Creates a {@link PageBlobClient}\n *\n * @param blobName - A page blob name\n */\n public getPageBlobClient(blobName: string): PageBlobClient {\n return new PageBlobClient(\n appendToURLPath(this.url, encodeURIComponent(blobName)),\n this.pipeline\n );\n }\n\n /**\n * Returns all user-defined metadata and system properties for the specified\n * container. The data returned does not include the container's list of blobs.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Options to Container Get Properties operation.\n */\n public async getProperties(\n options: ContainerGetPropertiesOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getProperties\", options);\n try {\n return await this.containerContext.getProperties({\n abortSignal: options.abortSignal,\n ...options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion. 
The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async delete(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-delete\", options);\n try {\n return await this.containerContext.delete({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion if it exists. The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async deleteIfExists(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteIfExists\", options);\n\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a container only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets one or more user-defined name-value pairs for the specified container.\n *\n * If no option provided, or no metadata defined in the parameter, the container\n * metadata will be removed.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Options to Container Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: ContainerSetMetadataOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n if (options.conditions.ifUnmodifiedSince) {\n throw new RangeError(\n \"the IfUnmodifiedSince must have their default values because they are ignored by the blob service\"\n );\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-setMetadata\", options);\n\n try {\n return await this.containerContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the permissions for the specified container. 
The permissions indicate\n * whether container data may be accessed publicly.\n *\n * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.\n * For example, new Date(\"2018-12-31T03:44:23.8827891Z\").toISOString() will get \"2018-12-31T03:44:23.882Z\".\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl\n *\n * @param options - Options to Container Get Access Policy operation.\n */\n public async getAccessPolicy(\n options: ContainerGetAccessPolicyOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getAccessPolicy\", options);\n\n try {\n const response = await this.containerContext.getAccessPolicy({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const res: ContainerGetAccessPolicyResponse = {\n _response: response._response,\n blobPublicAccess: response.blobPublicAccess,\n date: response.date,\n etag: response.etag,\n errorCode: response.errorCode,\n lastModified: response.lastModified,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n signedIdentifiers: [],\n version: response.version,\n };\n\n for (const identifier of response) {\n let accessPolicy: any = undefined;\n if (identifier.accessPolicy) {\n accessPolicy = {\n permissions: identifier.accessPolicy.permissions,\n };\n\n if (identifier.accessPolicy.expiresOn) {\n accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);\n }\n\n if (identifier.accessPolicy.startsOn) {\n accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);\n }\n }\n\n res.signedIdentifiers.push({\n accessPolicy,\n id: identifier.id,\n });\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the permissions for the specified container. The permissions indicate\n * whether blobs in a container may be accessed publicly.\n *\n * When you set permissions for a container, the existing permissions are replaced.\n * If no access or containerAcl provided, the existing container ACL will be\n * removed.\n *\n * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.\n * During this interval, a shared access signature that is associated with the stored access policy will\n * fail with status code 403 (Forbidden), until the access policy becomes active.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n *\n * @param access - The level of public access to data in the container.\n * @param containerAcl - Array of elements each having a unique Id and details of the access policy.\n * @param options - Options to Container Set Access Policy operation.\n */\n public async setAccessPolicy(\n access?: PublicAccessType,\n containerAcl?: SignedIdentifier[],\n options: ContainerSetAccessPolicyOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"ContainerClient-setAccessPolicy\", options);\n try {\n const acl: SignedIdentifierModel[] = [];\n for (const identifier of containerAcl || []) {\n acl.push({\n accessPolicy: {\n expiresOn: identifier.accessPolicy.expiresOn\n ? 
truncatedISO8061Date(identifier.accessPolicy.expiresOn)\n : \"\",\n permissions: identifier.accessPolicy.permissions,\n startsOn: identifier.accessPolicy.startsOn\n ? truncatedISO8061Date(identifier.accessPolicy.startsOn)\n : \"\",\n },\n id: identifier.id,\n });\n }\n\n return await this.containerContext.setAccessPolicy({\n abortSignal: options.abortSignal,\n access,\n containerAcl: acl,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the container.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the container.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n *\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},\n * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better\n * performance with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param blobName - Name of the block blob to create or update.\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to configure the Block Blob Upload operation.\n * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.\n */\n public async uploadBlockBlob(\n blobName: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise<{ blockBlobClient: BlockBlobClient; response: BlockBlobUploadResponse }> {\n const { span, updatedOptions } = createSpan(\"ContainerClient-uploadBlockBlob\", options);\n try {\n const blockBlobClient = this.getBlockBlobClient(blobName);\n const response = await blockBlobClient.upload(body, contentLength, updatedOptions);\n return {\n blockBlobClient,\n response,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param blobName -\n * @param options - Options to Blob Delete operation.\n * @returns Block blob deletion response data.\n */\n public async deleteBlob(\n blobName: string,\n options: ContainerDeleteBlobOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteBlob\", options);\n try {\n let blobClient = this.getBlobClient(blobName);\n if (options.versionId) {\n blobClient = blobClient.withVersion(options.versionId);\n }\n return await blobClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobFlatSegment returns a single segment of blobs starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call listBlobsFlatSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Flat Segment operation.\n */\n private async listBlobFlatSegment(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-listBlobFlatSegment\", options);\n try {\n const response = await this.containerContext.listBlobFlatSegment({\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n response.segment.blobItems = [];\n if ((response.segment as any)[\"Blob\"] !== undefined) {\n response.segment.blobItems = ProcessBlobItems((response.segment as any)[\"Blob\"]);\n }\n\n const wrappedResponse: ContainerListBlobFlatSegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobHierarchySegment returns a single segment of blobs starting from\n * the specified Marker. Use an empty Marker to start enumeration from the\n * beginning. 
After getting a segment, process it, and then call listBlobsHierarchicalSegment\n * again (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Hierarchy Segment operation.\n */\n private async listBlobHierarchySegment(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"ContainerClient-listBlobHierarchySegment\",\n options\n );\n try {\n const response = await this.containerContext.listBlobHierarchySegment(delimiter, {\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n response.segment.blobItems = [];\n if ((response.segment as any)[\"Blob\"] !== undefined) {\n response.segment.blobItems = ProcessBlobItems((response.segment as any)[\"Blob\"]);\n }\n\n response.segment.blobPrefixes = [];\n if ((response.segment as any)[\"BlobPrefix\"] !== undefined) {\n response.segment.blobPrefixes = ProcessBlobPrefixes(\n (response.segment as any)[\"BlobPrefix\"]\n );\n }\n\n const wrappedResponse: ContainerListBlobHierarchySegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefix = {\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse\n *\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listSegments(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsFlatSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options);\n marker = listBlobsFlatSegmentResponse.continuationToken;\n yield await listBlobsFlatSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link BlobItem} objects\n *\n * @param options - Options to list blobs operation.\n */\n private async *listItems(\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) {\n yield* listBlobsFlatSegmentResponse.segment.blobItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the containerClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\");`\n * let i = 1;\n * for await (const blob of containerClient.listBlobsFlat()) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = containerClient.listBlobsFlat();\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * @param options - Options to list blobs.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listBlobsFlat(\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator {\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n 
include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include: include } : {}),\n };\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listItems(updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listHierarchySegments(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsHierarchySegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(\n delimiter,\n marker,\n options\n );\n marker = listBlobsHierarchySegmentResponse.continuationToken;\n yield await listBlobsHierarchySegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n private async *listItemsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator<({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem)> {\n let marker: string | undefined;\n for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(\n delimiter,\n marker,\n options\n )) {\n const segment = listBlobsHierarchySegmentResponse.segment;\n if (segment.blobPrefixes) {\n for (const prefix of segment.blobPrefixes) {\n yield {\n kind: \"prefix\",\n ...prefix,\n };\n }\n }\n for (const blob of segment.blobItems) {\n yield { kind: \"blob\", ...blob };\n }\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs by hierarchy.\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * for await (const item of 
containerClient.listBlobsByHierarchy(\"/\")) {\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let iter = containerClient.listBlobsByHierarchy(\"/\", { prefix: \"prefix1/\" });\n * let entity = await iter.next();\n * while (!entity.done) {\n * let item = entity.value;\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * entity = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page\");\n * for await (const response of containerClient.listBlobsByHierarchy(\"/\").byPage()) {\n * const segment = response.segment;\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a max page size:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page, specifying a prefix and a max page size\");\n *\n * let i = 1;\n * for await (const response of containerClient\n * .listBlobsByHierarchy(\"/\", { prefix: \"prefix2/sub1/\" })\n * .byPage({ maxPageSize: 2 })) {\n * console.log(`Page ${i++}`);\n * const segment = response.segment;\n *\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n *\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n public listBlobsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator<\n ({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem),\n ContainerListBlobHierarchySegmentResponse\n > {\n if (delimiter === \"\") {\n throw new RangeError(\"delimiter should contain one or more characters\");\n }\n\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? 
{ include: include } : {}),\n };\n // AsyncIterableIterator to iterate over blob prefixes and blobs\n const iter = this.listItemsByHierarchy(delimiter, updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n async next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listHierarchySegments(delimiter, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in the container whose tags\n * match a given search expression.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-findBlobsByTagsSegment\", options);\n\n try {\n const response = await this.containerContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ContainerFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. 
The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified container.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of containerClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = containerClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = containerClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n 
* // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ContainerFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n private getContainerNameFromUrl(): string {\n let containerName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`\n // http://localhost:10001/devstoreaccount1/containername\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername\".\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername\n // .getPath() -> /devstoreaccount1/containername\n containerName = parsedUrl.getPath()!.split(\"/\")[2];\n } else {\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n }\n\n // decode the encoded containerName - to get all the special characters that might be present in it\n containerName = decodeURIComponent(containerName);\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return containerName;\n } catch (error: any) {\n throw new Error(\"Unable to extract containerName with provided information.\");\n }\n }\n\n /**\n * Only available for ContainerClient constructed with a shared key credential.\n *\n * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. 
The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this container.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js new file mode 100644 index 00000000..4854ba6f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. 
+ * @param response - Model PageBlob Range response + */ +export function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} +//# sourceMappingURL=PageBlobRangeResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map new file mode 100644 index 00000000..d442d121 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"PageBlobRangeResponse.js","sourceRoot":"","sources":["../../../src/PageBlobRangeResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA8ElC;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,QAAqF;IAErF,MAAM,SAAS,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC5E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,UAAU,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC9E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,uCACK,QAAQ,KACX,SAAS;QACT,UAAU,EACV,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE;gBACV,SAAS;gBACT,UAAU;aACX,OAEH;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\nimport {\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffResponseModel,\n} from \"./generatedModels\";\nimport { Range } from \"./Range\";\n\n/**\n * List of page ranges for a blob.\n */\nexport interface PageList {\n /**\n * Valid non-overlapping page ranges.\n */\n pageRange?: Range[];\n /**\n * Present if the prevSnapshot parameter was specified and there were cleared\n * pages between the previous snapshot and the target snapshot.\n */\n clearRange?: Range[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffResponse\n extends PageList,\n PageBlobGetPageRangesDiffHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n 
*/\n parsedBody: PageList;\n };\n}\n\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response - Model PageBlob Range response\n */\nexport function rangeResponseFromModel(\n response: PageBlobGetPageRangesResponseModel | PageBlobGetPageRangesDiffResponseModel\n): PageBlobGetPageRangesResponse | PageBlobGetPageRangesDiffResponse {\n const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n return {\n ...response,\n pageRange,\n clearRange,\n _response: {\n ...response._response,\n parsedBody: {\n pageRange,\n clearRange,\n },\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js new file mode 100644 index 00000000..bbb977ae --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, deserializationPolicy, disableResponseDecompressionPolicy, HttpHeaders, RequestPolicyOptions, WebResource, proxyPolicy, isNode, isTokenCredential, tracingPolicy, logPolicy, keepAlivePolicy, generateClientRequestIdPolicy, } from "@azure/core-http"; +import { logger } from "./log"; +import { StorageBrowserPolicyFactory } from "./StorageBrowserPolicyFactory"; +import { StorageRetryPolicyFactory } from "./StorageRetryPolicyFactory"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageOAuthScopes, StorageBlobLoggingAllowedHeaderNames, StorageBlobLoggingAllowedQueryParameters, } from "./utils/constants"; +import { TelemetryPolicyFactory } from "./TelemetryPolicyFactory"; +import { getCachedDefaultHttpClient } from "./utils/cache"; +import { attachCredential } from "./utils/utils.common"; +import { storageBearerTokenChallengeAuthenticationPolicy } from "./policies/StorageBearerTokenChallengeAuthenticationPolicy"; +// Export following interfaces and types for customers who want to implement their +// own RequestPolicy or HTTPClient +export { BaseRequestPolicy, StorageOAuthScopes, deserializationPolicy, HttpHeaders, WebResource, RequestPolicyOptions, }; +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +export function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export class Pipeline { + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. 
+ * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + // when options.httpClient is not specified, passing in a DefaultHttpClient instance to + // avoid each client creating its own http client. + this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; + } +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +export function newPipeline(credential, pipelineOptions = {}) { + var _a; + if (credential === undefined) { + credential = new AnonymousCredential(); + } + // Order is important. Closer to the API at the top & closer to the network at the bottom. + // The credential's policy factory must appear close to the wire so it can sign any + // changes made by other factories (like UniqueRequestIDPolicyFactory) + const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + const factories = [ + tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), + keepAlivePolicy(pipelineOptions.keepAliveOptions), + telemetryPolicy, + generateClientRequestIdPolicy(), + new StorageBrowserPolicyFactory(), + new StorageRetryPolicyFactory(pipelineOptions.retryOptions), + // Default deserializationPolicy is provided by protocol layer + // Use customized XML char key of "#" so we could deserialize metadata + // with "_" key + deserializationPolicy(undefined, { xmlCharKey: "#" }), + logPolicy({ + logger: logger.info, + allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), + ]; + if (isNode) { + // policies only available in Node.js runtime, not in browsers + factories.push(proxyPolicy(pipelineOptions.proxyOptions)); + factories.push(disableResponseDecompressionPolicy()); + } + factories.push(isTokenCredential(credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes), credential) + : credential); + return new Pipeline(factories, pipelineOptions); +} +//# sourceMappingURL=Pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map new file mode 100644 index 00000000..58db6655 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Pipeline.js","sourceRoot":"","sources":["../../../src/Pipeline.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EACjB,qBAAqB,EACrB,kCAAkC,EAElC,WAAW,EAKX,oBAAoB,EAEpB,WAAW,EACX,WAAW,EACX,MAAM,EAEN,iBAAiB,EACjB,aAAa,EACb,SAAS,EAET,eAAe,EAEf,6BAA6B,GAE9B,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAE,2BAA2B,EAAE,MAAM,+BAA+B,CAAC;AAC5E,OAAO,EAAuB,yBAAyB,EAAE,MAAM,6BAA6B,CAAC;AAE7F,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EACL,kBAAkB,EAClB,oCAAoC,EACpC,wCAAwC,GACzC,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC;AAClE,OAAO,EAAE,0BAA0B,EAAE,MAAM,eAAe,CAAC;AAC3D,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AACxD,OAAO,EAAE,+CAA+C,EAAE,MAAM,4DAA4D,CAAC;AAE7H,kFAAkF;AAClF,kCAAkC;AAClC,OAAO,EACL,iBAAiB,EACjB,kBAAkB,EAClB,qBAAqB,EAErB,WAAW,EAGX,WAAW,EAGX,oBAAoB,GACrB,CAAC;AAsCF;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,QAAiB;IAC9C,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;QAC7C,OAAO,KAAK,CAAC;KACd;IAED,MAAM,YAAY,GAAG,QAAwB,CAAC;IAE9C,OAAO,CACL,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,SAAS,CAAC;QACrC,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ;QACxC,OAAO,YAAY,CAAC,sBAAsB,KAAK,UAAU,CAC1D,CAAC;AACJ,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,OAAO,QAAQ;IAUnB;;;;;OAKG;IACH,YAAY,SAAiC,EAAE,UAA2B,EAAE;QAC1E,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,uFAAuF;QACvF,kDAAkD;QAClD,IAAI,CAAC,OAAO,mCACP,OAAO,KACV,UAAU,EAAE,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,GAC/D,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACI,sBAAsB;QAC3B,OAAO;YACL,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU;YACnC,sBAAsB,EAAE,IAAI,CAAC,SAAS;SACvC,CAAC;IACJ,CAAC;CACF;AAgCD;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CACzB,UAA+E,EAC/E,kBAA0C,EAAE;;IAE5C,IAAI,UAAU,KAAK,SAAS,EAAE;QAC5B,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;KACxC;IAED,0FAA0F;IAC1F,mFAAmF;IACnF,sEAAsE;IAEtE,MAAM,eAAe,GAAG,IAAI,sBAAsB,CAAC,eAAe,CAAC,gBAAgB,CAAC,CAAC;IACrF,MAAM,SAAS,GAA2B;QACxC,aAAa,CAAC,EAAE,SAAS,EAAE,eAAe,CAAC,eAAe,EAAE,CAAC;QAC7D,eAAe,CAAC,eAAe,CAAC,gBAAgB,CAAC;QACjD,eAAe;QACf,6BAA6B,EAAE;QAC/B,IAAI,2BAA2B,EAAE;QACjC,IAAI,yBAAyB,CAAC,eAAe,CAAC,YAAY,CAAC;QAC3D,8DAA8D;QAC9D,sEAAsE;QACtE,eAAe;QACf,qBAAqB,CAAC,SAAS,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC;QACrD,SAAS,CAAC;YACR,MAAM,EAAE,MAAM,CAAC,IAAI;YACnB,kBAAkB,EAAE,oCAAoC;YACxD,sBAAsB,EAAE,wCAAwC;SACjE,CAAC;KACH,CAAC;IAEF,IAAI,MAAM,EAAE;QACV,8DAA8D;QAC9D,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;QAC1D,SAAS,CAAC,IAAI,CAAC,kCAAkC,EAAE,CAAC,CAAC;KACtD;IACD,SAAS,CAAC,IAAI,CACZ,iBAAiB,CAAC,UAAU,CAAC;QAC3B,CAAC,CAAC,gBAAgB,CACd,+CAA+C,CAC7C,UAAU,EACV,MAAA,eAAe,CAAC,QAAQ,mCAAI,kBAAkB,CAC/C,EACD,UAAU,CACX;QACH,CAAC,CAAC,UAAU,CACf,CAAC;IAEF,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;AAClD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n disableResponseDecompressionPolicy,\n HttpClient as IHttpClient,\n HttpHeaders,\n HttpOperationResponse,\n HttpRequestBody,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n ServiceClientOptions,\n WebResource,\n proxyPolicy,\n isNode,\n TokenCredential,\n 
isTokenCredential,\n tracingPolicy,\n logPolicy,\n ProxyOptions,\n keepAlivePolicy,\n KeepAliveOptions,\n generateClientRequestIdPolicy,\n UserAgentOptions,\n} from \"@azure/core-http\";\n\nimport { logger } from \"./log\";\nimport { StorageBrowserPolicyFactory } from \"./StorageBrowserPolicyFactory\";\nimport { StorageRetryOptions, StorageRetryPolicyFactory } from \"./StorageRetryPolicyFactory\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport {\n StorageOAuthScopes,\n StorageBlobLoggingAllowedHeaderNames,\n StorageBlobLoggingAllowedQueryParameters,\n} from \"./utils/constants\";\nimport { TelemetryPolicyFactory } from \"./TelemetryPolicyFactory\";\nimport { getCachedDefaultHttpClient } from \"./utils/cache\";\nimport { attachCredential } from \"./utils/utils.common\";\nimport { storageBearerTokenChallengeAuthenticationPolicy } from \"./policies/StorageBearerTokenChallengeAuthenticationPolicy\";\n\n// Export following interfaces and types for customers who want to implement their\n// own RequestPolicy or HTTPClient\nexport {\n BaseRequestPolicy,\n StorageOAuthScopes,\n deserializationPolicy,\n IHttpClient,\n HttpHeaders,\n HttpRequestBody,\n HttpOperationResponse,\n WebResource,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptions,\n};\n\n/**\n * Option interface for Pipeline constructor.\n */\nexport interface PipelineOptions {\n /**\n * Optional. Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n}\n\n/**\n * An interface for the {@link Pipeline} class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport interface PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n readonly factories: RequestPolicyFactory[];\n /**\n * Configures pipeline logger and HTTP client.\n */\n readonly options: PipelineOptions;\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n toServiceClientOptions(): ServiceClientOptions;\n}\n\n/**\n * A helper to decide if a given argument satisfies the Pipeline contract\n * @param pipeline - An argument that may be a Pipeline\n * @returns true when the argument satisfies the Pipeline contract\n */\nexport function isPipelineLike(pipeline: unknown): pipeline is PipelineLike {\n if (!pipeline || typeof pipeline !== \"object\") {\n return false;\n }\n\n const castPipeline = pipeline as PipelineLike;\n\n return (\n Array.isArray(castPipeline.factories) &&\n typeof castPipeline.options === \"object\" &&\n typeof castPipeline.toServiceClientOptions === \"function\"\n );\n}\n\n/**\n * A Pipeline class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport class Pipeline implements PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n public readonly factories: RequestPolicyFactory[];\n /**\n * 
Configures pipeline logger and HTTP client.\n */\n public readonly options: PipelineOptions;\n\n /**\n * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface.\n *\n * @param factories -\n * @param options -\n */\n constructor(factories: RequestPolicyFactory[], options: PipelineOptions = {}) {\n this.factories = factories;\n // when options.httpClient is not specified, passing in a DefaultHttpClient instance to\n // avoid each client creating its own http client.\n this.options = {\n ...options,\n httpClient: options.httpClient || getCachedDefaultHttpClient(),\n };\n }\n\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n public toServiceClientOptions(): ServiceClientOptions {\n return {\n httpClient: this.options.httpClient,\n requestPolicyFactories: this.factories,\n };\n }\n}\n\n/**\n * Options interface for the {@link newPipeline} function.\n */\nexport interface StoragePipelineOptions {\n /**\n * Options to configure a proxy for outgoing requests.\n */\n proxyOptions?: ProxyOptions;\n /**\n * Options for adding user agent details to outgoing requests.\n */\n userAgentOptions?: UserAgentOptions;\n /**\n * Configures the built-in retry policy behavior.\n */\n retryOptions?: StorageRetryOptions;\n /**\n * Keep alive configurations. Default keep-alive is enabled.\n */\n keepAliveOptions?: KeepAliveOptions;\n /**\n * Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n /**\n * The audience used to retrieve an AAD token.\n */\n audience?: string | string[];\n}\n\n/**\n * Creates a new Pipeline object with Credential provided.\n *\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param pipelineOptions - Optional. Options.\n * @returns A new Pipeline object.\n */\nexport function newPipeline(\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n pipelineOptions: StoragePipelineOptions = {}\n): Pipeline {\n if (credential === undefined) {\n credential = new AnonymousCredential();\n }\n\n // Order is important. 
Closer to the API at the top & closer to the network at the bottom.\n // The credential's policy factory must appear close to the wire so it can sign any\n // changes made by other factories (like UniqueRequestIDPolicyFactory)\n\n const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);\n const factories: RequestPolicyFactory[] = [\n tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),\n keepAlivePolicy(pipelineOptions.keepAliveOptions),\n telemetryPolicy,\n generateClientRequestIdPolicy(),\n new StorageBrowserPolicyFactory(),\n new StorageRetryPolicyFactory(pipelineOptions.retryOptions), // Retry policy should be above any policy that throws retryable errors\n // Default deserializationPolicy is provided by protocol layer\n // Use customized XML char key of \"#\" so we could deserialize metadata\n // with \"_\" key\n deserializationPolicy(undefined, { xmlCharKey: \"#\" }),\n logPolicy({\n logger: logger.info,\n allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,\n allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters,\n }),\n ];\n\n if (isNode) {\n // policies only available in Node.js runtime, not in browsers\n factories.push(proxyPolicy(pipelineOptions.proxyOptions));\n factories.push(disableResponseDecompressionPolicy());\n }\n factories.push(\n isTokenCredential(credential)\n ? attachCredential(\n storageBearerTokenChallengeAuthenticationPolicy(\n credential,\n pipelineOptions.audience ?? StorageOAuthScopes\n ),\n credential\n )\n : credential\n );\n\n return new Pipeline(factories, pipelineOptions);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js new file mode 100644 index 00000000..ea83a0c3 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +export function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count + ? 
`bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} +//# sourceMappingURL=Range.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map new file mode 100644 index 00000000..a501b758 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Range.js","sourceRoot":"","sources":["../../../src/Range.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkBlC;;;;;;GAMG;AACH,MAAM,UAAU,aAAa,CAAC,MAAa;IACzC,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;QACrB,MAAM,IAAI,UAAU,CAAC,wCAAwC,CAAC,CAAC;KAChE;IACD,IAAI,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,EAAE;QACrC,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;KACH;IACD,OAAO,MAAM,CAAC,KAAK;QACjB,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE;QAC9D,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,GAAG,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Range for Blob Service Operations.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations\n */\nexport interface Range {\n /**\n * StartByte, larger than or equal 0.\n */\n offset: number;\n /**\n * Optional. Count of bytes, larger than 0.\n * If not provided, will return bytes from offset to the end.\n */\n count?: number;\n}\n\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @param iRange -\n */\nexport function rangeToString(iRange: Range): string {\n if (iRange.offset < 0) {\n throw new RangeError(`Range.offset cannot be smaller than 0.`);\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\n `Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`\n );\n }\n return iRange.count\n ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`\n : `bytes=${iRange.offset}-`;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js new file mode 100644 index 00000000..e35175c9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { StorageBrowserPolicy } from "./policies/StorageBrowserPolicy"; +export { StorageBrowserPolicy }; +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +export class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } +} +//# sourceMappingURL=StorageBrowserPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js.map new file mode 100644 index 00000000..655540db --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageBrowserPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageBrowserPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,oBAAoB,EAAE,CAAC;AAEhC;;GAEG;AACH,MAAM,OAAO,2BAA2B;IACtC;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageBrowserPolicy } from \"./policies/StorageBrowserPolicy\";\nexport { StorageBrowserPolicy };\n\n/**\n * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.\n */\nexport class StorageBrowserPolicyFactory implements RequestPolicyFactory {\n /**\n * Creates a StorageBrowserPolicyFactory object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy {\n return new StorageBrowserPolicy(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js new file mode 100644 index 00000000..c36e4ace --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { StorageClientContext } from "./generated/src/storageClientContext"; +import { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from "./utils/utils.common"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { isTokenCredential, isNode } from "@azure/core-http"; +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +export class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. 
+ */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = new AnonymousCredential(); + for (const factory of this.pipeline.factories) { + if ((isNode && factory instanceof StorageSharedKeyCredential) || + factory instanceof AnonymousCredential) { + this.credential = factory; + } + else if (isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + this.credential = factory.credential; + } + } + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} +//# sourceMappingURL=StorageClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map new file mode 100644 index 00000000..d7bf4fc3 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageClient.js","sourceRoot":"","sources":["../../../src/StorageClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,oBAAoB,EAAE,MAAM,sCAAsC,CAAC;AAE5E,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,EAAE,qBAAqB,EAAE,MAAM,sBAAsB,CAAC;AAClG,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAmB,iBAAiB,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAa9E;;;GAGG;AACH,MAAM,OAAgB,aAAa;IAyBjC;;;;OAIG;IACH,YAAsB,GAAW,EAAE,QAAsB;QACvD,iFAAiF;QACjF,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;QAC9B,IAAI,CAAC,WAAW,GAAG,qBAAqB,CAAC,GAAG,CAAC,CAAC;QAC9C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,oBAAoB,GAAG,IAAI,oBAAoB,CAClD,IAAI,CAAC,GAAG,EACR,QAAQ,CAAC,sBAAsB,EAAE,CAClC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;QAE7D,IAAI,CAAC,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAC5C,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,EAAE;YAC7C,IACE,CAAC,MAAM,IAAI,OAAO,YAAY,0BAA0B,CAAC;gBACzD,OAAO,YAAY,mBAAmB,EACtC;gBACA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC;aAC3B;iBAAM,IAAI,iBAAiB,CAAE,OAAe,CAAC,UAAU,CAAC,EAAE;gBACzD,uEAAuE;gBACvE,0DAA0D;gBAC1D,IAAI,CAAC,UAAU,GAAI,OAAe,CAAC,UAAU,CAAC;aAC/C;SACF;QAED,iDAAiD;QACjD,MAAM,oBAAoB,GAAG,IAAI,CAAC,oBAA2B,CAAC;QAC9D,oBAAoB,CAAC,kBAAkB,GAAG,SAAS,CAAC;IACtD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike } from \"./Pipeline\";\nimport { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from \"./utils/utils.common\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { TokenCredential, isTokenCredential, isNode } from \"@azure/core-http\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * An interface for options common to every remote operation.\n */\nexport interface CommonOptions {\n /**\n * Options to configure spans created 
when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n */\nexport abstract class StorageClient {\n /**\n * Encoded URL string value.\n */\n public readonly url: string;\n public readonly accountName: string;\n /**\n * Request policy pipeline.\n *\n * @internal\n */\n protected readonly pipeline: PipelineLike;\n /**\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n public readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n /**\n * StorageClient is a reference to protocol layer operations entry, which is\n * generated by AutoRest generator.\n */\n protected readonly storageClientContext: StorageClientContext;\n /**\n */\n protected readonly isHttps: boolean;\n\n /**\n * Creates an instance of StorageClient.\n * @param url - url to resource\n * @param pipeline - request policy pipeline.\n */\n protected constructor(url: string, pipeline: PipelineLike) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageClientContext(\n this.url,\n pipeline.toServiceClientOptions()\n );\n\n this.isHttps = iEqual(getURLScheme(this.url) || \"\", \"https\");\n\n this.credential = new AnonymousCredential();\n for (const factory of this.pipeline.factories) {\n if (\n (isNode && factory instanceof StorageSharedKeyCredential) ||\n factory instanceof AnonymousCredential\n ) {\n this.credential = factory;\n } else if (isTokenCredential((factory as any).credential)) {\n // Only works if the factory has been attached a \"credential\" property.\n // We do that in newPipeline() when using TokenCredential.\n this.credential = (factory as any).credential;\n }\n }\n\n // Override protocol layer's default content-type\n const storageClientContext = this.storageClientContext as any;\n storageClientContext.requestContentType = undefined;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js new file mode 100644 index 00000000..4f057291 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { StorageRetryPolicy, StorageRetryPolicyType } from "./policies/StorageRetryPolicy"; +export { StorageRetryPolicyType, StorageRetryPolicy }; +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +export class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } +} +//# sourceMappingURL=StorageRetryPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js.map new file mode 100644 index 00000000..9cfff583 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageRetryPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageRetryPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,MAAM,+BAA+B,CAAC;AAE3F,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,CAAC;AAmDtD;;GAEG;AACH,MAAM,OAAO,yBAAyB;IAGpC;;;OAGG;IACH,YAAY,YAAkC;QAC5C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;IACxE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageRetryPolicy, StorageRetryPolicyType } from \"./policies/StorageRetryPolicy\";\n\nexport { StorageRetryPolicyType, StorageRetryPolicy };\n\n/**\n * Storage Blob retry options interface.\n */\nexport interface StorageRetryOptions {\n /**\n * Optional. StorageRetryPolicyType, default is exponential retry policy.\n */\n readonly retryPolicyType?: StorageRetryPolicyType;\n\n /**\n * Optional. Max try number of attempts, default is 4.\n * A value of 1 means 1 try and no retries.\n * A value smaller than 1 means default retry number of attempts.\n */\n readonly maxTries?: number;\n\n /**\n * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request.\n * A value of zero or undefined means no default timeout on SDK client, Azure\n * Storage server's default timeout policy will be used.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations\n */\n readonly tryTimeoutInMs?: number;\n\n /**\n * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms).\n * The delay increases (exponentially or linearly) with each retry up to a maximum specified by\n * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs.\n */\n readonly retryDelayInMs?: number;\n\n /**\n * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms).\n * If you specify 0, then you must also specify 0 for retryDelayInMs.\n */\n readonly maxRetryDelayInMs?: number;\n\n /**\n * If a secondaryHost is specified, retries will be tried against this host. 
If secondaryHost is undefined\n * (the default) then operations are not retried against another host.\n *\n * NOTE: Before setting this field, make sure you understand the issues around\n * reading stale and potentially-inconsistent data at\n * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs}\n */\n readonly secondaryHost?: string;\n}\n\n/**\n * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.\n */\nexport class StorageRetryPolicyFactory implements RequestPolicyFactory {\n private retryOptions?: StorageRetryOptions;\n\n /**\n * Creates an instance of StorageRetryPolicyFactory.\n * @param retryOptions -\n */\n constructor(retryOptions?: StorageRetryOptions) {\n this.retryOptions = retryOptions;\n }\n\n /**\n * Creates a StorageRetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy {\n return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js new file mode 100644 index 00000000..a2e71a3b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode, } from "@azure/core-http"; +import * as os from "os"; +import { TelemetryPolicy } from "./policies/TelemetryPolicy"; +import { SDK_VERSION } from "./utils/constants"; +/** + * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. + */ +export class TelemetryPolicyFactory { + /** + * Creates an instance of TelemetryPolicyFactory. + * @param telemetry - + */ + constructor(telemetry) { + const userAgentInfo = []; + if (isNode) { + if (telemetry) { + const telemetryString = telemetry.userAgentPrefix || ""; + if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { + userAgentInfo.push(telemetryString); + } + } + // e.g. azsdk-js-storageblob/10.0.0 + const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + if (userAgentInfo.indexOf(libInfo) === -1) { + userAgentInfo.push(libInfo); + } + // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) + let runtimeInfo = `(NODE-VERSION ${process.version})`; + if (os) { + runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; + } + if (userAgentInfo.indexOf(runtimeInfo) === -1) { + userAgentInfo.push(runtimeInfo); + } + } + this.telemetryString = userAgentInfo.join(" "); + } + /** + * Creates a TelemetryPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + } +} +//# sourceMappingURL=TelemetryPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map new file mode 100644 index 00000000..ee8cbcac --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TelemetryPolicyFactory.js","sourceRoot":"","sources":["../../../src/TelemetryPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,MAAM,GAKP,MAAM,kBAAkB,CAAC;AAC1B,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,OAAO,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AAC7D,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD;;GAEG;AACH,MAAM,OAAO,sBAAsB;IAMjC;;;OAGG;IACH,YAAY,SAA4B;QACtC,MAAM,aAAa,GAAa,EAAE,CAAC;QAEnC,IAAI,MAAM,EAAE;YACV,IAAI,SAAS,EAAE;gBACb,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,IAAI,EAAE,CAAC;gBACxD,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,IAAI,aAAa,CAAC,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC/E,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;iBACrC;aACF;YAED,mCAAmC;YACnC,MAAM,OAAO,GAAG,wBAAwB,WAAW,EAAE,CAAC;YACtD,IAAI,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;gBACzC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;aAC7B;YAED,mDAAmD;YACnD,IAAI,WAAW,GAAG,iBAAiB,OAAO,CAAC,OAAO,GAAG,CAAC;YACtD,IAAI,EAAE,EAAE;gBACN,WAAW,GAAG,iBAAiB,OAAO,CAAC,OAAO,KAAK,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,OAAO,EAAE,GAAG,CAAC;aACjF;YACD,IAAI,aAAa,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7C,aAAa,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;aACjC;SACF;QAED,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;IACxE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n isNode,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n UserAgentOptions,\n} from \"@azure/core-http\";\nimport * as os from \"os\";\n\nimport { TelemetryPolicy } from \"./policies/TelemetryPolicy\";\nimport { SDK_VERSION } from \"./utils/constants\";\n\n/**\n * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.\n */\nexport class TelemetryPolicyFactory implements RequestPolicyFactory {\n /**\n * @internal\n */\n public readonly telemetryString: string;\n\n /**\n * Creates an instance of TelemetryPolicyFactory.\n * @param telemetry -\n */\n constructor(telemetry?: UserAgentOptions) {\n const userAgentInfo: string[] = [];\n\n if (isNode) {\n if (telemetry) {\n const telemetryString = telemetry.userAgentPrefix || \"\";\n if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {\n userAgentInfo.push(telemetryString);\n }\n }\n\n // e.g. azsdk-js-storageblob/10.0.0\n const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`;\n if (userAgentInfo.indexOf(libInfo) === -1) {\n userAgentInfo.push(libInfo);\n }\n\n // e.g. 
(NODE-VERSION 4.9.1; Windows_NT 10.0.16299)\n let runtimeInfo = `(NODE-VERSION ${process.version})`;\n if (os) {\n runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`;\n }\n if (userAgentInfo.indexOf(runtimeInfo) === -1) {\n userAgentInfo.push(runtimeInfo);\n }\n }\n\n this.telemetryString = userAgentInfo.join(\" \");\n }\n\n /**\n * Creates a TelemetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): TelemetryPolicy {\n return new TelemetryPolicy(nextPolicy, options, this.telemetryString);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js new file mode 100644 index 00000000..2511e144 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AnonymousCredentialPolicy } from "../policies/AnonymousCredentialPolicy"; +import { Credential } from "./Credential"; +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +export class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } +} +//# sourceMappingURL=AnonymousCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js.map new file mode 100644 index 00000000..6819d443 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AnonymousCredential.js","sourceRoot":"","sources":["../../../../src/credentials/AnonymousCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,yBAAyB,EAAE,MAAM,uCAAuC,CAAC;AAClF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;;GAKG;AACH,MAAM,OAAO,mBAAoB,SAAQ,UAAU;IACjD;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,yBAAyB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC5D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { AnonymousCredentialPolicy } from \"../policies/AnonymousCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * AnonymousCredential provides a credentialPolicyCreator member used to create\n * AnonymousCredentialPolicy objects. 
AnonymousCredentialPolicy is used with\n * HTTP(S) requests that read public resources or for use with Shared Access\n * Signatures (SAS).\n */\nexport class AnonymousCredential extends Credential {\n /**\n * Creates an {@link AnonymousCredentialPolicy} object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): AnonymousCredentialPolicy {\n return new AnonymousCredentialPolicy(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js new file mode 100644 index 00000000..6db7be6a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +export class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } +} +//# sourceMappingURL=Credential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js.map new file mode 100644 index 00000000..a7948dc9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Credential.js","sourceRoot":"","sources":["../../../../src/credentials/Credential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC;;;GAGG;AACH,MAAM,OAAgB,UAAU;IAC9B;;;;;OAKG;IACI,MAAM,CAAC,WAA0B,EAAE,QAA8B;QACtE,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;IACvE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { CredentialPolicy } from \"../policies/CredentialPolicy\";\n\n/**\n * Credential is an abstract class for Azure Storage HTTP requests signing. 
This\n * class will host an credentialPolicyCreator factory which generates CredentialPolicy.\n */\nexport abstract class Credential implements RequestPolicyFactory {\n /**\n * Creates a RequestPolicy object.\n *\n * @param _nextPolicy -\n * @param _options -\n */\n public create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy {\n throw new Error(\"Method should be implemented in children classes.\");\n }\n}\n\n/**\n * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy.\n */\nexport type CredentialPolicyCreator = (\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n) => CredentialPolicy;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js new file mode 100644 index 00000000..1235e863 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export class StorageSharedKeyCredential { +} +//# sourceMappingURL=StorageSharedKeyCredential.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js.map new file mode 100644 index 00000000..12177de4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageSharedKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,0BAA0B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport class StorageSharedKeyCredential {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js new file mode 100644 index 00000000..c47b1ceb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHmac } from "crypto"; +import { StorageSharedKeyCredentialPolicy } from "../policies/StorageSharedKeyCredentialPolicy"; +import { Credential } from "./Credential"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +export class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } +} +//# sourceMappingURL=StorageSharedKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map new file mode 100644 index 00000000..c73c0e40 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageSharedKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC,OAAO,EAAE,gCAAgC,EAAE,MAAM,8CAA8C,CAAC;AAChG,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;GAIG;AACH,MAAM,OAAO,0BAA2B,SAAQ,UAAU;IAWxD;;;;OAIG;IACH,YAAY,WAAmB,EAAE,UAAkB;QACjD,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,gCAAgC,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;IACzE,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAC7F,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { StorageSharedKeyCredentialPolicy } from \"../policies/StorageSharedKeyCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n */\nexport class StorageSharedKeyCredential extends Credential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage account key; readonly.\n */\n private readonly accountKey: Buffer;\n\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param accountName -\n * @param accountKey -\n */\n constructor(accountName: string, accountKey: string) {\n super();\n this.accountName = accountName;\n this.accountKey = Buffer.from(accountKey, \"base64\");\n }\n\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): StorageSharedKeyCredentialPolicy {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n return createHmac(\"sha256\", this.accountKey).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js new file mode 100644 index 00000000..cffcb3ce --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export class UserDelegationKeyCredential { +} +//# sourceMappingURL=UserDelegationKeyCredential.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js.map new file mode 100644 index 00000000..0a1e590c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"UserDelegationKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,2BAA2B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport class UserDelegationKeyCredential {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js new file mode 100644 index 00000000..fa438f69 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHmac } from "crypto"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +export class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. 
+ * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } +} +//# sourceMappingURL=UserDelegationKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map new file mode 100644 index 00000000..df787bb7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"UserDelegationKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC;;;;;GAKG;AACH,MAAM,OAAO,2BAA2B;IAgBtC;;;;OAIG;IACH,YAAY,WAAmB,EAAE,iBAAoC;QACnE,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IAC5D,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,gEAAgE;QAEhE,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IACtF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n */\nexport class UserDelegationKeyCredential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage user delegation key; readonly.\n */\n public readonly userDelegationKey: UserDelegationKey;\n\n /**\n * Key value in Buffer type.\n */\n private readonly key: Buffer;\n\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param accountName -\n * @param userDelegationKey -\n */\n constructor(accountName: string, userDelegationKey: UserDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n\n return createHmac(\"sha256\", this.key).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js new file mode 100644 index 00000000..290e550a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js @@ -0,0 +1,11 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +export * from "./models"; +export { StorageClient } from "./storageClient"; +export { StorageClientContext } from "./storageClientContext"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js.map new file mode 100644 index 00000000..5601aab1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../src/generated/src/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,UAAU,CAAC;AACzB,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./models\";\nexport { StorageClient } from \"./storageClient\";\nexport { StorageClientContext } from \"./storageClientContext\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js new file mode 100644 index 00000000..95933448 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export {}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js.map new file mode 100644 index 00000000..4fc3835a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../../src/generated/src/models/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\n\n/** Storage Service Properties. */\nexport interface BlobServiceProperties {\n /** Azure Analytics Logging settings. */\n blobAnalyticsLogging?: Logging;\n /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */\n hourMetrics?: Metrics;\n /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */\n minuteMetrics?: Metrics;\n /** The set of CORS rules. */\n cors?: CorsRule[];\n /** The default version to use for requests to the Blob service if an incoming request's version is not specified. 
Possible values include version 2008-10-27 and all more recent versions */\n defaultServiceVersion?: string;\n /** the retention policy which determines how long the associated data should persist */\n deleteRetentionPolicy?: RetentionPolicy;\n /** The properties that enable an account to host a static website */\n staticWebsite?: StaticWebsite;\n}\n\n/** Azure Analytics Logging settings. */\nexport interface Logging {\n /** The version of Storage Analytics to configure. */\n version: string;\n /** Indicates whether all delete requests should be logged. */\n deleteProperty: boolean;\n /** Indicates whether all read requests should be logged. */\n read: boolean;\n /** Indicates whether all write requests should be logged. */\n write: boolean;\n /** the retention policy which determines how long the associated data should persist */\n retentionPolicy: RetentionPolicy;\n}\n\n/** the retention policy which determines how long the associated data should persist */\nexport interface RetentionPolicy {\n /** Indicates whether a retention policy is enabled for the storage service */\n enabled: boolean;\n /** Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted */\n days?: number;\n}\n\n/** a summary of request statistics grouped by API in hour or minute aggregates for blobs */\nexport interface Metrics {\n /** The version of Storage Analytics to configure. */\n version?: string;\n /** Indicates whether metrics are enabled for the Blob service. */\n enabled: boolean;\n /** Indicates whether metrics should generate summary statistics for called API operations. */\n includeAPIs?: boolean;\n /** the retention policy which determines how long the associated data should persist */\n retentionPolicy?: RetentionPolicy;\n}\n\n/** CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain */\nexport interface CorsRule {\n /** The origin domains that are permitted to make a request against the storage service via CORS. The origin domain is the domain from which the request originates. Note that the origin must be an exact case-sensitive match with the origin that the user age sends to the service. You can also use the wildcard character '*' to allow all origin domains to make requests via CORS. */\n allowedOrigins: string;\n /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. (comma separated) */\n allowedMethods: string;\n /** the request headers that the origin domain may specify on the CORS request. */\n allowedHeaders: string;\n /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer */\n exposedHeaders: string;\n /** The maximum amount time that a browser should cache the preflight OPTIONS request. 
*/\n maxAgeInSeconds: number;\n}\n\n/** The properties that enable an account to host a static website */\nexport interface StaticWebsite {\n /** Indicates whether this account is hosting a static website */\n enabled: boolean;\n /** The default name of the index page under each directory */\n indexDocument?: string;\n /** The absolute path of the custom 404 page */\n errorDocument404Path?: string;\n /** Absolute path of the default index page */\n defaultIndexDocumentPath?: string;\n}\n\nexport interface StorageError {\n message?: string;\n code?: string;\n}\n\n/** Stats for the storage service. */\nexport interface BlobServiceStatistics {\n /** Geo-Replication information for the Secondary Storage Service */\n geoReplication?: GeoReplication;\n}\n\n/** Geo-Replication information for the Secondary Storage Service */\nexport interface GeoReplication {\n /** The status of the secondary location */\n status: GeoReplicationStatusType;\n /** A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. */\n lastSyncOn: Date;\n}\n\n/** An enumeration of containers */\nexport interface ListContainersSegmentResponse {\n serviceEndpoint: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n containerItems: ContainerItem[];\n continuationToken?: string;\n}\n\n/** An Azure Storage container */\nexport interface ContainerItem {\n name: string;\n deleted?: boolean;\n version?: string;\n /** Properties of a container */\n properties: ContainerProperties;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n}\n\n/** Properties of a container */\nexport interface ContainerProperties {\n lastModified: Date;\n etag: string;\n leaseStatus?: LeaseStatusType;\n leaseState?: LeaseStateType;\n leaseDuration?: LeaseDurationType;\n publicAccess?: PublicAccessType;\n hasImmutabilityPolicy?: boolean;\n hasLegalHold?: boolean;\n defaultEncryptionScope?: string;\n preventEncryptionScopeOverride?: boolean;\n deletedOn?: Date;\n remainingRetentionDays?: number;\n /** Indicates if version level worm is enabled on this container. */\n isImmutableStorageWithVersioningEnabled?: boolean;\n}\n\n/** Key information */\nexport interface KeyInfo {\n /** The date-time the key is active in ISO 8601 UTC time */\n startsOn: string;\n /** The date-time the key expires in ISO 8601 UTC time */\n expiresOn: string;\n}\n\n/** A user delegation key */\nexport interface UserDelegationKey {\n /** The Azure Active Directory object ID in GUID format. 
*/\n signedObjectId: string;\n /** The Azure Active Directory tenant ID in GUID format */\n signedTenantId: string;\n /** The date-time the key is active */\n signedStartsOn: string;\n /** The date-time the key expires */\n signedExpiresOn: string;\n /** Abbreviation of the Azure Storage service that accepts the key */\n signedService: string;\n /** The service version that created the key */\n signedVersion: string;\n /** The key as a base64 string */\n value: string;\n}\n\n/** The result of a Filter Blobs API call */\nexport interface FilterBlobSegment {\n serviceEndpoint: string;\n where: string;\n blobs: FilterBlobItem[];\n continuationToken?: string;\n}\n\n/** Blob info from a Filter Blobs API call */\nexport interface FilterBlobItem {\n name: string;\n containerName: string;\n /** Blob tags */\n tags?: BlobTags;\n}\n\n/** Blob tags */\nexport interface BlobTags {\n blobTagSet: BlobTag[];\n}\n\nexport interface BlobTag {\n key: string;\n value: string;\n}\n\n/** signed identifier */\nexport interface SignedIdentifier {\n /** a unique id */\n id: string;\n /** An Access policy */\n accessPolicy: AccessPolicy;\n}\n\n/** An Access policy */\nexport interface AccessPolicy {\n /** the date-time the policy is active */\n startsOn?: string;\n /** the date-time the policy expires */\n expiresOn?: string;\n /** the permissions for the acl policy */\n permissions?: string;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsFlatSegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegment;\n continuationToken?: string;\n}\n\nexport interface BlobFlatListSegment {\n blobItems: BlobItemInternal[];\n}\n\n/** An Azure Storage blob */\nexport interface BlobItemInternal {\n name: BlobName;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n /** Properties of a blob */\n properties: BlobPropertiesInternal;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n /** Blob tags */\n blobTags?: BlobTags;\n /** Dictionary of */\n objectReplicationMetadata?: { [propertyName: string]: string };\n /** Inactive root blobs which have any versions would have such tag with value true. */\n hasVersionsOnly?: boolean;\n}\n\nexport interface BlobName {\n /** Indicates if the blob name is encoded. */\n encoded?: boolean;\n /** The name of the blob. */\n content?: string;\n}\n\n/** Properties of a blob */\nexport interface BlobPropertiesInternal {\n createdOn?: Date;\n lastModified: Date;\n etag: string;\n /** Size in bytes */\n contentLength?: number;\n contentType?: string;\n contentEncoding?: string;\n contentLanguage?: string;\n contentMD5?: Uint8Array;\n contentDisposition?: string;\n cacheControl?: string;\n blobSequenceNumber?: number;\n blobType?: BlobType;\n leaseStatus?: LeaseStatusType;\n leaseState?: LeaseStateType;\n leaseDuration?: LeaseDurationType;\n copyId?: string;\n copyStatus?: CopyStatusType;\n copySource?: string;\n copyProgress?: string;\n copyCompletedOn?: Date;\n copyStatusDescription?: string;\n serverEncrypted?: boolean;\n incrementalCopy?: boolean;\n destinationSnapshot?: string;\n deletedOn?: Date;\n remainingRetentionDays?: number;\n accessTier?: AccessTier;\n accessTierInferred?: boolean;\n archiveStatus?: ArchiveStatus;\n customerProvidedKeySha256?: string;\n /** The name of the encryption scope under which the blob is encrypted. 
*/\n encryptionScope?: string;\n accessTierChangedOn?: Date;\n tagCount?: number;\n expiresOn?: Date;\n isSealed?: boolean;\n /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. */\n rehydratePriority?: RehydratePriority;\n lastAccessedOn?: Date;\n /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */\n immutabilityPolicyExpiresOn?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Indicates if a legal hold is present on the blob. */\n legalHold?: boolean;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsHierarchySegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegment;\n continuationToken?: string;\n}\n\nexport interface BlobHierarchyListSegment {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItemInternal[];\n}\n\nexport interface BlobPrefix {\n name: BlobName;\n}\n\nexport interface BlockLookupList {\n committed?: string[];\n uncommitted?: string[];\n latest?: string[];\n}\n\nexport interface BlockList {\n committedBlocks?: Block[];\n uncommittedBlocks?: Block[];\n}\n\n/** Represents a single block in a block blob. It describes the block's ID and size. */\nexport interface Block {\n /** The base64 encoded block ID. */\n name: string;\n /** The block size in bytes. */\n size: number;\n}\n\n/** the list of pages */\nexport interface PageList {\n pageRange?: PageRange[];\n clearRange?: ClearRange[];\n continuationToken?: string;\n}\n\nexport interface PageRange {\n start: number;\n end: number;\n}\n\nexport interface ClearRange {\n start: number;\n end: number;\n}\n\n/** Groups the set of query request settings. */\nexport interface QueryRequest {\n /** Required. The type of the provided query expression. */\n queryType: string;\n /** The query expression in SQL. The maximum size of the query expression is 256KiB. */\n expression: string;\n inputSerialization?: QuerySerialization;\n outputSerialization?: QuerySerialization;\n}\n\nexport interface QuerySerialization {\n format: QueryFormat;\n}\n\nexport interface QueryFormat {\n /** The quick query format type. */\n type: QueryFormatType;\n /** Groups the settings used for interpreting the blob data if the blob is delimited text formatted. */\n delimitedTextConfiguration?: DelimitedTextConfiguration;\n /** json text configuration */\n jsonTextConfiguration?: JsonTextConfiguration;\n /** Groups the settings used for formatting the response if the response should be Arrow formatted. */\n arrowConfiguration?: ArrowConfiguration;\n /** Any object */\n parquetTextConfiguration?: any;\n}\n\n/** Groups the settings used for interpreting the blob data if the blob is delimited text formatted. */\nexport interface DelimitedTextConfiguration {\n /** The string used to separate columns. */\n columnSeparator?: string;\n /** The string used to quote a specific field. */\n fieldQuote?: string;\n /** The string used to separate records. */\n recordSeparator?: string;\n /** The string used as an escape character. */\n escapeChar?: string;\n /** Represents whether the data has headers. */\n headersPresent?: boolean;\n}\n\n/** json text configuration */\nexport interface JsonTextConfiguration {\n /** The string used to separate records. 
*/\n recordSeparator?: string;\n}\n\n/** Groups the settings used for formatting the response if the response should be Arrow formatted. */\nexport interface ArrowConfiguration {\n schema: ArrowField[];\n}\n\n/** Groups settings regarding specific field of an arrow schema */\nexport interface ArrowField {\n type: string;\n name?: string;\n precision?: number;\n scale?: number;\n}\n\n/** Defines headers for Service_setProperties operation. */\nexport interface ServiceSetPropertiesHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_setProperties operation. */\nexport interface ServiceSetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getProperties operation. */\nexport interface ServiceGetPropertiesHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getProperties operation. */\nexport interface ServiceGetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getStatistics operation. */\nexport interface ServiceGetStatisticsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getStatistics operation. */\nexport interface ServiceGetStatisticsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_listContainersSegment operation. */\nexport interface ServiceListContainersSegmentHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_listContainersSegment operation. 
*/\nexport interface ServiceListContainersSegmentExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getUserDelegationKey operation. */\nexport interface ServiceGetUserDelegationKeyHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getUserDelegationKey operation. */\nexport interface ServiceGetUserDelegationKeyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getAccountInfo operation. */\nexport interface ServiceGetAccountInfoHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Identifies the sku name of the account */\n skuName?: SkuName;\n /** Identifies the account kind */\n accountKind?: AccountKind;\n /** Version 2019-07-07 and newer. Indicates if the account has a hierarchical namespace enabled. */\n isHierarchicalNamespaceEnabled?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getAccountInfo operation. */\nexport interface ServiceGetAccountInfoExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_submitBatch operation. */\nexport interface ServiceSubmitBatchHeaders {\n /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */\n contentType?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_submitBatch operation. */\nexport interface ServiceSubmitBatchExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_filterBlobs operation. */\nexport interface ServiceFilterBlobsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_filterBlobs operation. */\nexport interface ServiceFilterBlobsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_create operation. */\nexport interface ContainerCreateHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_create operation. */\nexport interface ContainerCreateExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_getProperties operation. */\nexport interface ContainerGetPropertiesHeaders {\n metadata?: { [propertyName: string]: string };\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. */\n leaseStatus?: LeaseStatusType;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicated whether data in the container may be accessed publicly and the level of access */\n blobPublicAccess?: PublicAccessType;\n /** Indicates whether the container has an immutability policy set on it. 
*/\n hasImmutabilityPolicy?: boolean;\n /** Indicates whether the container has a legal hold. */\n hasLegalHold?: boolean;\n /** The default encryption scope for the container. */\n defaultEncryptionScope?: string;\n /** Indicates whether the container's default encryption scope can be overriden. */\n denyEncryptionScopeOverride?: boolean;\n /** Indicates whether version level worm is enabled on a container. */\n isImmutableStorageWithVersioningEnabled?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_getProperties operation. */\nexport interface ContainerGetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_delete operation. */\nexport interface ContainerDeleteHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_delete operation. */\nexport interface ContainerDeleteExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_setMetadata operation. */\nexport interface ContainerSetMetadataHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_setMetadata operation. */\nexport interface ContainerSetMetadataExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_getAccessPolicy operation. */\nexport interface ContainerGetAccessPolicyHeaders {\n /** Indicated whether data in the container may be accessed publicly and the level of access */\n blobPublicAccess?: PublicAccessType;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_getAccessPolicy operation. */\nexport interface ContainerGetAccessPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_setAccessPolicy operation. */\nexport interface ContainerSetAccessPolicyHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_setAccessPolicy operation. */\nexport interface ContainerSetAccessPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_restore operation. */\nexport interface ContainerRestoreHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_restore operation. */\nexport interface ContainerRestoreExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_rename operation. */\nexport interface ContainerRenameHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_rename operation. */\nexport interface ContainerRenameExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_submitBatch operation. */\nexport interface ContainerSubmitBatchHeaders {\n /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */\n contentType?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n}\n\n/** Defines headers for Container_submitBatch operation. */\nexport interface ContainerSubmitBatchExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_filterBlobs operation. */\nexport interface ContainerFilterBlobsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_filterBlobs operation. */\nexport interface ContainerFilterBlobsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_acquireLease operation. */\nexport interface ContainerAcquireLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a container's lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_acquireLease operation. */\nexport interface ContainerAcquireLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_releaseLease operation. */\nexport interface ContainerReleaseLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. 
If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_releaseLease operation. */\nexport interface ContainerReleaseLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_renewLease operation. */\nexport interface ContainerRenewLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a container's lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_renewLease operation. */\nexport interface ContainerRenewLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_breakLease operation. */\nexport interface ContainerBreakLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Approximate time remaining in the lease period, in seconds. */\n leaseTime?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_breakLease operation. */\nexport interface ContainerBreakLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_changeLease operation. */\nexport interface ContainerChangeLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a container's lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_changeLease operation. */\nexport interface ContainerChangeLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobFlatSegment operation. */\nexport interface ContainerListBlobFlatSegmentHeaders {\n /** The media type of the body of the response. For List Blobs this is 'application/xml' */\n contentType?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobFlatSegment operation. */\nexport interface ContainerListBlobFlatSegmentExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobHierarchySegment operation. */\nexport interface ContainerListBlobHierarchySegmentHeaders {\n /** The media type of the body of the response. For List Blobs this is 'application/xml' */\n contentType?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobHierarchySegment operation. */\nexport interface ContainerListBlobHierarchySegmentExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_getAccountInfo operation. */\nexport interface ContainerGetAccountInfoHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Identifies the sku name of the account */\n skuName?: SkuName;\n /** Identifies the account kind */\n accountKind?: AccountKind;\n}\n\n/** Defines headers for Container_getAccountInfo operation. */\nexport interface ContainerGetAccountInfoExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_download operation. */\nexport interface BlobDownloadHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n metadata?: { [propertyName: string]: string };\n /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */\n objectReplicationPolicyId?: string;\n /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */\n objectReplicationRules?: { [propertyName: string]: string };\n /** The number of bytes present in the response body. */\n contentLength?: number;\n /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */\n contentType?: string;\n /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */\n contentRange?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header returns the value that was specified for the Content-Encoding request header */\n contentEncoding?: string;\n /** This header is returned if it was previously specified for the blob. */\n cacheControl?: string;\n /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. 
The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */\n contentDisposition?: string;\n /** This header returns the value that was specified for the Content-Language request header. */\n contentLanguage?: string;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** The blob's type. */\n blobType?: BlobType;\n /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copyCompletedOn?: Date;\n /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyStatusDescription?: string;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyProgress?: string;\n /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copySource?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. */\n leaseStatus?: LeaseStatusType;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */\n isCurrentVersion?: boolean;\n /** Indicates that the service supports requests for partial blob content. */\n acceptRanges?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */\n blobContentMD5?: Uint8Array;\n /** The number of tags associated with the blob */\n tagCount?: number;\n /** If this blob has been sealed */\n isSealed?: boolean;\n /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */\n lastAccessed?: Date;\n /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */\n immutabilityPolicyExpiresOn?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Indicates if a legal hold is present on the blob. */\n legalHold?: boolean;\n /** Error Code */\n errorCode?: string;\n /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */\n contentCrc64?: Uint8Array;\n}\n\n/** Defines headers for Blob_download operation. */\nexport interface BlobDownloadExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getProperties operation. */\nexport interface BlobGetPropertiesHeaders {\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Returns the date and time the blob was created. */\n createdOn?: Date;\n metadata?: { [propertyName: string]: string };\n /** Optional. 
Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */\n objectReplicationPolicyId?: string;\n /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */\n objectReplicationRules?: { [propertyName: string]: string };\n /** The blob's type. */\n blobType?: BlobType;\n /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copyCompletedOn?: Date;\n /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyStatusDescription?: string;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyProgress?: string;\n /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copySource?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** Included if the blob is incremental copy blob. */\n isIncrementalCopy?: boolean;\n /** Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful incremental copy snapshot for this blob. */\n destinationSnapshot?: string;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. */\n leaseStatus?: LeaseStatusType;\n /** The size of the blob in bytes. For a page blob, this header returns the value of the x-ms-blob-content-length header that is stored with the blob. */\n contentLength?: number;\n /** The content type specified for the blob. 
The default content type is 'application/octet-stream' */\n contentType?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header returns the value that was specified for the Content-Encoding request header */\n contentEncoding?: string;\n /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */\n contentDisposition?: string;\n /** This header returns the value that was specified for the Content-Language request header. */\n contentLanguage?: string;\n /** This header is returned if it was previously specified for the blob. */\n cacheControl?: string;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicates that the service supports requests for partial blob content. */\n acceptRanges?: string;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** The tier of page blob on a premium storage account or tier of block blob on blob storage LRS accounts. For a list of allowed premium page blob tiers, see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/premium-storage#features. For blob storage LRS accounts, valid values are Hot/Cool/Archive. */\n accessTier?: string;\n /** For page blobs on a premium storage account only. 
If the access tier is not explicitly set on the blob, the tier is inferred based on its content length and this header will be returned with true value. */\n accessTierInferred?: boolean;\n /** For blob storage LRS accounts, valid values are rehydrate-pending-to-hot/rehydrate-pending-to-cool. If the blob is being rehydrated and is not complete then this header is returned indicating that rehydrate is pending and also tells the destination tier. */\n archiveStatus?: string;\n /** The time the tier was changed on the object. This is only returned if the tier on the block blob was ever set. */\n accessTierChangedOn?: Date;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */\n isCurrentVersion?: boolean;\n /** The number of tags associated with the blob */\n tagCount?: number;\n /** The time this blob will expire. */\n expiresOn?: Date;\n /** If this blob has been sealed */\n isSealed?: boolean;\n /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. */\n rehydratePriority?: RehydratePriority;\n /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */\n lastAccessed?: Date;\n /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */\n immutabilityPolicyExpiresOn?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Indicates if a legal hold is present on the blob. */\n legalHold?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getProperties operation. */\nexport interface BlobGetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_delete operation. */\nexport interface BlobDeleteHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_delete operation. */\nexport interface BlobDeleteExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_undelete operation. */\nexport interface BlobUndeleteHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_undelete operation. */\nexport interface BlobUndeleteExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setExpiry operation. */\nexport interface BlobSetExpiryHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */\n date?: Date;\n}\n\n/** Defines headers for Blob_setExpiry operation. */\nexport interface BlobSetExpiryExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setHttpHeaders operation. */\nexport interface BlobSetHttpHeadersHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setHttpHeaders operation. */\nexport interface BlobSetHttpHeadersExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setImmutabilityPolicy operation. */\nexport interface BlobSetImmutabilityPolicyHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicates the time the immutability policy will expire. */\n immutabilityPolicyExpiry?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n}\n\n/** Defines headers for Blob_setImmutabilityPolicy operation. */\nexport interface BlobSetImmutabilityPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_deleteImmutabilityPolicy operation. */\nexport interface BlobDeleteImmutabilityPolicyHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_deleteImmutabilityPolicy operation. */\nexport interface BlobDeleteImmutabilityPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setLegalHold operation. */\nexport interface BlobSetLegalHoldHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicates if the blob has a legal hold. */\n legalHold?: boolean;\n}\n\n/** Defines headers for Blob_setLegalHold operation. */\nexport interface BlobSetLegalHoldExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setMetadata operation. */\nexport interface BlobSetMetadataHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. 
The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setMetadata operation. */\nexport interface BlobSetMetadataExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_acquireLease operation. */\nexport interface BlobAcquireLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a blobs' lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_acquireLease operation. */\nexport interface BlobAcquireLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_releaseLease operation. */\nexport interface BlobReleaseLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_releaseLease operation. */\nexport interface BlobReleaseLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_renewLease operation. */\nexport interface BlobRenewLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a blobs' lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_renewLease operation. */\nexport interface BlobRenewLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_changeLease operation. */\nexport interface BlobChangeLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Uniquely identifies a blobs' lease */\n leaseId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_changeLease operation. */\nexport interface BlobChangeLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_breakLease operation. */\nexport interface BlobBreakLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Approximate time remaining in the lease period, in seconds. 
*/\n leaseTime?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_breakLease operation. */\nexport interface BlobBreakLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_createSnapshot operation. */\nexport interface BlobCreateSnapshotHeaders {\n /** Uniquely identifies the snapshot and indicates the snapshot version. It may be used in subsequent requests to access the snapshot */\n snapshot?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** True if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. For a snapshot request, this header is set to true when metadata was provided in the request and encrypted with a customer-provided key. */\n isServerEncrypted?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_createSnapshot operation. */\nexport interface BlobCreateSnapshotExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_startCopyFromURL operation. */\nexport interface BlobStartCopyFromURLHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_startCopyFromURL operation. */\nexport interface BlobStartCopyFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_copyFromURL operation. */\nexport interface BlobCopyFromURLHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** String identifier for this copy operation. */\n copyId?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: SyncCopyStatusType;\n /** This response header is returned so that the client can check for the integrity of the copied content. This header is only returned if the source content MD5 was specified. */\n contentMD5?: Uint8Array;\n /** This response header is returned so that the client can check for the integrity of the copied content. */\n xMsContentCrc64?: Uint8Array;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_copyFromURL operation. */\nexport interface BlobCopyFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_abortCopyFromURL operation. 
*/\nexport interface BlobAbortCopyFromURLHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_abortCopyFromURL operation. */\nexport interface BlobAbortCopyFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTier operation. */\nexport interface BlobSetTierHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and newer. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTier operation. */\nexport interface BlobSetTierExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getAccountInfo operation. */\nexport interface BlobGetAccountInfoHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Identifies the sku name of the account */\n skuName?: SkuName;\n /** Identifies the account kind */\n accountKind?: AccountKind;\n}\n\n/** Defines headers for Blob_getAccountInfo operation. */\nexport interface BlobGetAccountInfoExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_query operation. */\nexport interface BlobQueryHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n metadata?: { [propertyName: string]: string };\n /** The number of bytes present in the response body. */\n contentLength?: number;\n /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */\n contentType?: string;\n /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */\n contentRange?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/\n etag?: string;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header returns the value that was specified for the Content-Encoding request header */\n contentEncoding?: string;\n /** This header is returned if it was previously specified for the blob. */\n cacheControl?: string;\n /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */\n contentDisposition?: string;\n /** This header returns the value that was specified for the Content-Language request header. */\n contentLanguage?: string;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** The blob's type. */\n blobType?: BlobType;\n /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copyCompletionTime?: Date;\n /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyStatusDescription?: string;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyProgress?: string;\n /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copySource?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. 
*/\n leaseStatus?: LeaseStatusType;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Indicates that the service supports requests for partial blob content. */\n acceptRanges?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */\n blobContentMD5?: Uint8Array;\n /** Error Code */\n errorCode?: string;\n /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */\n contentCrc64?: Uint8Array;\n}\n\n/** Defines headers for Blob_query operation. */\nexport interface BlobQueryExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getTags operation. */\nexport interface BlobGetTagsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getTags operation. */\nexport interface BlobGetTagsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTags operation. 
*/\nexport interface BlobSetTagsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTags operation. */\nexport interface BlobSetTagsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_create operation. */\nexport interface PageBlobCreateHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_create operation. */\nexport interface PageBlobCreateExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPages operation. */\nexport interface PageBlobUploadPagesHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. 
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The current sequence number for the page blob. */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the pages. This header is only returned when the pages were encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPages operation. */\nexport interface PageBlobUploadPagesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_clearPages operation. */\nexport interface PageBlobClearPagesHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The current sequence number for the page blob. */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_clearPages operation. */\nexport interface PageBlobClearPagesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPagesFromURL operation. */\nexport interface PageBlobUploadPagesFromURLHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The current sequence number for the page blob. */\n blobSequenceNumber?: number;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPagesFromURL operation. */\nexport interface PageBlobUploadPagesFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRanges operation. */\nexport interface PageBlobGetPageRangesHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** The size of the blob in bytes. */\n blobContentLength?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRanges operation. */\nexport interface PageBlobGetPageRangesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRangesDiff operation. */\nexport interface PageBlobGetPageRangesDiffHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** The size of the blob in bytes. */\n blobContentLength?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRangesDiff operation. */\nexport interface PageBlobGetPageRangesDiffExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_resize operation. */\nexport interface PageBlobResizeHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_resize operation. 
*/\nexport interface PageBlobResizeExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_updateSequenceNumber operation. */\nexport interface PageBlobUpdateSequenceNumberHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_updateSequenceNumber operation. */\nexport interface PageBlobUpdateSequenceNumberExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_copyIncremental operation. */\nexport interface PageBlobCopyIncrementalHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_copyIncremental operation. */\nexport interface PageBlobCopyIncrementalExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_create operation. */\nexport interface AppendBlobCreateHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_create operation. */\nexport interface AppendBlobCreateExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlock operation. */\nexport interface AppendBlobAppendBlockHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */\n blobAppendOffset?: string;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlock operation. */\nexport interface AppendBlobAppendBlockExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlockFromUrl operation. */\nexport interface AppendBlobAppendBlockFromUrlHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */\n blobAppendOffset?: string;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/\n encryptionScope?: string;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlockFromUrl operation. */\nexport interface AppendBlobAppendBlockFromUrlExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_seal operation. */\nexport interface AppendBlobSealHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** If this blob has been sealed */\n isSealed?: boolean;\n}\n\n/** Defines headers for AppendBlob_seal operation. */\nexport interface AppendBlobSealExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_upload operation. */\nexport interface BlockBlobUploadHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. 
*/\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_upload operation. */\nexport interface BlockBlobUploadExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_putBlobFromUrl operation. */\nexport interface BlockBlobPutBlobFromUrlHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_putBlobFromUrl operation. */\nexport interface BlockBlobPutBlobFromUrlExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlock operation. */\nexport interface BlockBlobStageBlockHeaders {\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlock operation. */\nexport interface BlockBlobStageBlockExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlockFromURL operation. */\nexport interface BlockBlobStageBlockFromURLHeaders {\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlockFromURL operation. 
*/\nexport interface BlockBlobStageBlockFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_commitBlockList operation. */\nexport interface BlockBlobCommitBlockListHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */\n xMsContentCrc64?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_commitBlockList operation. */\nexport interface BlockBlobCommitBlockListExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_getBlockList operation. */\nexport interface BlockBlobGetBlockListHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** The media type of the body of the response. For Get Block List this is 'application/xml' */\n contentType?: string;\n /** The size of the blob in bytes. 
*/\n blobContentLength?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_getBlockList operation. */\nexport interface BlockBlobGetBlockListExceptionHeaders {\n errorCode?: string;\n}\n\n/** Parameter group */\nexport interface ContainerEncryptionScope {\n /** Optional. Version 2019-07-07 and later. Specifies the default encryption scope to set on the container and use for all future writes. */\n defaultEncryptionScope?: string;\n /** Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. */\n preventEncryptionScopeOverride?: boolean;\n}\n\n/** Parameter group */\nexport interface LeaseAccessConditions {\n /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */\n leaseId?: string;\n}\n\n/** Parameter group */\nexport interface ModifiedAccessConditions {\n /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */\n ifModifiedSince?: Date;\n /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */\n ifUnmodifiedSince?: Date;\n /** Specify an ETag value to operate only on blobs with a matching value. */\n ifMatch?: string;\n /** Specify an ETag value to operate only on blobs without a matching value. */\n ifNoneMatch?: string;\n /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */\n ifTags?: string;\n}\n\n/** Parameter group */\nexport interface CpkInfo {\n /** Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionKey?: string;\n /** The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. */\n encryptionKeySha256?: string;\n /** The algorithm used to produce the encryption key hash. Currently, the only accepted value is \"AES256\". Must be provided if the x-ms-encryption-key header is provided. */\n encryptionAlgorithm?: EncryptionAlgorithmType;\n}\n\n/** Parameter group */\nexport interface BlobHttpHeaders {\n /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */\n blobCacheControl?: string;\n /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */\n blobContentType?: string;\n /** Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. */\n blobContentMD5?: Uint8Array;\n /** Optional. Sets the blob's content encoding. 
If specified, this property is stored with the blob and returned with a read request. */\n blobContentEncoding?: string;\n /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */\n blobContentLanguage?: string;\n /** Optional. Sets the blob's Content-Disposition header. */\n blobContentDisposition?: string;\n}\n\n/** Parameter group */\nexport interface SourceModifiedAccessConditions {\n /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */\n sourceIfModifiedSince?: Date;\n /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */\n sourceIfUnmodifiedSince?: Date;\n /** Specify an ETag value to operate only on blobs with a matching value. */\n sourceIfMatch?: string;\n /** Specify an ETag value to operate only on blobs without a matching value. */\n sourceIfNoneMatch?: string;\n /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */\n sourceIfTags?: string;\n}\n\n/** Parameter group */\nexport interface SequenceNumberAccessConditions {\n /** Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. */\n ifSequenceNumberLessThanOrEqualTo?: number;\n /** Specify this header value to operate only on a blob if it has a sequence number less than the specified. */\n ifSequenceNumberLessThan?: number;\n /** Specify this header value to operate only on a blob if it has the specified sequence number. */\n ifSequenceNumberEqualTo?: number;\n}\n\n/** Parameter group */\nexport interface AppendPositionAccessConditions {\n /** Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). */\n maxSize?: number;\n /** Optional conditional header, used only for the Append Block operation. A number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). */\n appendPosition?: number;\n}\n\n/** Known values of {@link BlobExpiryOptions} that the service accepts. */\nexport const enum KnownBlobExpiryOptions {\n NeverExpire = \"NeverExpire\",\n RelativeToCreation = \"RelativeToCreation\",\n RelativeToNow = \"RelativeToNow\",\n Absolute = \"Absolute\"\n}\n\n/**\n * Defines values for BlobExpiryOptions. \\\n * {@link KnownBlobExpiryOptions} can be used interchangeably with BlobExpiryOptions,\n * this enum contains the known values that the service supports.\n * ### Know values supported by the service\n * **NeverExpire** \\\n * **RelativeToCreation** \\\n * **RelativeToNow** \\\n * **Absolute**\n */\nexport type BlobExpiryOptions = string;\n\n/** Known values of {@link StorageErrorCode} that the service accepts. 
*/\nexport const enum KnownStorageErrorCode {\n AccountAlreadyExists = \"AccountAlreadyExists\",\n AccountBeingCreated = \"AccountBeingCreated\",\n AccountIsDisabled = \"AccountIsDisabled\",\n AuthenticationFailed = \"AuthenticationFailed\",\n AuthorizationFailure = \"AuthorizationFailure\",\n ConditionHeadersNotSupported = \"ConditionHeadersNotSupported\",\n ConditionNotMet = \"ConditionNotMet\",\n EmptyMetadataKey = \"EmptyMetadataKey\",\n InsufficientAccountPermissions = \"InsufficientAccountPermissions\",\n InternalError = \"InternalError\",\n InvalidAuthenticationInfo = \"InvalidAuthenticationInfo\",\n InvalidHeaderValue = \"InvalidHeaderValue\",\n InvalidHttpVerb = \"InvalidHttpVerb\",\n InvalidInput = \"InvalidInput\",\n InvalidMd5 = \"InvalidMd5\",\n InvalidMetadata = \"InvalidMetadata\",\n InvalidQueryParameterValue = \"InvalidQueryParameterValue\",\n InvalidRange = \"InvalidRange\",\n InvalidResourceName = \"InvalidResourceName\",\n InvalidUri = \"InvalidUri\",\n InvalidXmlDocument = \"InvalidXmlDocument\",\n InvalidXmlNodeValue = \"InvalidXmlNodeValue\",\n Md5Mismatch = \"Md5Mismatch\",\n MetadataTooLarge = \"MetadataTooLarge\",\n MissingContentLengthHeader = \"MissingContentLengthHeader\",\n MissingRequiredQueryParameter = \"MissingRequiredQueryParameter\",\n MissingRequiredHeader = \"MissingRequiredHeader\",\n MissingRequiredXmlNode = \"MissingRequiredXmlNode\",\n MultipleConditionHeadersNotSupported = \"MultipleConditionHeadersNotSupported\",\n OperationTimedOut = \"OperationTimedOut\",\n OutOfRangeInput = \"OutOfRangeInput\",\n OutOfRangeQueryParameterValue = \"OutOfRangeQueryParameterValue\",\n RequestBodyTooLarge = \"RequestBodyTooLarge\",\n ResourceTypeMismatch = \"ResourceTypeMismatch\",\n RequestUrlFailedToParse = \"RequestUrlFailedToParse\",\n ResourceAlreadyExists = \"ResourceAlreadyExists\",\n ResourceNotFound = \"ResourceNotFound\",\n ServerBusy = \"ServerBusy\",\n UnsupportedHeader = \"UnsupportedHeader\",\n UnsupportedXmlNode = \"UnsupportedXmlNode\",\n UnsupportedQueryParameter = \"UnsupportedQueryParameter\",\n UnsupportedHttpVerb = \"UnsupportedHttpVerb\",\n AppendPositionConditionNotMet = \"AppendPositionConditionNotMet\",\n BlobAlreadyExists = \"BlobAlreadyExists\",\n BlobImmutableDueToPolicy = \"BlobImmutableDueToPolicy\",\n BlobNotFound = \"BlobNotFound\",\n BlobOverwritten = \"BlobOverwritten\",\n BlobTierInadequateForContentLength = \"BlobTierInadequateForContentLength\",\n BlobUsesCustomerSpecifiedEncryption = \"BlobUsesCustomerSpecifiedEncryption\",\n BlockCountExceedsLimit = \"BlockCountExceedsLimit\",\n BlockListTooLong = \"BlockListTooLong\",\n CannotChangeToLowerTier = \"CannotChangeToLowerTier\",\n CannotVerifyCopySource = \"CannotVerifyCopySource\",\n ContainerAlreadyExists = \"ContainerAlreadyExists\",\n ContainerBeingDeleted = \"ContainerBeingDeleted\",\n ContainerDisabled = \"ContainerDisabled\",\n ContainerNotFound = \"ContainerNotFound\",\n ContentLengthLargerThanTierLimit = \"ContentLengthLargerThanTierLimit\",\n CopyAcrossAccountsNotSupported = \"CopyAcrossAccountsNotSupported\",\n CopyIdMismatch = \"CopyIdMismatch\",\n FeatureVersionMismatch = \"FeatureVersionMismatch\",\n IncrementalCopyBlobMismatch = \"IncrementalCopyBlobMismatch\",\n IncrementalCopyOfEralierVersionSnapshotNotAllowed = \"IncrementalCopyOfEralierVersionSnapshotNotAllowed\",\n IncrementalCopySourceMustBeSnapshot = \"IncrementalCopySourceMustBeSnapshot\",\n InfiniteLeaseDurationRequired = \"InfiniteLeaseDurationRequired\",\n InvalidBlobOrBlock = \"InvalidBlobOrBlock\",\n 
InvalidBlobTier = \"InvalidBlobTier\",\n InvalidBlobType = \"InvalidBlobType\",\n InvalidBlockId = \"InvalidBlockId\",\n InvalidBlockList = \"InvalidBlockList\",\n InvalidOperation = \"InvalidOperation\",\n InvalidPageRange = \"InvalidPageRange\",\n InvalidSourceBlobType = \"InvalidSourceBlobType\",\n InvalidSourceBlobUrl = \"InvalidSourceBlobUrl\",\n InvalidVersionForPageBlobOperation = \"InvalidVersionForPageBlobOperation\",\n LeaseAlreadyPresent = \"LeaseAlreadyPresent\",\n LeaseAlreadyBroken = \"LeaseAlreadyBroken\",\n LeaseIdMismatchWithBlobOperation = \"LeaseIdMismatchWithBlobOperation\",\n LeaseIdMismatchWithContainerOperation = \"LeaseIdMismatchWithContainerOperation\",\n LeaseIdMismatchWithLeaseOperation = \"LeaseIdMismatchWithLeaseOperation\",\n LeaseIdMissing = \"LeaseIdMissing\",\n LeaseIsBreakingAndCannotBeAcquired = \"LeaseIsBreakingAndCannotBeAcquired\",\n LeaseIsBreakingAndCannotBeChanged = \"LeaseIsBreakingAndCannotBeChanged\",\n LeaseIsBrokenAndCannotBeRenewed = \"LeaseIsBrokenAndCannotBeRenewed\",\n LeaseLost = \"LeaseLost\",\n LeaseNotPresentWithBlobOperation = \"LeaseNotPresentWithBlobOperation\",\n LeaseNotPresentWithContainerOperation = \"LeaseNotPresentWithContainerOperation\",\n LeaseNotPresentWithLeaseOperation = \"LeaseNotPresentWithLeaseOperation\",\n MaxBlobSizeConditionNotMet = \"MaxBlobSizeConditionNotMet\",\n NoAuthenticationInformation = \"NoAuthenticationInformation\",\n NoPendingCopyOperation = \"NoPendingCopyOperation\",\n OperationNotAllowedOnIncrementalCopyBlob = \"OperationNotAllowedOnIncrementalCopyBlob\",\n PendingCopyOperation = \"PendingCopyOperation\",\n PreviousSnapshotCannotBeNewer = \"PreviousSnapshotCannotBeNewer\",\n PreviousSnapshotNotFound = \"PreviousSnapshotNotFound\",\n PreviousSnapshotOperationNotSupported = \"PreviousSnapshotOperationNotSupported\",\n SequenceNumberConditionNotMet = \"SequenceNumberConditionNotMet\",\n SequenceNumberIncrementTooLarge = \"SequenceNumberIncrementTooLarge\",\n SnapshotCountExceeded = \"SnapshotCountExceeded\",\n SnapshotOperationRateExceeded = \"SnapshotOperationRateExceeded\",\n SnapshotsPresent = \"SnapshotsPresent\",\n SourceConditionNotMet = \"SourceConditionNotMet\",\n SystemInUse = \"SystemInUse\",\n TargetConditionNotMet = \"TargetConditionNotMet\",\n UnauthorizedBlobOverwrite = \"UnauthorizedBlobOverwrite\",\n BlobBeingRehydrated = \"BlobBeingRehydrated\",\n BlobArchived = \"BlobArchived\",\n BlobNotArchived = \"BlobNotArchived\",\n AuthorizationSourceIPMismatch = \"AuthorizationSourceIPMismatch\",\n AuthorizationProtocolMismatch = \"AuthorizationProtocolMismatch\",\n AuthorizationPermissionMismatch = \"AuthorizationPermissionMismatch\",\n AuthorizationServiceMismatch = \"AuthorizationServiceMismatch\",\n AuthorizationResourceTypeMismatch = \"AuthorizationResourceTypeMismatch\"\n}\n\n/**\n * Defines values for StorageErrorCode. 
\\\n * {@link KnownStorageErrorCode} can be used interchangeably with StorageErrorCode,\n * this enum contains the known values that the service supports.\n * ### Know values supported by the service\n * **AccountAlreadyExists** \\\n * **AccountBeingCreated** \\\n * **AccountIsDisabled** \\\n * **AuthenticationFailed** \\\n * **AuthorizationFailure** \\\n * **ConditionHeadersNotSupported** \\\n * **ConditionNotMet** \\\n * **EmptyMetadataKey** \\\n * **InsufficientAccountPermissions** \\\n * **InternalError** \\\n * **InvalidAuthenticationInfo** \\\n * **InvalidHeaderValue** \\\n * **InvalidHttpVerb** \\\n * **InvalidInput** \\\n * **InvalidMd5** \\\n * **InvalidMetadata** \\\n * **InvalidQueryParameterValue** \\\n * **InvalidRange** \\\n * **InvalidResourceName** \\\n * **InvalidUri** \\\n * **InvalidXmlDocument** \\\n * **InvalidXmlNodeValue** \\\n * **Md5Mismatch** \\\n * **MetadataTooLarge** \\\n * **MissingContentLengthHeader** \\\n * **MissingRequiredQueryParameter** \\\n * **MissingRequiredHeader** \\\n * **MissingRequiredXmlNode** \\\n * **MultipleConditionHeadersNotSupported** \\\n * **OperationTimedOut** \\\n * **OutOfRangeInput** \\\n * **OutOfRangeQueryParameterValue** \\\n * **RequestBodyTooLarge** \\\n * **ResourceTypeMismatch** \\\n * **RequestUrlFailedToParse** \\\n * **ResourceAlreadyExists** \\\n * **ResourceNotFound** \\\n * **ServerBusy** \\\n * **UnsupportedHeader** \\\n * **UnsupportedXmlNode** \\\n * **UnsupportedQueryParameter** \\\n * **UnsupportedHttpVerb** \\\n * **AppendPositionConditionNotMet** \\\n * **BlobAlreadyExists** \\\n * **BlobImmutableDueToPolicy** \\\n * **BlobNotFound** \\\n * **BlobOverwritten** \\\n * **BlobTierInadequateForContentLength** \\\n * **BlobUsesCustomerSpecifiedEncryption** \\\n * **BlockCountExceedsLimit** \\\n * **BlockListTooLong** \\\n * **CannotChangeToLowerTier** \\\n * **CannotVerifyCopySource** \\\n * **ContainerAlreadyExists** \\\n * **ContainerBeingDeleted** \\\n * **ContainerDisabled** \\\n * **ContainerNotFound** \\\n * **ContentLengthLargerThanTierLimit** \\\n * **CopyAcrossAccountsNotSupported** \\\n * **CopyIdMismatch** \\\n * **FeatureVersionMismatch** \\\n * **IncrementalCopyBlobMismatch** \\\n * **IncrementalCopyOfEralierVersionSnapshotNotAllowed** \\\n * **IncrementalCopySourceMustBeSnapshot** \\\n * **InfiniteLeaseDurationRequired** \\\n * **InvalidBlobOrBlock** \\\n * **InvalidBlobTier** \\\n * **InvalidBlobType** \\\n * **InvalidBlockId** \\\n * **InvalidBlockList** \\\n * **InvalidOperation** \\\n * **InvalidPageRange** \\\n * **InvalidSourceBlobType** \\\n * **InvalidSourceBlobUrl** \\\n * **InvalidVersionForPageBlobOperation** \\\n * **LeaseAlreadyPresent** \\\n * **LeaseAlreadyBroken** \\\n * **LeaseIdMismatchWithBlobOperation** \\\n * **LeaseIdMismatchWithContainerOperation** \\\n * **LeaseIdMismatchWithLeaseOperation** \\\n * **LeaseIdMissing** \\\n * **LeaseIsBreakingAndCannotBeAcquired** \\\n * **LeaseIsBreakingAndCannotBeChanged** \\\n * **LeaseIsBrokenAndCannotBeRenewed** \\\n * **LeaseLost** \\\n * **LeaseNotPresentWithBlobOperation** \\\n * **LeaseNotPresentWithContainerOperation** \\\n * **LeaseNotPresentWithLeaseOperation** \\\n * **MaxBlobSizeConditionNotMet** \\\n * **NoAuthenticationInformation** \\\n * **NoPendingCopyOperation** \\\n * **OperationNotAllowedOnIncrementalCopyBlob** \\\n * **PendingCopyOperation** \\\n * **PreviousSnapshotCannotBeNewer** \\\n * **PreviousSnapshotNotFound** \\\n * **PreviousSnapshotOperationNotSupported** \\\n * **SequenceNumberConditionNotMet** \\\n * 
**SequenceNumberIncrementTooLarge** \\\n * **SnapshotCountExceeded** \\\n * **SnapshotOperationRateExceeded** \\\n * **SnapshotsPresent** \\\n * **SourceConditionNotMet** \\\n * **SystemInUse** \\\n * **TargetConditionNotMet** \\\n * **UnauthorizedBlobOverwrite** \\\n * **BlobBeingRehydrated** \\\n * **BlobArchived** \\\n * **BlobNotArchived** \\\n * **AuthorizationSourceIPMismatch** \\\n * **AuthorizationProtocolMismatch** \\\n * **AuthorizationPermissionMismatch** \\\n * **AuthorizationServiceMismatch** \\\n * **AuthorizationResourceTypeMismatch**\n */\nexport type StorageErrorCode = string;\n/** Defines values for GeoReplicationStatusType. */\nexport type GeoReplicationStatusType = \"live\" | \"bootstrap\" | \"unavailable\";\n/** Defines values for ListContainersIncludeType. */\nexport type ListContainersIncludeType = \"metadata\" | \"deleted\" | \"system\";\n/** Defines values for LeaseStatusType. */\nexport type LeaseStatusType = \"locked\" | \"unlocked\";\n/** Defines values for LeaseStateType. */\nexport type LeaseStateType =\n | \"available\"\n | \"leased\"\n | \"expired\"\n | \"breaking\"\n | \"broken\";\n/** Defines values for LeaseDurationType. */\nexport type LeaseDurationType = \"infinite\" | \"fixed\";\n/** Defines values for PublicAccessType. */\nexport type PublicAccessType = \"container\" | \"blob\";\n/** Defines values for SkuName. */\nexport type SkuName =\n | \"Standard_LRS\"\n | \"Standard_GRS\"\n | \"Standard_RAGRS\"\n | \"Standard_ZRS\"\n | \"Premium_LRS\";\n/** Defines values for AccountKind. */\nexport type AccountKind =\n | \"Storage\"\n | \"BlobStorage\"\n | \"StorageV2\"\n | \"FileStorage\"\n | \"BlockBlobStorage\";\n/** Defines values for ListBlobsIncludeItem. */\nexport type ListBlobsIncludeItem =\n | \"copy\"\n | \"deleted\"\n | \"metadata\"\n | \"snapshots\"\n | \"uncommittedblobs\"\n | \"versions\"\n | \"tags\"\n | \"immutabilitypolicy\"\n | \"legalhold\"\n | \"deletedwithversions\";\n/** Defines values for BlobType. */\nexport type BlobType = \"BlockBlob\" | \"PageBlob\" | \"AppendBlob\";\n/** Defines values for CopyStatusType. */\nexport type CopyStatusType = \"pending\" | \"success\" | \"aborted\" | \"failed\";\n/** Defines values for AccessTier. */\nexport type AccessTier =\n | \"P4\"\n | \"P6\"\n | \"P10\"\n | \"P15\"\n | \"P20\"\n | \"P30\"\n | \"P40\"\n | \"P50\"\n | \"P60\"\n | \"P70\"\n | \"P80\"\n | \"Hot\"\n | \"Cool\"\n | \"Archive\";\n/** Defines values for ArchiveStatus. */\nexport type ArchiveStatus =\n | \"rehydrate-pending-to-hot\"\n | \"rehydrate-pending-to-cool\";\n/** Defines values for RehydratePriority. */\nexport type RehydratePriority = \"High\" | \"Standard\";\n/** Defines values for BlobImmutabilityPolicyMode. */\nexport type BlobImmutabilityPolicyMode = \"Mutable\" | \"Unlocked\" | \"Locked\";\n/** Defines values for DeleteSnapshotsOptionType. */\nexport type DeleteSnapshotsOptionType = \"include\" | \"only\";\n/** Defines values for BlobCopySourceTags. */\nexport type BlobCopySourceTags = \"REPLACE\" | \"COPY\";\n/** Defines values for BlockListType. */\nexport type BlockListType = \"committed\" | \"uncommitted\" | \"all\";\n/** Defines values for SequenceNumberActionType. */\nexport type SequenceNumberActionType = \"max\" | \"update\" | \"increment\";\n/** Defines values for QueryFormatType. */\nexport type QueryFormatType = \"delimited\" | \"json\" | \"arrow\" | \"parquet\";\n/** Defines values for SyncCopyStatusType. */\nexport type SyncCopyStatusType = \"success\";\n/** Defines values for EncryptionAlgorithmType. 
*/\nexport type EncryptionAlgorithmType = \"AES256\";\n\n/** Optional parameters. */\nexport interface ServiceSetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the setProperties operation. */\nexport type ServiceSetPropertiesResponse = ServiceSetPropertiesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceSetPropertiesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ServiceGetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getProperties operation. */\nexport type ServiceGetPropertiesResponse = ServiceGetPropertiesHeaders &\n BlobServiceProperties & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlobServiceProperties;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetPropertiesHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ServiceGetStatisticsOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getStatistics operation. */\nexport type ServiceGetStatisticsResponse = ServiceGetStatisticsHeaders &\n BlobServiceStatistics & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlobServiceStatistics;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetStatisticsHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ServiceListContainersSegmentOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Filters the results to return only containers whose name begins with the specified prefix. */\n prefix?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. 
The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Include this parameter to specify that the container's metadata be returned as part of the response body. */\n include?: ListContainersIncludeType[];\n}\n\n/** Contains response data for the listContainersSegment operation. */\nexport type ServiceListContainersSegmentResponse = ServiceListContainersSegmentHeaders &\n ListContainersSegmentResponse & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: ListContainersSegmentResponse;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceListContainersSegmentHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ServiceGetUserDelegationKeyOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getUserDelegationKey operation. */\nexport type ServiceGetUserDelegationKeyResponse = ServiceGetUserDelegationKeyHeaders &\n UserDelegationKey & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: UserDelegationKey;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetUserDelegationKeyHeaders;\n };\n };\n\n/** Contains response data for the getAccountInfo operation. */\nexport type ServiceGetAccountInfoResponse = ServiceGetAccountInfoHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetAccountInfoHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ServiceSubmitBatchOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the submitBatch operation. 
*/\nexport type ServiceSubmitBatchResponse = ServiceSubmitBatchHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceSubmitBatchHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ServiceFilterBlobsOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Filters the results to return only to return only blobs whose tags match the specified expression. */\n where?: string;\n}\n\n/** Contains response data for the filterBlobs operation. */\nexport type ServiceFilterBlobsResponse = ServiceFilterBlobsHeaders &\n FilterBlobSegment & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: FilterBlobSegment;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceFilterBlobsHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerCreateOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n containerEncryptionScope?: ContainerEncryptionScope;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. 
Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies whether data in the container may be accessed publicly and the level of access */\n access?: PublicAccessType;\n}\n\n/** Contains response data for the create operation. */\nexport type ContainerCreateResponse = ContainerCreateHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerCreateHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerGetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getProperties operation. */\nexport type ContainerGetPropertiesResponse = ContainerGetPropertiesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerGetPropertiesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerDeleteOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the delete operation. */\nexport type ContainerDeleteResponse = ContainerDeleteHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerDeleteHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerSetMetadataOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. 
See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n}\n\n/** Contains response data for the setMetadata operation. */\nexport type ContainerSetMetadataResponse = ContainerSetMetadataHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerSetMetadataHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerGetAccessPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getAccessPolicy operation. */\nexport type ContainerGetAccessPolicyResponse = ContainerGetAccessPolicyHeaders &\n SignedIdentifier[] & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: SignedIdentifier[];\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerGetAccessPolicyHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerSetAccessPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies whether data in the container may be accessed publicly and the level of access */\n access?: PublicAccessType;\n /** the acls for the container */\n containerAcl?: SignedIdentifier[];\n}\n\n/** Contains response data for the setAccessPolicy operation. */\nexport type ContainerSetAccessPolicyResponse = ContainerSetAccessPolicyHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerSetAccessPolicyHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerRestoreOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-12-12 and later. Specifies the name of the deleted container to restore. */\n deletedContainerName?: string;\n /** Optional. Version 2019-12-12 and later. Specifies the version of the deleted container to restore. */\n deletedContainerVersion?: string;\n}\n\n/** Contains response data for the restore operation. 
*/\nexport type ContainerRestoreResponse = ContainerRestoreHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerRestoreHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerRenameOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A lease ID for the source path. If specified, the source path must have an active lease and the lease ID must match. */\n sourceLeaseId?: string;\n}\n\n/** Contains response data for the rename operation. */\nexport type ContainerRenameResponse = ContainerRenameHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerRenameHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerSubmitBatchOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the submitBatch operation. */\nexport type ContainerSubmitBatchResponse = ContainerSubmitBatchHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerSubmitBatchHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerFilterBlobsOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. 
For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Filters the results to return only to return only blobs whose tags match the specified expression. */\n where?: string;\n}\n\n/** Contains response data for the filterBlobs operation. */\nexport type ContainerFilterBlobsResponse = ContainerFilterBlobsHeaders &\n FilterBlobSegment & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: FilterBlobSegment;\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerFilterBlobsHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerAcquireLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease duration cannot be changed using renew or change. */\n duration?: number;\n /** Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor (String) for a list of valid GUID string formats. */\n proposedLeaseId?: string;\n}\n\n/** Contains response data for the acquireLease operation. */\nexport type ContainerAcquireLeaseResponse = ContainerAcquireLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerAcquireLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerReleaseLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the releaseLease operation. */\nexport type ContainerReleaseLeaseResponse = ContainerReleaseLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerReleaseLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerRenewLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. 
*/\n requestId?: string;\n}\n\n/** Contains response data for the renewLease operation. */\nexport type ContainerRenewLeaseResponse = ContainerRenewLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerRenewLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerBreakLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */\n breakPeriod?: number;\n}\n\n/** Contains response data for the breakLease operation. */\nexport type ContainerBreakLeaseResponse = ContainerBreakLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerBreakLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerChangeLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the changeLease operation. */\nexport type ContainerChangeLeaseResponse = ContainerChangeLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerChangeLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerListBlobFlatSegmentOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Filters the results to return only containers whose name begins with the specified prefix. */\n prefix?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. 
The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Include this parameter to specify one or more datasets to include in the response. */\n include?: ListBlobsIncludeItem[];\n}\n\n/** Contains response data for the listBlobFlatSegment operation. */\nexport type ContainerListBlobFlatSegmentResponse = ContainerListBlobFlatSegmentHeaders &\n ListBlobsFlatSegmentResponse & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: ListBlobsFlatSegmentResponse;\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerListBlobFlatSegmentHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerListBlobHierarchySegmentOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Filters the results to return only containers whose name begins with the specified prefix. */\n prefix?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Include this parameter to specify one or more datasets to include in the response. */\n include?: ListBlobsIncludeItem[];\n}\n\n/** Contains response data for the listBlobHierarchySegment operation. */\nexport type ContainerListBlobHierarchySegmentResponse = ContainerListBlobHierarchySegmentHeaders &\n ListBlobsHierarchySegmentResponse & {\n /** The underlying HTTP response. 
*/\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: ListBlobsHierarchySegmentResponse;\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerListBlobHierarchySegmentHeaders;\n };\n };\n\n/** Contains response data for the getAccountInfo operation. */\nexport type ContainerGetAccountInfoResponse = ContainerGetAccountInfoHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerGetAccountInfoHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobDownloadOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. */\n rangeGetContentMD5?: boolean;\n /** When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 MB in size. */\n rangeGetContentCRC64?: boolean;\n}\n\n/** Contains response data for the download operation. */\nexport type BlobDownloadResponse = BlobDownloadHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobDownloadHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobGetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. 
*/\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n}\n\n/** Contains response data for the getProperties operation. */\nexport type BlobGetPropertiesResponse = BlobGetPropertiesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobGetPropertiesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobDeleteOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Required if the blob has associated snapshots. Specify one of the following two options: include: Delete the base blob and all of its snapshots. only: Delete only the blob's snapshots and not the blob itself */\n deleteSnapshots?: DeleteSnapshotsOptionType;\n /** Optional. Only possible value is 'permanent', which specifies to permanently delete a blob if blob soft delete is enabled. */\n blobDeleteType?: string;\n}\n\n/** Contains response data for the delete operation. */\nexport type BlobDeleteResponse = BlobDeleteHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobDeleteHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobUndeleteOptionalParams extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the undelete operation. */\nexport type BlobUndeleteResponse = BlobUndeleteHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobUndeleteHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetExpiryOptionalParams extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The time to set the blob to expiry */\n expiresOn?: string;\n}\n\n/** Contains response data for the setExpiry operation. */\nexport type BlobSetExpiryResponse = BlobSetExpiryHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetExpiryHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetHttpHeadersOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the setHttpHeaders operation. */\nexport type BlobSetHttpHeadersResponse = BlobSetHttpHeadersHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetHttpHeadersHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetImmutabilityPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n}\n\n/** Contains response data for the setImmutabilityPolicy operation. */\nexport type BlobSetImmutabilityPolicyResponse = BlobSetImmutabilityPolicyHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetImmutabilityPolicyHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobDeleteImmutabilityPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the deleteImmutabilityPolicy operation. */\nexport type BlobDeleteImmutabilityPolicyResponse = BlobDeleteImmutabilityPolicyHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlobDeleteImmutabilityPolicyHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetLegalHoldOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the setLegalHold operation. */\nexport type BlobSetLegalHoldResponse = BlobSetLegalHoldHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetLegalHoldHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetMetadataOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the setMetadata operation. */\nexport type BlobSetMetadataResponse = BlobSetMetadataHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetMetadataHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobAcquireLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease duration cannot be changed using renew or change. 
*/\n duration?: number;\n /** Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor (String) for a list of valid GUID string formats. */\n proposedLeaseId?: string;\n}\n\n/** Contains response data for the acquireLease operation. */\nexport type BlobAcquireLeaseResponse = BlobAcquireLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobAcquireLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobReleaseLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the releaseLease operation. */\nexport type BlobReleaseLeaseResponse = BlobReleaseLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobReleaseLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobRenewLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the renewLease operation. */\nexport type BlobRenewLeaseResponse = BlobRenewLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobRenewLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobChangeLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the changeLease operation. */\nexport type BlobChangeLeaseResponse = BlobChangeLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobChangeLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobBreakLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */\n breakPeriod?: number;\n}\n\n/** Contains response data for the breakLease operation. */\nexport type BlobBreakLeaseResponse = BlobBreakLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobBreakLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobCreateSnapshotOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the createSnapshot operation. */\nexport type BlobCreateSnapshotResponse = BlobCreateSnapshotHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobCreateSnapshotHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobStartCopyFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional: Indicates the priority with which to rehydrate an archived blob. */\n rehydratePriority?: RehydratePriority;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Overrides the sealed state of the destination blob. Service version 2019-12-12 and newer. */\n sealBlob?: boolean;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n}\n\n/** Contains response data for the startCopyFromURL operation. */\nexport type BlobStartCopyFromURLResponse = BlobStartCopyFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobStartCopyFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobCopyFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. 
Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by x-ms-tags. */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/** Contains response data for the copyFromURL operation. */\nexport type BlobCopyFromURLResponse = BlobCopyFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobCopyFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobAbortCopyFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the abortCopyFromURL operation. */\nexport type BlobAbortCopyFromURLResponse = BlobAbortCopyFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobAbortCopyFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetTierOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Optional: Indicates the priority with which to rehydrate an archived blob. */\n rehydratePriority?: RehydratePriority;\n}\n\n/** Contains response data for the setTier operation. */\nexport type BlobSetTierResponse = BlobSetTierHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlobSetTierHeaders;\n };\n};\n\n/** Contains response data for the getAccountInfo operation. */\nexport type BlobGetAccountInfoResponse = BlobGetAccountInfoHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobGetAccountInfoHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobQueryOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** the query request */\n queryRequest?: QueryRequest;\n}\n\n/** Contains response data for the query operation. */\nexport type BlobQueryResponse = BlobQueryHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobQueryHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobGetTagsOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n}\n\n/** Contains response data for the getTags operation. */\nexport type BlobGetTagsResponse = BlobGetTagsHeaders &\n BlobTags & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlobTags;\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobGetTagsHeaders;\n };\n };\n\n/** Optional parameters. 
*/\nexport interface BlobSetTagsOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Blob tags */\n tags?: BlobTags;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the setTags operation. */\nexport type BlobSetTagsResponse = BlobSetTagsHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetTagsHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobCreateOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. 
*/\n legalHold?: boolean;\n /** Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 and 2^63 - 1. */\n blobSequenceNumber?: number;\n}\n\n/** Contains response data for the create operation. */\nexport type PageBlobCreateResponse = PageBlobCreateHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobCreateHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobUploadPagesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sequenceNumberAccessConditions?: SequenceNumberAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the uploadPages operation. */\nexport type PageBlobUploadPagesResponse = PageBlobUploadPagesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobUploadPagesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobClearPagesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sequenceNumberAccessConditions?: SequenceNumberAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the clearPages operation. 
*/\nexport type PageBlobClearPagesResponse = PageBlobClearPagesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobClearPagesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobUploadPagesFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** Parameter group */\n sequenceNumberAccessConditions?: SequenceNumberAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Specify the crc64 calculated for the range of bytes that must be read from the copy source. */\n sourceContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the uploadPagesFromURL operation. */\nexport type PageBlobUploadPagesFromURLResponse = PageBlobUploadPagesFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobUploadPagesFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobGetPageRangesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. 
Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n}\n\n/** Contains response data for the getPageRanges operation. */\nexport type PageBlobGetPageRangesResponse = PageBlobGetPageRangesHeaders &\n PageList & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: PageList;\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobGetPageRangesHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface PageBlobGetPageRangesDiffOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a DateTime value that specifies that the response will contain only pages that were changed between target blob and previous snapshot. Changed pages include both updated and cleared pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is the older of the two. Note that incremental snapshots are currently supported only for blobs created on or after January 1, 2016. 
*/\n prevsnapshot?: string;\n /** Optional. This header is only supported in service versions 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The response will only contain pages that were changed between the target blob and its previous snapshot. */\n prevSnapshotUrl?: string;\n}\n\n/** Contains response data for the getPageRangesDiff operation. */\nexport type PageBlobGetPageRangesDiffResponse = PageBlobGetPageRangesDiffHeaders &\n PageList & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: PageList;\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface PageBlobResizeOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the resize operation. */\nexport type PageBlobResizeResponse = PageBlobResizeHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobResizeHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobUpdateSequenceNumberOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 and 2^63 - 1. */\n blobSequenceNumber?: number;\n}\n\n/** Contains response data for the updateSequenceNumber operation. */\nexport type PageBlobUpdateSequenceNumberResponse = PageBlobUpdateSequenceNumberHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobUpdateSequenceNumberHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobCopyIncrementalOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. 
For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the copyIncremental operation. */\nexport type PageBlobCopyIncrementalResponse = PageBlobCopyIncrementalHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobCopyIncrementalHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface AppendBlobCreateOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n}\n\n/** Contains response data for the create operation. */\nexport type AppendBlobCreateResponse = AppendBlobCreateHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobCreateHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface AppendBlobAppendBlockOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n appendPositionAccessConditions?: AppendPositionAccessConditions;\n /** The timeout parameter is expressed in seconds. 
For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the appendBlock operation. */\nexport type AppendBlobAppendBlockResponse = AppendBlobAppendBlockHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobAppendBlockHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface AppendBlobAppendBlockFromUrlOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** Parameter group */\n appendPositionAccessConditions?: AppendPositionAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the crc64 calculated for the range of bytes that must be read from the copy source. */\n sourceContentCrc64?: Uint8Array;\n /** Bytes of source data in the specified range. */\n sourceRange?: string;\n}\n\n/** Contains response data for the appendBlockFromUrl operation. */\nexport type AppendBlobAppendBlockFromUrlResponse = AppendBlobAppendBlockFromUrlHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobAppendBlockFromUrlHeaders;\n };\n};\n\n/** Optional parameters. 
*/\nexport interface AppendBlobSealOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n appendPositionAccessConditions?: AppendPositionAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the seal operation. */\nexport type AppendBlobSealResponse = AppendBlobSealHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobSealHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobUploadOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n}\n\n/** Contains response data for the upload operation. */\nexport type BlockBlobUploadResponse = BlockBlobUploadHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlockBlobUploadHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobPutBlobFromUrlOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by x-ms-tags. */\n copySourceTags?: BlobCopySourceTags;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Optional, default is true. Indicates if properties from the source blob should be copied. */\n copySourceBlobProperties?: boolean;\n}\n\n/** Contains response data for the putBlobFromUrl operation. */\nexport type BlockBlobPutBlobFromUrlResponse = BlockBlobPutBlobFromUrlHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobPutBlobFromUrlHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobStageBlockOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the stageBlock operation. */\nexport type BlockBlobStageBlockResponse = BlockBlobStageBlockHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobStageBlockHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobStageBlockFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Specify the crc64 calculated for the range of bytes that must be read from the copy source. */\n sourceContentCrc64?: Uint8Array;\n /** Bytes of source data in the specified range. */\n sourceRange?: string;\n}\n\n/** Contains response data for the stageBlockFromURL operation. */\nexport type BlockBlobStageBlockFromURLResponse = BlockBlobStageBlockFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobStageBlockFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobCommitBlockListOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the commitBlockList operation. */\nexport type BlockBlobCommitBlockListResponse = BlockBlobCommitBlockListHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobCommitBlockListHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobGetBlockListOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n}\n\n/** Contains response data for the getBlockList operation. */\nexport type BlockBlobGetBlockListResponse = BlockBlobGetBlockListHeaders &\n BlockList & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlockList;\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlockBlobGetBlockListHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface StorageClientOptionalParams\n extends coreHttp.ServiceClientOptions {\n /** Specifies the version of the operation to use for this request. */\n version?: string;\n /** Overrides client endpoint. */\n endpoint?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js new file mode 100644 index 00000000..c088f044 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js @@ -0,0 +1,8192 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging" + } + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule" + } + } + } + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String" + } + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite" + } + } + } + } +}; +export const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String" + } + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean" + } + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean" + } + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +export const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + days: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number" + } + } + } + } +}; +export const Metrics = { + serializedName: "Metrics", 
+ type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +export const CorsRule = { + serializedName: "CorsRule", + type: { + name: "Composite", + className: "CorsRule", + modelProperties: { + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", + type: { + name: "String" + } + }, + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: "AllowedMethods", + type: { + name: "String" + } + }, + allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", + type: { + name: "String" + } + }, + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String" + } + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", + type: { + name: "Number" + } + } + } + } +}; +export const StaticWebsite = { + serializedName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", + type: { + name: "String" + } + }, + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", + type: { + name: "String" + } + }, + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", + type: { + name: "String" + } + } + } + } +}; +export const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", + type: { + name: "String" + } + }, + code: { + serializedName: "Code", + xmlName: "Code", + type: { + name: "String" + } + } + } + } +}; +export const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", + type: { + name: "Composite", + className: "BlobServiceStatistics", + modelProperties: { + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication" + } + } + } + } +}; +export const GeoReplication = { + serializedName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + modelProperties: { + status: { + serializedName: "Status", + required: true, + xmlName: "Status", + type: { + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"] + } + }, + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListContainersSegmentResponse", + 
modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", + type: { + name: "Composite", + className: "ContainerItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; +export const ContainerProperties = { + serializedName: "ContainerProperties", + type: { + name: "Composite", + className: "ContainerProperties", + modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", + type: { + name: "String" + } + }, + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", + type: { + name: "Boolean" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + 
type: { + name: "Number" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", + type: { + name: "Boolean" + } + } + } + } +}; +export const KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String" + } + } + } + } +}; +export const UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: "UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", + type: { + name: "String" + } + }, + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", + type: { + name: "String" + } + }, + signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", + type: { + name: "String" + } + }, + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", + type: { + name: "String" + } + }, + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", + type: { + name: "String" + } + }, + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +export const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + where: { + serializedName: "Where", + required: true, + xmlName: "Where", + type: { + name: "String" + } + }, + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + type: { + name: "String" + } + }, + tags: { + serializedName: "Tags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + } + } + } +}; +export const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag" + } + } + } + } + } + } +}; +export const BlobTag = { + serializedName: "BlobTag", + 
xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +export const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", + type: { + name: "String" + } + }, + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy" + } + } + } + } +}; +export const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + xmlName: "Expiry", + type: { + name: "String" + } + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", + type: { + name: "String" + } + } + } + } +}; +export const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsFlatSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + modelProperties: { + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +export const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", + type: { + name: "Composite", + className: "BlobItemInternal", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + }, + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", + type: { + name: "String" + } + }, + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", + type: { + name: "Boolean" + 
} + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + }, + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: "HasVersionsOnly", + type: { + name: "Boolean" + } + } + } + } +}; +export const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + type: { + name: "String" + } + } + } + } +}; +export const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + modelProperties: { + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", + type: { + name: "DateTimeRfc1123" + } + }, + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", + type: { + name: "String" + } + }, + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + }, + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", + type: { + name: "Enum", + 
allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", + type: { + name: "String" + } + }, + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", + type: { + name: "String" + } + }, + serverEncrypted: { + serializedName: "ServerEncrypted", + xmlName: "ServerEncrypted", + type: { + name: "Boolean" + } + }, + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", + type: { + name: "String" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + }, + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", + type: { + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool" + ] + } + }, + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", + type: { + name: "DateTimeRfc1123" + } + }, + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", + type: { + name: "Boolean" + } + } + } + } +}; +export const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: 
"ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", + type: { + name: "String" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const BlobHierarchyListSegment = { + serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix" + } + } + } + }, + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +export const BlobPrefix = { + serializedName: "BlobPrefix", + type: { + name: "Composite", + className: "BlobPrefix", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + } + } + } +}; +export const BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; +export const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + }, + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + } + } + } +}; +export const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: "Block", + modelProperties: { + name: { + 
serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + size: { + serializedName: "Size", + required: true, + xmlName: "Size", + type: { + name: "Number" + } + } + } + } +}; +export const PageList = { + serializedName: "PageList", + type: { + name: "Composite", + className: "PageList", + modelProperties: { + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PageRange" + } + } + } + }, + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", + type: { + name: "Composite", + className: "PageRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +export const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +export const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", + type: { + name: "String" + } + }, + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", + type: { + name: "String" + } + }, + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + }, + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + } + } + } +}; +export const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", + type: { + name: "Composite", + className: "QueryFormat" + } + } + } + } +}; +export const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: "Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "Enum", + allowedValues: ["delimited", "json", "arrow", "parquet"] + } + }, + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration" + } + }, + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration" + } + }, + arrowConfiguration: { + serializedName: "ArrowConfiguration", + xmlName: 
"ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration" + } + }, + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", + type: { + name: "any" + } + } + } + } +}; +export const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + modelProperties: { + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", + type: { + name: "String" + } + }, + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", + type: { + name: "String" + } + }, + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + }, + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", + type: { + name: "String" + } + }, + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", + type: { + name: "Boolean" + } + } + } + } +}; +export const JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + } + } + } +}; +export const ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + modelProperties: { + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField" + } + } + } + } + } + } +}; +export const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", + type: { + name: "Composite", + className: "ArrowField", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "String" + } + }, + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "String" + } + }, + precision: { + serializedName: "Precision", + xmlName: "Precision", + type: { + name: "Number" + } + }, + scale: { + serializedName: "Scale", + xmlName: "Scale", + type: { + name: "Number" + } + } + } + } +}; +export const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetPropertiesHeaders = { + serializedName: 
"Service_getPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { 
+ serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchExceptionHeaders", + 
modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerCreateHeaders = { + serializedName: "Container_createHeaders", + type: { + name: "Composite", + className: "ContainerCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", + type: { + name: "Composite", + className: "ContainerCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesHeaders", + modelProperties: { + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + 
"broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + }, + denyEncryptionScopeOverride: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", + type: { + name: "Composite", + className: "ContainerDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", + type: { + name: "Composite", + className: "ContainerDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + 
serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyHeaders", + modelProperties: { + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", + type: { + name: "Composite", + className: "ContainerRestoreHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRestoreExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", + type: { + name: "Composite", + className: "ContainerRenameHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const 
ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + 
type: { + name: "String" + } + } + } + } +}; +export const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerBreakLeaseHeaders = { + serializedName: "Container_breakLeaseHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", 
+ xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: 
"x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +export const ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", + type: { + name: "Composite", + className: "BlobDownloadHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + 
type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: 
"Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +export const BlobDownloadExceptionHeaders = { + serializedName: "Blob_downloadExceptionHeaders", + type: { + name: "Composite", + className: "BlobDownloadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetPropertiesHeaders = { + serializedName: "Blob_getPropertiesHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", + type: { + name: "String" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: 
{ + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String" + } + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + 
xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: 
"last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + } + } +}; +export const BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + 
type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + } + } +}; +export const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + 
encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + 
lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + 
errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: 
"String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +export const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + 
serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +export const BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + 
modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, 
+ encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: 
"ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + 
xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: 
"String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: 
{ + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: 
"x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + 
}, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + } + } + } +}; +export const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { 
+ name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + 
modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobCommitBlockListExceptionHeaders = { + 
serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +//# sourceMappingURL=mappers.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js.map new file mode 100644 index 00000000..a26e2338 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"mappers.js","sourceRoot":"","sources":["../../../../../../src/generated/src/models/mappers.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAIH,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,uBAAuB;IACvC,OAAO,EAAE,0BAA0B;IACnC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,oBAAoB,EAAE;gBACpB,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,SAAS;iBACrB;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,SAAS;iBACrB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,SAAS;iBACrB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,UAAU;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,UAAU;yBACtB;qBACF;iBACF;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,iBAAiB;iBAC7B;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,eAAe;iBAC3B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,iBAAiB;iBAC7B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAA6B;IACvD,cAAc,EAAE,iBAAiB;IACjC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iBAAiB;QAC5B,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,IAAI,EAAE;gBACJ,WAAW,EAAE;oBACX,gBAAgB,EAAE,CAAC;iBACpB;gBACD,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,iBAAiB;iBAC7B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA6B;IAChD,cAAc,EAAE,UAAU;IAC1B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE;YACf,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBA
CJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,WAAW,EAAE;oBACX,gBAAgB,EAAE,CAAC;iBACpB;gBACD,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAA6B;IACrD,cAAc,EAAE,eAAe;IAC/B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,eAAe;QAC1B,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,oBAAoB,EAAE;gBACpB,cAAc,EAAE,sBAAsB;gBACtC,OAAO,EAAE,sBAAsB;gBAC/B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,wBAAwB,EAAE;gBACxB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA6B;IACpD,cAAc,EAAE,cAAc;IAC9B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,cAAc;QACzB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,uBAAuB;IACvC,OAAO,EAAE,qBAAqB;IAC9B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,gBAAgB;iBAC5B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAA6B;IACtD,cAAc,EAAE,gBAAgB;IAChC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gBAAgB;QAC3B,eAAe,EAAE;YACf,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,MAAM,EAAE,WAAW,EAAE,aAAa,CAAC;iBACpD;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,cAAc;gBAC9B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA6B;IACrE,cAAc,EAAE,+BAA+B;IAC/C,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,+BAA+B;QAC1C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,YAAY;gBACrB,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,WAAW;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,eAAe;yBAC3B;qBACF;iBACF;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAA6B;IACrD,cAAc,EAAE,eAAe;IAC/B,OAAO,EAAE,WAAW;IACpB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,eAAe;QAC1B,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,S
,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;iBACpC;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,2BAA2B,EAAE;gBAC3B,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,oCAAoC;IACpD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAA6B;IACzD,cAAc,EAAE,oBAAoB;IACpC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mBAAmB;QAC9B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAA6B;IAC3D,cAAc,EAAE,sBAAsB;IACtC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qBAAqB;QAChC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aAC
F;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAA6B;IAC5D,cAAc,EAAE,uBAAuB;IACvC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sBAAsB;QACjC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA6B;IACrE,cAAc,EAAE,gCAAgC;IAChD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,+BAA+B;QAC1C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,wBAAwB,EAAE;gBACxB,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,E
AAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yCAAyC,GAA6B;IACjF,cAAc,EAAE,4CAA4C;IAC5D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2CAA2C;QACtD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cA
Ac,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,
EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,QAAQ,EAAE;gBACR,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD
,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,YAAY,EAAE,SAAS;gBACvB,UAAU,EAAE,IAAI;gBAChB,cAAc,EAAE,kBAAkB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;A
AEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;qBACd;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;qBACnB;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA6B;IACxD,cAAc,EAAE,mBAAmB;IACnC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kBAAkB;QAC7B,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ
,EAAE;gBACR,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;iBACvD;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf
,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EA
AE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QA
AQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,oCAAoC;IACpD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0CAA0C,GAA6B;IAClF,cAAc,EAAE,6CAA6C;IAC7D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4CAA4C;QACvD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE
,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yCAAyC,GAA6B;IACjF,cAAc,EAAE,4CAA4C;IAC5D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2CAA2C;QACtD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,
IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uCAAuC,GAA6B;IAC/E,cAAc,EAAE,0CAA0C;IAC1D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yCAAyC;QACpD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,
IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBA
C1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE
;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uCAAuC,GAA6B;IAC/E,cAAc,EAAE,0CAA0C;IAC1D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yCAAyC;QACpD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,oCAAoC;IACpD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAA
c;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0CAA0C,GAA6B;IAClF,cAAc,EAAE,6CAA6C;IAC7D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4CAA4C;QACvD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAA6B;IAChF,cAAc,EAAE,2CAA2C;IAC3D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0CAA0C;QACrD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;Y
ACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\n\nexport const BlobServiceProperties: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceProperties\",\n xmlName: \"StorageServiceProperties\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceProperties\",\n modelProperties: {\n blobAnalyticsLogging: {\n serializedName: \"Logging\",\n xmlName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\"\n }\n },\n hourMetrics: {\n serializedName: \"HourMetrics\",\n xmlName: \"HourMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n minuteMetrics: {\n serializedName: \"MinuteMetrics\",\n xmlName: \"MinuteMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n cors: {\n serializedName: \"Cors\",\n xmlName: \"Cors\",\n xmlIsWrapped: true,\n xmlElementName: \"CorsRule\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"CorsRule\"\n }\n }\n }\n },\n defaultServiceVersion: {\n serializedName: \"DefaultServiceVersion\",\n xmlName: \"DefaultServiceVersion\",\n type: {\n name: \"String\"\n }\n },\n deleteRetentionPolicy: {\n serializedName: \"DeleteRetentionPolicy\",\n xmlName: \"DeleteRetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n },\n staticWebsite: {\n serializedName: \"StaticWebsite\",\n xmlName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\"\n }\n }\n }\n }\n};\n\nexport const Logging: coreHttp.CompositeMapper = {\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n required: true,\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n deleteProperty: {\n serializedName: \"Delete\",\n required: true,\n xmlName: \"Delete\",\n type: {\n name: \"Boolean\"\n }\n },\n read: {\n serializedName: \"Read\",\n required: true,\n xmlName: \"Read\",\n type: {\n name: \"Boolean\"\n }\n },\n write: {\n serializedName: \"Write\",\n required: true,\n xmlName: \"Write\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const RetentionPolicy: coreHttp.CompositeMapper = {\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n days: {\n constraints: {\n InclusiveMinimum: 1\n },\n 
serializedName: \"Days\",\n xmlName: \"Days\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const Metrics: coreHttp.CompositeMapper = {\n serializedName: \"Metrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n includeAPIs: {\n serializedName: \"IncludeAPIs\",\n xmlName: \"IncludeAPIs\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const CorsRule: coreHttp.CompositeMapper = {\n serializedName: \"CorsRule\",\n type: {\n name: \"Composite\",\n className: \"CorsRule\",\n modelProperties: {\n allowedOrigins: {\n serializedName: \"AllowedOrigins\",\n required: true,\n xmlName: \"AllowedOrigins\",\n type: {\n name: \"String\"\n }\n },\n allowedMethods: {\n serializedName: \"AllowedMethods\",\n required: true,\n xmlName: \"AllowedMethods\",\n type: {\n name: \"String\"\n }\n },\n allowedHeaders: {\n serializedName: \"AllowedHeaders\",\n required: true,\n xmlName: \"AllowedHeaders\",\n type: {\n name: \"String\"\n }\n },\n exposedHeaders: {\n serializedName: \"ExposedHeaders\",\n required: true,\n xmlName: \"ExposedHeaders\",\n type: {\n name: \"String\"\n }\n },\n maxAgeInSeconds: {\n constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"MaxAgeInSeconds\",\n required: true,\n xmlName: \"MaxAgeInSeconds\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const StaticWebsite: coreHttp.CompositeMapper = {\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n indexDocument: {\n serializedName: \"IndexDocument\",\n xmlName: \"IndexDocument\",\n type: {\n name: \"String\"\n }\n },\n errorDocument404Path: {\n serializedName: \"ErrorDocument404Path\",\n xmlName: \"ErrorDocument404Path\",\n type: {\n name: \"String\"\n }\n },\n defaultIndexDocumentPath: {\n serializedName: \"DefaultIndexDocumentPath\",\n xmlName: \"DefaultIndexDocumentPath\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const StorageError: coreHttp.CompositeMapper = {\n serializedName: \"StorageError\",\n type: {\n name: \"Composite\",\n className: \"StorageError\",\n modelProperties: {\n message: {\n serializedName: \"Message\",\n xmlName: \"Message\",\n type: {\n name: \"String\"\n }\n },\n code: {\n serializedName: \"Code\",\n xmlName: \"Code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobServiceStatistics: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceStatistics\",\n xmlName: \"StorageServiceStats\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceStatistics\",\n modelProperties: {\n geoReplication: {\n serializedName: \"GeoReplication\",\n xmlName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\"\n }\n }\n }\n }\n};\n\nexport const GeoReplication: coreHttp.CompositeMapper = {\n serializedName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\",\n modelProperties: {\n status: {\n serializedName: \"Status\",\n required: true,\n xmlName: 
\"Status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"live\", \"bootstrap\", \"unavailable\"]\n }\n },\n lastSyncOn: {\n serializedName: \"LastSyncTime\",\n required: true,\n xmlName: \"LastSyncTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ListContainersSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListContainersSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListContainersSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n containerItems: {\n serializedName: \"ContainerItems\",\n required: true,\n xmlName: \"Containers\",\n xmlIsWrapped: true,\n xmlElementName: \"Container\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ContainerItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerItem: coreHttp.CompositeMapper = {\n serializedName: \"ContainerItem\",\n xmlName: \"Container\",\n type: {\n name: \"Composite\",\n className: \"ContainerItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n }\n }\n }\n};\n\nexport const ContainerProperties: coreHttp.CompositeMapper = {\n serializedName: \"ContainerProperties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\",\n modelProperties: {\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n publicAccess: {\n serializedName: \"PublicAccess\",\n xmlName: \"PublicAccess\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"HasImmutabilityPolicy\",\n xmlName: 
\"HasImmutabilityPolicy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"HasLegalHold\",\n xmlName: \"HasLegalHold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"DefaultEncryptionScope\",\n xmlName: \"DefaultEncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n preventEncryptionScopeOverride: {\n serializedName: \"DenyEncryptionScopeOverride\",\n xmlName: \"DenyEncryptionScopeOverride\",\n type: {\n name: \"Boolean\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"ImmutableStorageWithVersioningEnabled\",\n xmlName: \"ImmutableStorageWithVersioningEnabled\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const KeyInfo: coreHttp.CompositeMapper = {\n serializedName: \"KeyInfo\",\n type: {\n name: \"Composite\",\n className: \"KeyInfo\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n required: true,\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const UserDelegationKey: coreHttp.CompositeMapper = {\n serializedName: \"UserDelegationKey\",\n type: {\n name: \"Composite\",\n className: \"UserDelegationKey\",\n modelProperties: {\n signedObjectId: {\n serializedName: \"SignedOid\",\n required: true,\n xmlName: \"SignedOid\",\n type: {\n name: \"String\"\n }\n },\n signedTenantId: {\n serializedName: \"SignedTid\",\n required: true,\n xmlName: \"SignedTid\",\n type: {\n name: \"String\"\n }\n },\n signedStartsOn: {\n serializedName: \"SignedStart\",\n required: true,\n xmlName: \"SignedStart\",\n type: {\n name: \"String\"\n }\n },\n signedExpiresOn: {\n serializedName: \"SignedExpiry\",\n required: true,\n xmlName: \"SignedExpiry\",\n type: {\n name: \"String\"\n }\n },\n signedService: {\n serializedName: \"SignedService\",\n required: true,\n xmlName: \"SignedService\",\n type: {\n name: \"String\"\n }\n },\n signedVersion: {\n serializedName: \"SignedVersion\",\n required: true,\n xmlName: \"SignedVersion\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobSegment: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobSegment\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobSegment\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n where: {\n serializedName: \"Where\",\n required: true,\n xmlName: \"Where\",\n type: {\n name: \"String\"\n }\n },\n blobs: {\n serializedName: \"Blobs\",\n required: true,\n xmlName: \"Blobs\",\n xmlIsWrapped: true,\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobItem: coreHttp.CompositeMapper = {\n 
serializedName: \"FilterBlobItem\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n tags: {\n serializedName: \"Tags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n }\n }\n }\n};\n\nexport const BlobTags: coreHttp.CompositeMapper = {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\",\n modelProperties: {\n blobTagSet: {\n serializedName: \"BlobTagSet\",\n required: true,\n xmlName: \"TagSet\",\n xmlIsWrapped: true,\n xmlElementName: \"Tag\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobTag\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobTag: coreHttp.CompositeMapper = {\n serializedName: \"BlobTag\",\n xmlName: \"Tag\",\n type: {\n name: \"Composite\",\n className: \"BlobTag\",\n modelProperties: {\n key: {\n serializedName: \"Key\",\n required: true,\n xmlName: \"Key\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const SignedIdentifier: coreHttp.CompositeMapper = {\n serializedName: \"SignedIdentifier\",\n xmlName: \"SignedIdentifier\",\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\",\n modelProperties: {\n id: {\n serializedName: \"Id\",\n required: true,\n xmlName: \"Id\",\n type: {\n name: \"String\"\n }\n },\n accessPolicy: {\n serializedName: \"AccessPolicy\",\n xmlName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\"\n }\n }\n }\n }\n};\n\nexport const AccessPolicy: coreHttp.CompositeMapper = {\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n },\n permissions: {\n serializedName: \"Permission\",\n xmlName: \"Permission\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsFlatSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsFlatSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsFlatSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\"\n }\n },\n continuationToken: {\n serializedName: 
\"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobFlatListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobFlatListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\",\n modelProperties: {\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobItemInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobItemInternal\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n required: true,\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n snapshot: {\n serializedName: \"Snapshot\",\n required: true,\n xmlName: \"Snapshot\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"VersionId\",\n xmlName: \"VersionId\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"IsCurrentVersion\",\n xmlName: \"IsCurrentVersion\",\n type: {\n name: \"Boolean\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n blobTags: {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n },\n objectReplicationMetadata: {\n serializedName: \"ObjectReplicationMetadata\",\n xmlName: \"OrMetadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n hasVersionsOnly: {\n serializedName: \"HasVersionsOnly\",\n xmlName: \"HasVersionsOnly\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobName: coreHttp.CompositeMapper = {\n serializedName: \"BlobName\",\n type: {\n name: \"Composite\",\n className: \"BlobName\",\n modelProperties: {\n encoded: {\n serializedName: \"Encoded\",\n xmlName: \"Encoded\",\n xmlIsAttribute: true,\n type: {\n name: \"Boolean\"\n }\n },\n content: {\n serializedName: \"content\",\n xmlName: \"content\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobPropertiesInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobPropertiesInternal\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\",\n modelProperties: {\n createdOn: {\n serializedName: \"Creation-Time\",\n xmlName: \"Creation-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n serializedName: \"Content-Length\",\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"Content-Type\",\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n },\n contentEncoding: {\n serializedName: \"Content-Encoding\",\n 
xmlName: \"Content-Encoding\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"Content-Language\",\n xmlName: \"Content-Language\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentDisposition: {\n serializedName: \"Content-Disposition\",\n xmlName: \"Content-Disposition\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"Cache-Control\",\n xmlName: \"Cache-Control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"BlobType\",\n xmlName: \"BlobType\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n copyId: {\n serializedName: \"CopyId\",\n xmlName: \"CopyId\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"CopyStatus\",\n xmlName: \"CopyStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n copySource: {\n serializedName: \"CopySource\",\n xmlName: \"CopySource\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"CopyProgress\",\n xmlName: \"CopyProgress\",\n type: {\n name: \"String\"\n }\n },\n copyCompletedOn: {\n serializedName: \"CopyCompletionTime\",\n xmlName: \"CopyCompletionTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"CopyStatusDescription\",\n xmlName: \"CopyStatusDescription\",\n type: {\n name: \"String\"\n }\n },\n serverEncrypted: {\n serializedName: \"ServerEncrypted\",\n xmlName: \"ServerEncrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n incrementalCopy: {\n serializedName: \"IncrementalCopy\",\n xmlName: \"IncrementalCopy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"DestinationSnapshot\",\n xmlName: \"DestinationSnapshot\",\n type: {\n name: \"String\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n accessTier: {\n serializedName: \"AccessTier\",\n xmlName: \"AccessTier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n },\n accessTierInferred: {\n serializedName: \"AccessTierInferred\",\n xmlName: \"AccessTierInferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"ArchiveStatus\",\n xmlName: \"ArchiveStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"rehydrate-pending-to-hot\",\n 
\"rehydrate-pending-to-cool\"\n ]\n }\n },\n customerProvidedKeySha256: {\n serializedName: \"CustomerProvidedKeySha256\",\n xmlName: \"CustomerProvidedKeySha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"EncryptionScope\",\n xmlName: \"EncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"AccessTierChangeTime\",\n xmlName: \"AccessTierChangeTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n tagCount: {\n serializedName: \"TagCount\",\n xmlName: \"TagCount\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry-Time\",\n xmlName: \"Expiry-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"Sealed\",\n xmlName: \"Sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"RehydratePriority\",\n xmlName: \"RehydratePriority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessedOn: {\n serializedName: \"LastAccessTime\",\n xmlName: \"LastAccessTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"ImmutabilityPolicyUntilDate\",\n xmlName: \"ImmutabilityPolicyUntilDate\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"ImmutabilityPolicyMode\",\n xmlName: \"ImmutabilityPolicyMode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"LegalHold\",\n xmlName: \"LegalHold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsHierarchySegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsHierarchySegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsHierarchySegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n delimiter: {\n serializedName: \"Delimiter\",\n xmlName: \"Delimiter\",\n type: {\n name: \"String\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobHierarchyListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobHierarchyListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\",\n modelProperties: {\n blobPrefixes: {\n serializedName: \"BlobPrefixes\",\n xmlName: \"BlobPrefixes\",\n xmlElementName: \"BlobPrefix\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\"\n }\n }\n }\n },\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n 
xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobPrefix: coreHttp.CompositeMapper = {\n serializedName: \"BlobPrefix\",\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n }\n }\n }\n};\n\nexport const BlockLookupList: coreHttp.CompositeMapper = {\n serializedName: \"BlockLookupList\",\n xmlName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockLookupList\",\n modelProperties: {\n committed: {\n serializedName: \"Committed\",\n xmlName: \"Committed\",\n xmlElementName: \"Committed\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n uncommitted: {\n serializedName: \"Uncommitted\",\n xmlName: \"Uncommitted\",\n xmlElementName: \"Uncommitted\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n latest: {\n serializedName: \"Latest\",\n xmlName: \"Latest\",\n xmlElementName: \"Latest\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlockList: coreHttp.CompositeMapper = {\n serializedName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockList\",\n modelProperties: {\n committedBlocks: {\n serializedName: \"CommittedBlocks\",\n xmlName: \"CommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n },\n uncommittedBlocks: {\n serializedName: \"UncommittedBlocks\",\n xmlName: \"UncommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const Block: coreHttp.CompositeMapper = {\n serializedName: \"Block\",\n type: {\n name: \"Composite\",\n className: \"Block\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n size: {\n serializedName: \"Size\",\n required: true,\n xmlName: \"Size\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const PageList: coreHttp.CompositeMapper = {\n serializedName: \"PageList\",\n type: {\n name: \"Composite\",\n className: \"PageList\",\n modelProperties: {\n pageRange: {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n xmlElementName: \"PageRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"PageRange\"\n }\n }\n }\n },\n clearRange: {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n xmlElementName: \"ClearRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ClearRange\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageRange: coreHttp.CompositeMapper = {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n type: {\n name: \"Composite\",\n className: \"PageRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: 
\"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ClearRange: coreHttp.CompositeMapper = {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n type: {\n name: \"Composite\",\n className: \"ClearRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const QueryRequest: coreHttp.CompositeMapper = {\n serializedName: \"QueryRequest\",\n xmlName: \"QueryRequest\",\n type: {\n name: \"Composite\",\n className: \"QueryRequest\",\n modelProperties: {\n queryType: {\n serializedName: \"QueryType\",\n required: true,\n xmlName: \"QueryType\",\n type: {\n name: \"String\"\n }\n },\n expression: {\n serializedName: \"Expression\",\n required: true,\n xmlName: \"Expression\",\n type: {\n name: \"String\"\n }\n },\n inputSerialization: {\n serializedName: \"InputSerialization\",\n xmlName: \"InputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n },\n outputSerialization: {\n serializedName: \"OutputSerialization\",\n xmlName: \"OutputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n }\n }\n }\n};\n\nexport const QuerySerialization: coreHttp.CompositeMapper = {\n serializedName: \"QuerySerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\",\n modelProperties: {\n format: {\n serializedName: \"Format\",\n xmlName: \"Format\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\"\n }\n }\n }\n }\n};\n\nexport const QueryFormat: coreHttp.CompositeMapper = {\n serializedName: \"QueryFormat\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"delimited\", \"json\", \"arrow\", \"parquet\"]\n }\n },\n delimitedTextConfiguration: {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\"\n }\n },\n jsonTextConfiguration: {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\"\n }\n },\n arrowConfiguration: {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\"\n }\n },\n parquetTextConfiguration: {\n serializedName: \"ParquetTextConfiguration\",\n xmlName: \"ParquetTextConfiguration\",\n type: {\n name: \"any\"\n }\n }\n }\n }\n};\n\nexport const DelimitedTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\",\n modelProperties: {\n columnSeparator: {\n serializedName: \"ColumnSeparator\",\n xmlName: \"ColumnSeparator\",\n type: {\n name: \"String\"\n }\n },\n fieldQuote: {\n serializedName: \"FieldQuote\",\n xmlName: \"FieldQuote\",\n type: {\n name: \"String\"\n }\n },\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n },\n escapeChar: {\n serializedName: \"EscapeChar\",\n xmlName: \"EscapeChar\",\n type: {\n name: \"String\"\n }\n },\n 
headersPresent: {\n serializedName: \"HeadersPresent\",\n xmlName: \"HasHeaders\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const JsonTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\",\n modelProperties: {\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ArrowConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\",\n modelProperties: {\n schema: {\n serializedName: \"Schema\",\n required: true,\n xmlName: \"Schema\",\n xmlIsWrapped: true,\n xmlElementName: \"Field\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ArrowField\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const ArrowField: coreHttp.CompositeMapper = {\n serializedName: \"ArrowField\",\n xmlName: \"Field\",\n type: {\n name: \"Composite\",\n className: \"ArrowField\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"String\"\n }\n },\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n precision: {\n serializedName: \"Precision\",\n xmlName: \"Precision\",\n type: {\n name: \"Number\"\n }\n },\n scale: {\n serializedName: \"Scale\",\n xmlName: \"Scale\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const 
ServiceGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n 
serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n isHierarchicalNamespaceEnabled: {\n serializedName: \"x-ms-is-hns-enabled\",\n xmlName: \"x-ms-is-hns-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: 
\"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesHeaders\",\n modelProperties: {\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n 
allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"x-ms-has-immutability-policy\",\n xmlName: \"x-ms-has-immutability-policy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"x-ms-has-legal-hold\",\n xmlName: \"x-ms-has-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n denyEncryptionScopeOverride: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"x-ms-immutable-storage-with-versioning-enabled\",\n xmlName: \"x-ms-immutable-storage-with-versioning-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyHeaders\",\n modelProperties: {\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerSetAccessPolicyHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerRenameExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n 
className: \"ContainerAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: 
\"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: 
\"Container_listBlobFlatSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadHeaders\",\n 
modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: 
\"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: 
\"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: \"x-ms-creation-time\",\n xmlName: \"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n isIncrementalCopy: {\n serializedName: \"x-ms-incremental-copy\",\n xmlName: \"x-ms-incremental-copy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"x-ms-copy-destination-snapshot\",\n xmlName: \"x-ms-copy-destination-snapshot\",\n type: {\n name: \"String\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n 
name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n accessTier: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n },\n accessTierInferred: {\n serializedName: \"x-ms-access-tier-inferred\",\n xmlName: \"x-ms-access-tier-inferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"x-ms-archive-status\",\n xmlName: \"x-ms-archive-status\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"x-ms-access-tier-change-time\",\n xmlName: \"x-ms-access-tier-change-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: 
\"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n 
},\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiry: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyExceptionHeaders: 
coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n 
versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: 
\"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n 
name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotHeaders\",\n modelProperties: {\n snapshot: {\n serializedName: \"x-ms-snapshot\",\n xmlName: \"x-ms-snapshot\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: 
\"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n defaultValue: \"success\",\n isConstant: true,\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n 
name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n 
}\n};\n\nexport const BlobGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletionTime: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n 
\"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsHeaders\",\n type: {\n name: 
\"Composite\",\n className: \"BlobSetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: 
\"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesExceptionHeaders: coreHttp.CompositeMapper = {\n 
serializedName: \"PageBlob_clearPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: 
\"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeExceptionHeaders: coreHttp.CompositeMapper = {\n 
serializedName: \"PageBlob_resizeExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"PageBlobCopyIncrementalExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n 
},\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"AppendBlobAppendBlockFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadExceptionHeaders\",\n type: {\n 
name: \"Composite\",\n className: \"BlockBlobUploadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n 
encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: 
{\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js new file mode 100644 index 00000000..5aaeefaf --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js @@ -0,0 +1,1608 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import { QueryCollectionFormat } from "@azure/core-http"; +import { BlobServiceProperties as BlobServicePropertiesMapper, KeyInfo as KeyInfoMapper, QueryRequest as QueryRequestMapper, BlobTags as BlobTagsMapper, BlockLookupList as BlockLookupListMapper } from "../models/mappers"; +export const contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +export const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServicePropertiesMapper +}; +export const accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +export const url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true +}; +export const restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +export const comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } +}; +export const version = { + parameterPath: "version", + mapper: { + defaultValue: "2021-06-08", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } +}; +export const requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; +export const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +export const comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } +}; +export const marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } +}; +export const maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } 
+ } +}; +export const include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: QueryCollectionFormat.Csv +}; +export const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfoMapper +}; +export const comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +export const body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +export const comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" + } + } +}; +export const multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" + } + } +}; +export const comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" + } + } +}; +export const restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +export const metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + } +}; +export const access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + } +}; +export const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } +}; +export const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } +}; +export const leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +export const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: 
"If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } + } +}; +export const comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } + } +}; +export const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } + } +}; +export const comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } +}; +export const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } +}; +export const comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" + } + } +}; +export const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +export const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +export const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: 
"x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } + } +}; +export const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +export const include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } + } + }, + collectionFormat: QueryCollectionFormat.Csv +}; +export const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String" + } + } +}; +export const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" + } + } +}; +export const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" + } + } +}; +export const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +export const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" + } + } +}; +export const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } +}; +export const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" + } + } +}; +export const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + } +}; +export const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" + } + } +}; +export const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" + } + } +}; +export const ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + 
serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } +}; +export const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } +}; +export const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] + } + } +}; +export const blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" + } + } +}; +export const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" + } + } +}; +export const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" + } + } +}; +export const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" + } + } +}; +export const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" + } + } +}; +export const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + } +}; +export const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" + } + } +}; +export const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" + } + } +}; +export const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } + } +}; +export const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } +}; +export const comp13 = { 
+ parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +export const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + } +}; +export const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +export const rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + } +}; +export const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String" + } + } +}; +export const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String" + } + } +}; +export const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String" + } + } +}; +export const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +export const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String" + } + } +}; +export const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean" + } + } +}; +export const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +export const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, 
+ serializedName: "x-ms-requires-sync", + type: { + name: "String" + } + } +}; +export const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray" + } + } +}; +export const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String" + } + } +}; +export const copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"] + } + } +}; +export const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String" + } + } +}; +export const copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String" + } + } +}; +export const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +export const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequestMapper +}; +export const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTagsMapper +}; +export const transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + } +}; +export const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } +}; +export const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +export const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + } +}; +export const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + } +}; +export const contentType1 = { + parameterPath: 
["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +export const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +export const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +export const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +export const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number" + } + } +}; +export const ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number" + } + } +}; +export const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number" + } + } +}; +export const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +export const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +export const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +export const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray" + } + } +}; +export const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +export const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String" + } + } +}; +export const prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String" + } + } +}; +export const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: 
{ + name: "Enum", + allowedValues: ["max", "update", "increment"] + } + } +}; +export const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +export const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number" + } + } +}; +export const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" + } + } +}; +export const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +export const comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +export const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean" + } + } +}; +export const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String" + } + } +}; +export const blocks = { + parameterPath: "blocks", + mapper: BlockLookupListMapper +}; +export const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] + } + } +}; +//# sourceMappingURL=parameters.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js.map new file mode 100644 index 00000000..d1aed4e7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"parameters.js","sourceRoot":"","sources":["../../../../../../src/generated/src/models/parameters.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAIL,qBAAqB,EACtB,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EACL,qBAAqB,IAAI,2BAA2B,EACpD,OAAO,IAAI,aAAa,EACxB,YAAY,IAAI,kBAAkB,EAClC,QAAQ,IAAI,cAAc,EAC1B,eAAe,IAAI,qBAAqB,EACzC,MAAM,mBAAmB,CAAC;AAE3B,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,cAAc;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAAuB;IACvD,aAAa,EAAE,uBAAuB;IACtC,MAAM,EAAE,2BAA2B;CACpC,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,QAAQ;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,GAAG,GAA0B;IACxC,aAAa,EAAE,KAAK;IACpB,MAAM,EAAE;QACN,cAAc,EAAE,KAAK;QACrB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,KAAK;QACd,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;IACD,YAAY,EAAE,IAAI;CACnB,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA4B;IAC9C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,SAAS;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAA4B;IAC3C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,YAAY;QAC1B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA4B;IACvD,aAAa,EAAE,CAAC,SAAS,EAAE,kBAAkB,CAAC;IAC9C,MAAM,EAAE;QACN,WAAW,EAAE;YACX,gBAAgB,EAAE,CAAC;SACpB;QACD,cAAc,EAAE,SAAS;QACzB,OAAO,EAAE,SAAS;QAClB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,YAAY;QAC1B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,cAAc;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,QAAQ;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,QAAQ;QACxB,OAAO,EAAE,QAAQ;QACjB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,QAAQ;QACxB,OAAO,EAAE,QAAQ;QACjB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAA4B;IAClD,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,WAAW,EAAE;YACX,gBAAgB,EAAE,CAAC;SACpB;QACD,cAAc,EAAE,YAAY;QAC5B,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA4B;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,SAAS;QACzB,OAAO,EAAE,SAAS;QAClB,cAAc,EAAE,2BAA2B;QAC3C,IAAI,EAAE;YACJ,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE;gBACP,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,SAAS,EAAE,QAAQ,CAAC;iBACjD;aACF;SACF;KACF;IACD,gBAAgB,EAAE,qBAAqB,CAAC,GAAG;CAC5C,CAAC;AAEF,MAAM,CAAC
,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE,aAAa;CACtB,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,mBAAmB;QACjC,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,SAAS;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAuB;IACtC,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,cAAc,EAAE,MAAM;QACtB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,MAAM;QACf,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,eAAe;IAC9B,MAAM,EAAE;QACN,cAAc,EAAE,gBAAgB;QAChC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,gBAAgB;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,sBAAsB;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,cAAc;QACvB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC;IACnC,MAAM,EAAE;QACN,cAAc,EAAE,OAAO;QACvB,OAAO,EAAE,OAAO;QAChB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,SAAS;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,YAAY;YAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;SACpC;QACD,sBAAsB,EAAE,YAAY;KACrC;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;SACrC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,aAAa,EAAE;QACb,SAAS;QACT,0BAA0B;QAC1B,wBAAwB;KACzB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAAuB;IAChE,aAAa,EAAE;QACb,SAAS;QACT,0BAA0B;QAC1B,gCAAgC;KACjC;IACD,MAAM,EAAE;QACN,cAAc,EAAE,qCAAqC;QACrD,OAAO,EAAE,qCAAqC;QAC9C,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,CAAC,SAAS,EAAE,uBAAuB,EAAE,SAAS,CAAC;IAC9D,MAAM,EAAE;QACN,cAAc,EAAE,eAAe;QAC/B,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,iBAAiB,CAAC;IACzE,MAAM,EAAE;QACN,cAAc,EAAE,mBAAmB;QACnC,OAAO,EAAE,mBAAmB;QAC5B,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,mBAAmB,CAAC;IAC3E,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,KAAK;QACnB,UA
AU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;IAC1C,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,mBAAmB;QAC5B,YAAY,EAAE,IAAI;QAClB,cAAc,EAAE,kBAAkB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE;gBACP,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,kBAAkB;iBAC9B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,CAAC,SAAS,EAAE,sBAAsB,CAAC;IAClD,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;IACrD,MAAM,EAAE;QACN,cAAc,EAAE,gCAAgC;QAChD,OAAO,EAAE,gCAAgC;QACzC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,qBAAqB;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,eAAe,CAAC;IAC3C,MAAM,EAAE;QACN,cAAc,EAAE,sBAAsB;QACtC,OAAO,EAAE,sBAAsB;QAC/B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,cAAc,EAAE,eAAe;QAC/B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAuB;IAClD,aAAa,EAAE,iBAAiB;IAChC,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,QAAQ,EAAE,IAAI;QACd,OAAO,
EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,SAAS;QACzB,OAAO,EAAE,SAAS;QAClB,cAAc,EAAE,sBAAsB;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE;gBACP,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,MAAM;wBACN,SAAS;wBACT,UAAU;wBACV,WAAW;wBACX,kBAAkB;wBAClB,UAAU;wBACV,MAAM;wBACN,oBAAoB;wBACpB,WAAW;wBACX,qBAAqB;qBACtB;iBACF;aACF;SACF;KACF;IACD,gBAAgB,EAAE,qBAAqB,CAAC,GAAG;CAC5C,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA4B;IAChD,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,UAAU;QAC1B,OAAO,EAAE,UAAU;QACnB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA4B;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAuB;IACvC,aAAa,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC;IACnC,MAAM,EAAE;QACN,cAAc,EAAE,YAAY;QAC5B,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAuB;IACpD,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;IAChD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,CAAC,SAAS,EAAE,sBAAsB,CAAC;IAClD,MAAM,EAAE;QACN,cAAc,EAAE,8BAA8B;QAC9C,OAAO,EAAE,8BAA8B;QACvC,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,eAAe,CAAC;IACtD,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,qBAAqB,CAAC;IAC5D,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,qBAAqB,CAAC;IAC5D,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,SAAS,CAAC;IACjE,MAAM,EAAE;QACN,cAAc,EAAE,UAAU;QAC1B,OAAO,EAAE,UAAU;QACnB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,aAAa,CAAC;IACrE,MAAM,EAAE;QACN,cAAc,EAAE,eAAe;QAC/B,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,QAAQ,CAAC;IAChE,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,cAAc;QACvB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,uBAAuB;QACvC,OAAO,EAAE,uBAAuB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;SACnC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAA4B;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;IAC5C,MAAM,EAAE;QACN,cAAc,EAAE,YAAY;QAC5B,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,eAAe;IAC9B,MAAM,EAAE;QACN,cAAc,EAAE,oBAAoB;QACpC,QAAQ,EAAE,IAAI
;QACd,OAAO,EAAE,oBAAoB;QAC7B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAuB;IAClD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,kBAAkB,CAAC;IACjE,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,iBAAiB,CAAC;IAChE,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,gBAAgB,CAAC;IAC/D,MAAM,EAAE;QACN,cAAc,EAAE,uBAAuB;QACvC,OAAO,EAAE,uBAAuB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,qBAAqB,CAAC;IACpE,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,qBAAqB,CAAC;IACpE,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,wBAAwB,CAAC;IACvE,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,sBAAsB;QACpC,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAuB;IAC1D,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,CAAC;IACtD,MAAM,EAAE;QACN,cAAc,EAAE,qCAAqC;QACrD,OAAO,EAAE,qCAAqC;QAC9C,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,aAAa,EAAE,CAAC,SAAS,EAAE,wBAAwB,CAAC;IACpD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;SACjD;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,cAAc,EAAE,iBAAiB;QACjC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,iBAAiB;QAC1B,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,uBAAuB;QACvC,OAAO,EAAE,uBAAuB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAuB;IACtC,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;IAClC,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE;gBACb,IAAI;gBACJ,IAAI;gBACJ,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,MAAM;gBACN,SAAS;aACV;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE,CAAC,SAAS,EAAE,mBAAmB,CAAC;IAC/C,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;SACpC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAAuB;IACvD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,uBAAu
B;KACxB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,yBAAyB;KAC1B;IACD,MAAM,EAAE;QACN,cAAc,EAAE,iCAAiC;QACjD,OAAO,EAAE,iCAAiC;QAC1C,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,eAAe,CAAC;IAC7E,MAAM,EAAE;QACN,cAAc,EAAE,sBAAsB;QACtC,OAAO,EAAE,sBAAsB;QAC/B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,mBAAmB;KACpB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,cAAc,CAAC;IAC5E,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAuB;IAC5C,aAAa,EAAE,YAAY;IAC3B,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;IAC5C,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,gBAAgB;QAChC,OAAO,EAAE,gBAAgB;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAuB;IAC5C,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,iBAAiB;QACjC,OAAO,EAAE,iBAAiB;QAC1B,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,iBAAiB;IAChC,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,oBAAoB;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAuB;IAClD,aAAa,EAAE,CAAC,SAAS,EAAE,kBAAkB,CAAC;IAC9C,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;IACrD,MAAM,EAAE;QACN,cAAc,EAAE,gCAAgC;QAChD,OAAO,EAAE,gCAAgC;QACzC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;IAC5C,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;SACnC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,yBAAyB;IACxC,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,kBAAkB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,cAAc,EAAE,QAAQ;QACxB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,QAAQ;QACjB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAuB;IACvC,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE;gBACb,IAAI;gBACJ,IAAI;gBACJ,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBA
CL,KAAK;gBACL,KAAK;gBACL,MAAM;gBACN,SAAS;aACV;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;IAC1C,MAAM,EAAE,kBAAkB;CAC3B,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAuB;IACtC,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;IAClC,MAAM,EAAE,cAAc;CACvB,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;IACrD,MAAM,EAAE;QACN,cAAc,EAAE,aAAa;QAC7B,OAAO,EAAE,aAAa;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAAuB;IAC3D,aAAa,EAAE,CAAC,SAAS,EAAE,2BAA2B,CAAC;IACvD,MAAM,EAAE;QACN,cAAc,EAAE,oBAAoB;QACpC,OAAO,EAAE,oBAAoB;QAC7B,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,gBAAgB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE,mBAAmB;IAClC,MAAM,EAAE;QACN,cAAc,EAAE,0BAA0B;QAC1C,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,0BAA0B;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAuB;IACpD,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;IAChD,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,YAAY,EAAE,0BAA0B;QACxC,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,cAAc;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAuB;IACvC,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,cAAc,EAAE,MAAM;QACtB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,MAAM;QACf,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,QAAQ;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,iBAAiB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAAuB;IACnE,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,mCAAmC;KACpC;IACD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAuB;IAC1D,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,0BAA0B;KAC3B;IACD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,yBAAyB;KAC1B;IACD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAuB;IAC5C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,iBAAiB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;
AAEF,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,aAAa;IAC5B,MAAM,EAAE;QACN,cAAc,EAAE,mBAAmB;QACnC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,mBAAmB;QAC5B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAuB;IACpD,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;IAChD,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,OAAO;IACtB,MAAM,EAAE;QACN,cAAc,EAAE,YAAY;QAC5B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA4B;IACnD,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;IAC1C,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,cAAc;QACvB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,sBAAsB;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,KAAK,EAAE,QAAQ,EAAE,WAAW,CAAC;SAC9C;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,YAAY;QAC1B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,gBAAgB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,aAAa;QAC3B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,SAAS,CAAC;IACvE,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,gBAAgB;KACjB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,cAAc,EAAE,mBAAmB;QACnC,OAAO,EAAE,mBAAmB;QAC5B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,gBAAgB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAuB;IAC1D,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,CAAC;IACtD,MAAM,EAAE;QACN,cAAc,EAAE,kCAAkC;QAClD,OAAO,EAAE,kCAAkC;QAC3C,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA4B;IAC9C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,cAAc,EAAE,SAAS;QACzB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,SAAS;QAClB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE,qBAAqB;CAC9B,CAAC;
AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,cAAc,EAAE,eAAe;QAC/B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,CAAC;SACnD;KACF;CACF,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n OperationParameter,\n OperationURLParameter,\n OperationQueryParameter,\n QueryCollectionFormat\n} from \"@azure/core-http\";\nimport {\n BlobServiceProperties as BlobServicePropertiesMapper,\n KeyInfo as KeyInfoMapper,\n QueryRequest as QueryRequestMapper,\n BlobTags as BlobTagsMapper,\n BlockLookupList as BlockLookupListMapper\n} from \"../models/mappers\";\n\nexport const contentType: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobServiceProperties: OperationParameter = {\n parameterPath: \"blobServiceProperties\",\n mapper: BlobServicePropertiesMapper\n};\n\nexport const accept: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const url: OperationURLParameter = {\n parameterPath: \"url\",\n mapper: {\n serializedName: \"url\",\n required: true,\n xmlName: \"url\",\n type: {\n name: \"String\"\n }\n },\n skipEncoding: true\n};\n\nexport const restype: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"service\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"properties\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const timeoutInSeconds: OperationQueryParameter = {\n parameterPath: [\"options\", \"timeoutInSeconds\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"timeout\",\n xmlName: \"timeout\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const version: OperationParameter = {\n parameterPath: \"version\",\n mapper: {\n defaultValue: \"2021-06-08\",\n isConstant: true,\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const requestId: OperationParameter = {\n parameterPath: [\"options\", \"requestId\"],\n mapper: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const accept1: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp1: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"stats\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp2: OperationQueryParameter = {\n parameterPath: \"comp\",\n 
mapper: {\n defaultValue: \"list\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prefix: OperationQueryParameter = {\n parameterPath: [\"options\", \"prefix\"],\n mapper: {\n serializedName: \"prefix\",\n xmlName: \"prefix\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const marker: OperationQueryParameter = {\n parameterPath: [\"options\", \"marker\"],\n mapper: {\n serializedName: \"marker\",\n xmlName: \"marker\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxPageSize: OperationQueryParameter = {\n parameterPath: [\"options\", \"maxPageSize\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"maxresults\",\n xmlName: \"maxresults\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const include: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListContainersIncludeType\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\"metadata\", \"deleted\", \"system\"]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const keyInfo: OperationParameter = {\n parameterPath: \"keyInfo\",\n mapper: KeyInfoMapper\n};\n\nexport const comp3: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"userdelegationkey\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype1: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"account\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const comp4: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"batch\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const contentLength: OperationParameter = {\n parameterPath: \"contentLength\",\n mapper: {\n serializedName: \"Content-Length\",\n required: true,\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const multipartContentType: OperationParameter = {\n parameterPath: \"multipartContentType\",\n mapper: {\n serializedName: \"Content-Type\",\n required: true,\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp5: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blobs\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const where: OperationQueryParameter = {\n parameterPath: [\"options\", \"where\"],\n mapper: {\n serializedName: \"where\",\n xmlName: \"where\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype2: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"container\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const metadata: OperationParameter = {\n parameterPath: [\"options\", \"metadata\"],\n mapper: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n }\n};\n\nexport const access: 
OperationParameter = {\n parameterPath: [\"options\", \"access\"],\n mapper: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n }\n};\n\nexport const defaultEncryptionScope: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"defaultEncryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const preventEncryptionScopeOverride: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"preventEncryptionScopeOverride\"\n ],\n mapper: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const leaseId: OperationParameter = {\n parameterPath: [\"options\", \"leaseAccessConditions\", \"leaseId\"],\n mapper: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifModifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifModifiedSince\"],\n mapper: {\n serializedName: \"If-Modified-Since\",\n xmlName: \"If-Modified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const ifUnmodifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifUnmodifiedSince\"],\n mapper: {\n serializedName: \"If-Unmodified-Since\",\n xmlName: \"If-Unmodified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const comp6: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"metadata\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp7: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"acl\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const containerAcl: OperationParameter = {\n parameterPath: [\"options\", \"containerAcl\"],\n mapper: {\n serializedName: \"containerAcl\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n }\n};\n\nexport const comp8: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"undelete\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerName: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerName\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-name\",\n xmlName: \"x-ms-deleted-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerVersion: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerVersion\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-version\",\n xmlName: \"x-ms-deleted-container-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp9: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"rename\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContainerName: OperationParameter = {\n parameterPath: 
\"sourceContainerName\",\n mapper: {\n serializedName: \"x-ms-source-container-name\",\n required: true,\n xmlName: \"x-ms-source-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"sourceLeaseId\"],\n mapper: {\n serializedName: \"x-ms-source-lease-id\",\n xmlName: \"x-ms-source-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp10: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"lease\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"acquire\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const duration: OperationParameter = {\n parameterPath: [\"options\", \"duration\"],\n mapper: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const proposedLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"proposedLeaseId\"],\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action1: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"release\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const leaseId1: OperationParameter = {\n parameterPath: \"leaseId\",\n mapper: {\n serializedName: \"x-ms-lease-id\",\n required: true,\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action2: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"renew\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action3: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"break\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const breakPeriod: OperationParameter = {\n parameterPath: [\"options\", \"breakPeriod\"],\n mapper: {\n serializedName: \"x-ms-lease-break-period\",\n xmlName: \"x-ms-lease-break-period\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const action4: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"change\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const proposedLeaseId1: OperationParameter = {\n parameterPath: \"proposedLeaseId\",\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n required: true,\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const include1: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListBlobsIncludeItem\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"copy\",\n \"deleted\",\n \"metadata\",\n \"snapshots\",\n \"uncommittedblobs\",\n \"versions\",\n \"tags\",\n \"immutabilitypolicy\",\n \"legalhold\",\n \"deletedwithversions\"\n ]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const delimiter: OperationQueryParameter = {\n parameterPath: \"delimiter\",\n 
mapper: {\n serializedName: \"delimiter\",\n required: true,\n xmlName: \"delimiter\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const snapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"snapshot\"],\n mapper: {\n serializedName: \"snapshot\",\n xmlName: \"snapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const versionId: OperationQueryParameter = {\n parameterPath: [\"options\", \"versionId\"],\n mapper: {\n serializedName: \"versionid\",\n xmlName: \"versionid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const range: OperationParameter = {\n parameterPath: [\"options\", \"range\"],\n mapper: {\n serializedName: \"x-ms-range\",\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const rangeGetContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentMD5\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-md5\",\n xmlName: \"x-ms-range-get-content-md5\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const rangeGetContentCRC64: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentCRC64\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-crc64\",\n xmlName: \"x-ms-range-get-content-crc64\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionKey: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKey\"],\n mapper: {\n serializedName: \"x-ms-encryption-key\",\n xmlName: \"x-ms-encryption-key\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionKeySha256: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKeySha256\"],\n mapper: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionAlgorithm: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionAlgorithm\"],\n mapper: {\n serializedName: \"x-ms-encryption-algorithm\",\n xmlName: \"x-ms-encryption-algorithm\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifMatch\"],\n mapper: {\n serializedName: \"If-Match\",\n xmlName: \"If-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifNoneMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifNoneMatch\"],\n mapper: {\n serializedName: \"If-None-Match\",\n xmlName: \"If-None-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifTags: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifTags\"],\n mapper: {\n serializedName: \"x-ms-if-tags\",\n xmlName: \"x-ms-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deleteSnapshots: OperationParameter = {\n parameterPath: [\"options\", \"deleteSnapshots\"],\n mapper: {\n serializedName: \"x-ms-delete-snapshots\",\n xmlName: \"x-ms-delete-snapshots\",\n type: {\n name: \"Enum\",\n allowedValues: [\"include\", \"only\"]\n }\n }\n};\n\nexport const blobDeleteType: OperationQueryParameter = {\n parameterPath: [\"options\", \"blobDeleteType\"],\n mapper: {\n serializedName: \"deletetype\",\n xmlName: \"deletetype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp11: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"expiry\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const 
expiryOptions: OperationParameter = {\n parameterPath: \"expiryOptions\",\n mapper: {\n serializedName: \"x-ms-expiry-option\",\n required: true,\n xmlName: \"x-ms-expiry-option\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiresOn: OperationParameter = {\n parameterPath: [\"options\", \"expiresOn\"],\n mapper: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobCacheControl: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobCacheControl\"],\n mapper: {\n serializedName: \"x-ms-blob-cache-control\",\n xmlName: \"x-ms-blob-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentType: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentType\"],\n mapper: {\n serializedName: \"x-ms-blob-content-type\",\n xmlName: \"x-ms-blob-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentMD5\"],\n mapper: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobContentEncoding: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentEncoding\"],\n mapper: {\n serializedName: \"x-ms-blob-content-encoding\",\n xmlName: \"x-ms-blob-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLanguage: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentLanguage\"],\n mapper: {\n serializedName: \"x-ms-blob-content-language\",\n xmlName: \"x-ms-blob-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentDisposition: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentDisposition\"],\n mapper: {\n serializedName: \"x-ms-blob-content-disposition\",\n xmlName: \"x-ms-blob-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp12: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"immutabilityPolicies\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const immutabilityPolicyExpiry: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyExpiry\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const immutabilityPolicyMode: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyMode\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n};\n\nexport const comp13: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"legalhold\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const legalHold: OperationParameter = {\n parameterPath: \"legalHold\",\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n required: true,\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionScope: OperationParameter = {\n parameterPath: [\"options\", \"encryptionScope\"],\n mapper: {\n serializedName: 
\"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp14: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"snapshot\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier: OperationParameter = {\n parameterPath: [\"options\", \"tier\"],\n mapper: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n }\n};\n\nexport const rehydratePriority: OperationParameter = {\n parameterPath: [\"options\", \"rehydratePriority\"],\n mapper: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n }\n};\n\nexport const sourceIfModifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfModifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-modified-since\",\n xmlName: \"x-ms-source-if-modified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfUnmodifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfUnmodifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-unmodified-since\",\n xmlName: \"x-ms-source-if-unmodified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfMatch: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfMatch\"],\n mapper: {\n serializedName: \"x-ms-source-if-match\",\n xmlName: \"x-ms-source-if-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfNoneMatch: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfNoneMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-none-match\",\n xmlName: \"x-ms-source-if-none-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfTags: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfTags\"],\n mapper: {\n serializedName: \"x-ms-source-if-tags\",\n xmlName: \"x-ms-source-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySource: OperationParameter = {\n parameterPath: \"copySource\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobTagsString: OperationParameter = {\n parameterPath: [\"options\", \"blobTagsString\"],\n mapper: {\n serializedName: \"x-ms-tags\",\n xmlName: \"x-ms-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sealBlob: OperationParameter = {\n parameterPath: [\"options\", \"sealBlob\"],\n mapper: {\n serializedName: \"x-ms-seal-blob\",\n xmlName: \"x-ms-seal-blob\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const legalHold1: OperationParameter = {\n parameterPath: [\"options\", \"legalHold\"],\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const xMsRequiresSync: OperationParameter = {\n parameterPath: \"xMsRequiresSync\",\n mapper: {\n defaultValue: \"true\",\n isConstant: 
true,\n serializedName: \"x-ms-requires-sync\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentMD5\"],\n mapper: {\n serializedName: \"x-ms-source-content-md5\",\n xmlName: \"x-ms-source-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const copySourceAuthorization: OperationParameter = {\n parameterPath: [\"options\", \"copySourceAuthorization\"],\n mapper: {\n serializedName: \"x-ms-copy-source-authorization\",\n xmlName: \"x-ms-copy-source-authorization\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceTags: OperationParameter = {\n parameterPath: [\"options\", \"copySourceTags\"],\n mapper: {\n serializedName: \"x-ms-copy-source-tag-option\",\n xmlName: \"x-ms-copy-source-tag-option\",\n type: {\n name: \"Enum\",\n allowedValues: [\"REPLACE\", \"COPY\"]\n }\n }\n};\n\nexport const comp15: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"copy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyActionAbortConstant: OperationParameter = {\n parameterPath: \"copyActionAbortConstant\",\n mapper: {\n defaultValue: \"abort\",\n isConstant: true,\n serializedName: \"x-ms-copy-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyId: OperationQueryParameter = {\n parameterPath: \"copyId\",\n mapper: {\n serializedName: \"copyid\",\n required: true,\n xmlName: \"copyid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp16: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tier\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier1: OperationParameter = {\n parameterPath: \"tier\",\n mapper: {\n serializedName: \"x-ms-access-tier\",\n required: true,\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n }\n};\n\nexport const queryRequest: OperationParameter = {\n parameterPath: [\"options\", \"queryRequest\"],\n mapper: QueryRequestMapper\n};\n\nexport const comp17: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"query\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp18: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tags\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tags: OperationParameter = {\n parameterPath: [\"options\", \"tags\"],\n mapper: BlobTagsMapper\n};\n\nexport const transactionalContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentMD5\"],\n mapper: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const transactionalContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobType: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"PageBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: 
\"String\"\n }\n }\n};\n\nexport const blobContentLength: OperationParameter = {\n parameterPath: \"blobContentLength\",\n mapper: {\n serializedName: \"x-ms-blob-content-length\",\n required: true,\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const blobSequenceNumber: OperationParameter = {\n parameterPath: [\"options\", \"blobSequenceNumber\"],\n mapper: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const contentType1: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/octet-stream\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body1: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const accept2: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp19: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"page\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const pageWrite: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"update\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThanOrEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThanOrEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-le\",\n xmlName: \"x-ms-if-sequence-number-le\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThan: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThan\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-lt\",\n xmlName: \"x-ms-if-sequence-number-lt\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const ifSequenceNumberEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-eq\",\n xmlName: \"x-ms-if-sequence-number-eq\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const pageWrite1: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"clear\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceUrl: OperationParameter = {\n parameterPath: \"sourceUrl\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceRange: OperationParameter = {\n parameterPath: \"sourceRange\",\n mapper: {\n serializedName: \"x-ms-source-range\",\n required: true,\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-source-content-crc64\",\n xmlName: \"x-ms-source-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n 
}\n};\n\nexport const range1: OperationParameter = {\n parameterPath: \"range\",\n mapper: {\n serializedName: \"x-ms-range\",\n required: true,\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp20: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"pagelist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevsnapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"prevsnapshot\"],\n mapper: {\n serializedName: \"prevsnapshot\",\n xmlName: \"prevsnapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevSnapshotUrl: OperationParameter = {\n parameterPath: [\"options\", \"prevSnapshotUrl\"],\n mapper: {\n serializedName: \"x-ms-previous-snapshot-url\",\n xmlName: \"x-ms-previous-snapshot-url\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sequenceNumberAction: OperationParameter = {\n parameterPath: \"sequenceNumberAction\",\n mapper: {\n serializedName: \"x-ms-sequence-number-action\",\n required: true,\n xmlName: \"x-ms-sequence-number-action\",\n type: {\n name: \"Enum\",\n allowedValues: [\"max\", \"update\", \"increment\"]\n }\n }\n};\n\nexport const comp21: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"incrementalcopy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType1: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"AppendBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp22: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"appendblock\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxSize: OperationParameter = {\n parameterPath: [\"options\", \"appendPositionAccessConditions\", \"maxSize\"],\n mapper: {\n serializedName: \"x-ms-blob-condition-maxsize\",\n xmlName: \"x-ms-blob-condition-maxsize\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const appendPosition: OperationParameter = {\n parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n \"appendPosition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-appendpos\",\n xmlName: \"x-ms-blob-condition-appendpos\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const sourceRange1: OperationParameter = {\n parameterPath: [\"options\", \"sourceRange\"],\n mapper: {\n serializedName: \"x-ms-source-range\",\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp23: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"seal\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType2: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"BlockBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceBlobProperties: OperationParameter = {\n parameterPath: [\"options\", \"copySourceBlobProperties\"],\n mapper: {\n serializedName: \"x-ms-copy-source-blob-properties\",\n xmlName: \"x-ms-copy-source-blob-properties\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const comp24: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"block\",\n isConstant: true,\n 
serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blockId: OperationQueryParameter = {\n parameterPath: \"blockId\",\n mapper: {\n serializedName: \"blockid\",\n required: true,\n xmlName: \"blockid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blocks: OperationParameter = {\n parameterPath: \"blocks\",\n mapper: BlockLookupListMapper\n};\n\nexport const comp25: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blocklist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const listType: OperationQueryParameter = {\n parameterPath: \"listType\",\n mapper: {\n defaultValue: \"committed\",\n serializedName: \"blocklisttype\",\n required: true,\n xmlName: \"blocklisttype\",\n type: {\n name: \"Enum\",\n allowedValues: [\"committed\", \"uncommitted\", \"all\"]\n }\n }\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js new file mode 100644 index 00000000..7507b155 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js @@ -0,0 +1,237 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a AppendBlob. */ +export class AppendBlob { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. 
+ * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + const operationArguments = { + sourceUrl, + contentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. + */ + seal(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType1 + ], + isXML: true, + serializer: xmlSerializer +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.maxSize, + Parameters.appendPosition + ], + mediaType: "binary", + serializer +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: 
[Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.transactionalContentMD5, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.maxSize, + Parameters.appendPosition, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.AppendBlobSealHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobSealExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.appendPosition + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=appendBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js.map new file mode 100644 index 00000000..662f0016 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"appendBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/appendBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAanD,uCAAuC;AACvC,MAAM,OAAO,UAAU;IAGrB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,aAAqB,EACrB,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACiB,CAAC;IACzC,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACiB,CAAC;IAC9C,CAAC;IAED;;;;;;;;OAQG;IACH,kBAAkB,CAChB,SAAiB,EACjB,aAAqB,EACrB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACiB,CAAC;IACrD,CAAC;IAED;;;;OAIG;IACH,IAAI,CACF,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iBAAiB,CACiB,CAAC;IACvC,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,cAAc;KAC1B;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,+BAA+B,GAA2B;IAC9D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAA
E,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,YAAY;KACxB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,iBAAiB,GAA2B;IAChD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,cAAc;KAC1B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n AppendBlobCreateOptionalParams,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockOptionalParams,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlOptionalParams,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobSealOptionalParams,\n AppendBlobSealResponse\n} from \"../models\";\n\n/** Class representing a AppendBlob. */\nexport class AppendBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class AppendBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create Append Blob operation creates a new append blob.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n options?: AppendBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob. The\n * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to\n * AppendBlob. 
Append Block is supported only on version 2015-02-21 version or later.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n appendBlock(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: AppendBlobAppendBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob where\n * the contents are read from a source url. The Append Block operation is permitted only if the blob\n * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version\n * 2015-02-21 version or later.\n * @param sourceUrl Specify a URL to the copy source.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n appendBlockFromUrl(\n sourceUrl: string,\n contentLength: number,\n options?: AppendBlobAppendBlockFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version\n * 2019-12-12 version or later.\n * @param options The options parameters.\n */\n seal(\n options?: AppendBlobSealOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n sealOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst appendBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n 
responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.maxSize,\n Parameters.appendPosition\n ],\n mediaType: \"binary\",\n serializer\n};\nconst appendBlockFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.transactionalContentMD5,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.maxSize,\n Parameters.appendPosition,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst sealOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.AppendBlobSealHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobSealExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.appendPosition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js new file mode 100644 index 00000000..6810389a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js @@ -0,0 +1,1079 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a Blob. */ +export class Blob { + /** + * Initialize a new instance of the class Blob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. 
+ */ + setExpiry(expiryOptions, options) { + const operationArguments = { + expiryOptions, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. + */ + setHttpHeaders(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + const operationArguments = { + legalHold, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. 
+ * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + const operationArguments = { + copyId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + const operationArguments = { + tier, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
+ */ + setTags(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDownloadExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.rangeGetContentMD5, + Parameters.rangeGetContentCRC64, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: Mappers.BlobGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.BlobDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.blobDeleteType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobUndeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobUndeleteExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; 
+const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetExpiryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetExpiryExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.expiryOptions, + Parameters.expiresOn + ], + isXML: true, + serializer: xmlSerializer +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifUnmodifiedSince, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.legalHold + ], + isXML: true, + serializer: xmlSerializer +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetMetadataExceptionHeaders + } + }, + 
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobRenewLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobChangeLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + 
headersMapper: Mappers.BlobBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobBreakLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.tier, + Parameters.rehydratePriority, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sealBlob, + Parameters.legalHold1 + ], + isXML: true, + serializer: xmlSerializer +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCopyFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.xMsRequiresSync, + Parameters.sourceContentMD5, + 
Parameters.copySourceAuthorization, + Parameters.copySourceTags + ], + isXML: true, + serializer: xmlSerializer +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp15, + Parameters.copyId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetTierHeaders + }, + 202: { + headersMapper: Mappers.BlobSetTierHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTierExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp16 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags, + Parameters.rehydratePriority, + Parameters.tier1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.restype1], + urlParameters: [Parameters.url], + headerParameters: [Parameters.version, Parameters.accept1], + isXML: true, + serializer: xmlSerializer +}; +const queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobQueryExceptionHeaders + } + }, + requestBody: Parameters.queryRequest, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp17 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobTags, + headersMapper: Mappers.BlobGetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetTagsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp18 + ], + 
urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobSetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTagsExceptionHeaders + } + }, + requestBody: Parameters.tags, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.versionId, + Parameters.comp18 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifTags, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +//# sourceMappingURL=blob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js.map new file mode 100644 index 00000000..71e3c61e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"blob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/blob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAsDnD,iCAAiC;AACjC,MAAM,OAAO,IAAI;IAGf;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,QAAQ,CACN,OAAoC;QAEpC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAAyC;QAEzC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACW,CAAC;IAC1C,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,MAAM,CAAC,OAAkC;QACvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACW,CAAC;IACnC,CAAC;IAED;;;OAGG;IACH,QAAQ,CACN,OAAoC;QAEpC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,SAAS,CACP,aAAgC,EAChC,OAAqC;QAErC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,sBAAsB,CACW,CAAC;IACtC,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;IAC3C,CAAC;IAED;;;OAGG;IACH,qBAAqB,CACnB,OAAiD;QAEjD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACW,CAAC;IAClD,CAAC;IAED;;;OAGG;IACH,wBAAwB,CACtB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACW,CAAC;IACrD,CAA
C;IAED;;;;OAIG;IACH,YAAY,CACV,SAAkB,EAClB,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,WAAW,CACT,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,YAAY,CACV,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,YAAY,CACV,OAAe,EACf,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,UAAU,CACR,OAAe,EACf,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;IACvC,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,UAAU,CACR,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;IACvC,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;IAC3C,CAAC;IAED;;;;;;;OAOG;IACH,gBAAgB,CACd,UAAkB,EAClB,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;IAC7C,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,UAAkB,EAClB,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;IACxC,CAAC;IAED;;;;;;OAMG;IACH,gBAAgB,CACd,MAAc,EACd,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;IAC7C,CAAC;IAED;;;;;;;;OAQG;IACH,OAAO,CACL,IAAgB,EAChB,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;IACpC,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;IAC3C,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,OAAiC;QACrC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kBAAkB,CACW,CAAC;IAClC,CAAC;IAED;;;OAGG;IACH,OAAO,CAAC,OAAmC;QACzC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;IACpC,CAAC;IAED;;;OAGG;IACH,OAAO,CAAC,OAAmC;QACzC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EA
ClB,oBAAoB,CACW,CAAC;IACpC,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AAEzE,MAAM,qBAAqB,GAA2B;IACpD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,mBAAmB;SAC3C;QACD,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,mBAAmB;SAC3C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;KACrB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,oBAAoB;QAC/B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,wBAAwB;SAChD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,iCAAiC;SACzD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;KACrB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,iBAAiB;SACzC;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,cAAc;KAC1B;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,qBAAqB,GAA2B;IACpD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mBAAmB;SAC3C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,KAAK,CAAC;IAChE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,sBAAsB,GAA2B;IACrD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,oBAAoB;SAC5C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,6BAA6B;SACrD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACj
D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,gBAAgB,CAAC;IAC/D,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;KAClC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;IACjE,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,yCAAyC;SACjE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;KAClC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;IACpE,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,KAAK,CAAC;IAChE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/
C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;IAC5D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,U
AAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,UAAU;KACtB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,cAAc;KAC1B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;IAC5D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,uBAAuB;KACnC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;KACjB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IACvD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC;IAC1D,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,kBAAkB,GAA2B;IACjD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,gBAAgB;SACxC;QACD,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,gBAAgB;SACxC;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;KACF;IACD,WAAW,EAAE,UAAU,CAAC,YAAY;IACpC,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,
CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,QAAQ;YAC5B,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;KACF;IACD,WAAW,EAAE,UAAU,CAAC,IAAI;IAC5B,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;KACrC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobDownloadOptionalParams,\n BlobDownloadResponse,\n BlobGetPropertiesOptionalParams,\n BlobGetPropertiesResponse,\n BlobDeleteOptionalParams,\n BlobDeleteResponse,\n BlobUndeleteOptionalParams,\n BlobUndeleteResponse,\n BlobExpiryOptions,\n BlobSetExpiryOptionalParams,\n BlobSetExpiryResponse,\n BlobSetHttpHeadersOptionalParams,\n BlobSetHttpHeadersResponse,\n BlobSetImmutabilityPolicyOptionalParams,\n BlobSetImmutabilityPolicyResponse,\n BlobDeleteImmutabilityPolicyOptionalParams,\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetLegalHoldOptionalParams,\n BlobSetLegalHoldResponse,\n BlobSetMetadataOptionalParams,\n BlobSetMetadataResponse,\n BlobAcquireLeaseOptionalParams,\n BlobAcquireLeaseResponse,\n BlobReleaseLeaseOptionalParams,\n BlobReleaseLeaseResponse,\n BlobRenewLeaseOptionalParams,\n BlobRenewLeaseResponse,\n BlobChangeLeaseOptionalParams,\n BlobChangeLeaseResponse,\n BlobBreakLeaseOptionalParams,\n BlobBreakLeaseResponse,\n BlobCreateSnapshotOptionalParams,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLOptionalParams,\n BlobStartCopyFromURLResponse,\n BlobCopyFromURLOptionalParams,\n BlobCopyFromURLResponse,\n BlobAbortCopyFromURLOptionalParams,\n BlobAbortCopyFromURLResponse,\n AccessTier,\n BlobSetTierOptionalParams,\n BlobSetTierResponse,\n BlobGetAccountInfoResponse,\n BlobQueryOptionalParams,\n BlobQueryResponse,\n BlobGetTagsOptionalParams,\n BlobGetTagsResponse,\n 
BlobSetTagsOptionalParams,\n BlobSetTagsResponse\n} from \"../models\";\n\n/** Class representing a Blob. */\nexport class Blob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Blob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Download operation reads or downloads a blob from the system, including its metadata and\n * properties. You can also call Download to read a snapshot.\n * @param options The options parameters.\n */\n download(\n options?: BlobDownloadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n downloadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system\n * properties for the blob. It does not return the content of the blob.\n * @param options The options parameters.\n */\n getProperties(\n options?: BlobGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is\n * permanently removed from the storage account. If the storage account's soft delete feature is\n * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible\n * immediately. However, the blob service retains the blob or snapshot for the number of days specified\n * by the DeleteRetentionPolicy section of [Storage service properties]\n * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is\n * permanently removed from the storage account. Note that you continue to be charged for the\n * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the\n * \"include=deleted\" query parameter to discover which blobs and snapshots have been soft deleted. You\n * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a\n * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404\n * (ResourceNotFound).\n * @param options The options parameters.\n */\n delete(options?: BlobDeleteOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Undelete a blob that was previously soft deleted\n * @param options The options parameters.\n */\n undelete(\n options?: BlobUndeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n undeleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Sets the time a blob will expire and be deleted.\n * @param expiryOptions Required. 
Indicates mode of the expiry time\n * @param options The options parameters.\n */\n setExpiry(\n expiryOptions: BlobExpiryOptions,\n options?: BlobSetExpiryOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n expiryOptions,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setExpiryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set HTTP Headers operation sets system properties on the blob\n * @param options The options parameters.\n */\n setHttpHeaders(\n options?: BlobSetHttpHeadersOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setHttpHeadersOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Immutability Policy operation sets the immutability policy on the blob\n * @param options The options parameters.\n */\n setImmutabilityPolicy(\n options?: BlobSetImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Delete Immutability Policy operation deletes the immutability policy on the blob\n * @param options The options parameters.\n */\n deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Legal Hold operation sets a legal hold on the blob.\n * @param legalHold Specified if a legal hold should be set on the blob.\n * @param options The options parameters.\n */\n setLegalHold(\n legalHold: boolean,\n options?: BlobSetLegalHoldOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n legalHold,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setLegalHoldOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more\n * name-value pairs\n * @param options The options parameters.\n */\n setMetadata(\n options?: BlobSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n acquireLease(\n options?: BlobAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and 
delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: BlobReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: BlobRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: BlobChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n breakLease(\n options?: BlobBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * The Create Snapshot operation creates a read-only snapshot of a blob\n * @param options The options parameters.\n */\n createSnapshot(\n options?: BlobCreateSnapshotOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createSnapshotOperationSpec\n ) as Promise;\n }\n\n /**\n * The Start Copy From URL operation copies a blob or an internet resource to a new blob.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. 
The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n startCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return\n * a response until the copy is complete.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyFromURL(\n copySource: string,\n options?: BlobCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination\n * blob with zero length and full metadata.\n * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob\n * operation.\n * @param options The options parameters.\n */\n abortCopyFromURL(\n copyId: string,\n options?: BlobAbortCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copyId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n abortCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant storage only). A\n * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block\n * blob's tier determines Hot/Cool/Archive storage type. 
This operation does not update the blob's\n * ETag.\n * @param tier Indicates the tier to be set on the blob.\n * @param options The options parameters.\n */\n setTier(\n tier: AccessTier,\n options?: BlobSetTierOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n tier,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTierOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Query operation enables users to select/project on blob data by providing simple query\n * expressions.\n * @param options The options parameters.\n */\n query(options?: BlobQueryOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n queryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Tags operation enables users to get the tags associated with a blob.\n * @param options The options parameters.\n */\n getTags(options?: BlobGetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getTagsOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tags operation enables users to set tags on a blob.\n * @param options The options parameters.\n */\n setTags(options?: BlobSetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTagsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst downloadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDownloadExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.rangeGetContentMD5,\n Parameters.rangeGetContentCRC64,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: 
coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"HEAD\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.blobDeleteType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.deleteSnapshots\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst undeleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobUndeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobUndeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setExpiryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetExpiryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetExpiryExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.expiryOptions,\n Parameters.expiresOn\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setHttpHeadersOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetHttpHeadersHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n 
Parameters.blobContentLanguage,\n Parameters.blobContentDisposition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifUnmodifiedSince,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setLegalHoldOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetLegalHoldHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.legalHold\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetMetadataExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n 
Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobBreakLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst createSnapshotOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobCreateSnapshotHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n 
Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst startCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobStartCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.tier,\n Parameters.rehydratePriority,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sealBlob,\n Parameters.legalHold1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.xMsRequiresSync,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst abortCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobAbortCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp15,\n Parameters.copyId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.copyActionAbortConstant\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTierOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetTierHeaders\n },\n 202: {\n headersMapper: Mappers.BlobSetTierHeaders\n 
},\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTierExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp16\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags,\n Parameters.rehydratePriority,\n Parameters.tier1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst queryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobQueryExceptionHeaders\n }\n },\n requestBody: Parameters.queryRequest,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp17\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobTags,\n headersMapper: Mappers.BlobGetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetTagsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobSetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTagsExceptionHeaders\n }\n },\n requestBody: Parameters.tags,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifTags,\n 
Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js new file mode 100644 index 00000000..037ca674 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js @@ -0,0 +1,391 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a BlockBlob. */ +export class BlockBlob { + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + const operationArguments = { + contentLength, + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + const operationArguments = { + blockId, + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. + */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + const operationArguments = { + blockId, + contentLength, + sourceUrl, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + const operationArguments = { + blocks, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. 
+ */ + getBlockList(listType, options) { + const operationArguments = { + listType, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobUploadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobUploadExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.contentType1, + Parameters.accept2, + Parameters.blobType2 + ], + mediaType: "binary", + serializer +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.copySourceTags, + Parameters.transactionalContentMD5, + Parameters.blobType2, + Parameters.copySourceBlobProperties + ], + isXML: true, + serializer: xmlSerializer +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders + } + }, + requestBody: Parameters.body1, + 
queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2 + ], + mediaType: "binary", + serializer +}; +const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobCommitBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders + } + }, + requestBody: Parameters.blocks, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlockList, + headersMapper: Mappers.BlockBlobGetBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp25, + Parameters.listType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + 
Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=blockBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js.map new file mode 100644 index 00000000..d76b8b41 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"blockBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/blockBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAmBnD,sCAAsC;AACtC,MAAM,OAAO,SAAS;IAGpB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;;;;OAQG;IACH,MAAM,CACJ,aAAqB,EACrB,IAA8B,EAC9B,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;;;;;;;;;;OAYG;IACH,cAAc,CACZ,aAAqB,EACrB,UAAkB,EAClB,OAA+C;QAE/C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACgB,CAAC;IAChD,CAAC;IAED;;;;;;;;OAQG;IACH,UAAU,CACR,OAAe,EACf,aAAqB,EACrB,IAA8B,EAC9B,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;IAC5C,CAAC;IAED;;;;;;;;;OASG;IACH,iBAAiB,CACf,OAAe,EACf,aAAqB,EACrB,SAAiB,EACjB,OAAkD;QAElD,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,SAAS;YACT,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACgB,CAAC;IACnD,CAAC;IAED;;;;;;;;;;OAUG;IACH,eAAe,CACb,MAAuB,EACvB,OAAgD;QAEhD,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;IACjD,CAAC;IAED;;;;;;OAMG;IACH,YAAY,CACV,QAAuB,EACvB,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,QAAQ;YACR,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;IAC9C,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC
,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,uCAAuC;SAC/D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,wBAAwB;KACpC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;KACnB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;KACnB;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,8BAA8B,GAA2B;IAC7D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,iCAAiC;SACzD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,0CAA0C;SAClE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;KACnB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,YAAY;KACxB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;KACF;IACD,WAAW,EAAE,UAAU,CAAC,MAAM;IAC9B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MA
AM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;KACrC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,SAAS;YAC7B,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlockBlobUploadOptionalParams,\n BlockBlobUploadResponse,\n BlockBlobPutBlobFromUrlOptionalParams,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobStageBlockOptionalParams,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLOptionalParams,\n BlockBlobStageBlockFromURLResponse,\n BlockLookupList,\n BlockBlobCommitBlockListOptionalParams,\n BlockBlobCommitBlockListResponse,\n BlockListType,\n BlockBlobGetBlockListOptionalParams,\n BlockBlobGetBlockListResponse\n} from \"../models\";\n\n/** Class representing a BlockBlob. */\nexport class BlockBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class BlockBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing\n * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put\n * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a\n * partial update of the content of a block blob, use the Put Block List operation.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n upload(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobUploadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read\n * from a given URL. This API is supported beginning with the 2020-04-08 version. 
Partial updates are\n * not supported with Put Blob from URL; the content of an existing blob is overwritten with the\n * content of the new blob. To perform partial updates to a block blob’s contents using a source URL,\n * use the Put Block from URL API in conjunction with Put Block List.\n * @param contentLength The length of the request.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n putBlobFromUrl(\n contentLength: number,\n copySource: string,\n options?: BlockBlobPutBlobFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n putBlobFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n stageBlock(\n blockId: string,\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobStageBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob where the contents\n * are read from a URL.\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param sourceUrl Specify a URL to the copy source.\n * @param options The options parameters.\n */\n stageBlockFromURL(\n blockId: string,\n contentLength: number,\n sourceUrl: string,\n options?: BlockBlobStageBlockFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n sourceUrl,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the\n * blob. In order to be written as part of a blob, a block must have been successfully written to the\n * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading\n * only those blocks that have changed, then committing the new and existing blocks together. 
You can\n * do this by specifying whether to commit a block from the committed block list or from the\n * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list\n * it may belong to.\n * @param blocks Blob Blocks.\n * @param options The options parameters.\n */\n commitBlockList(\n blocks: BlockLookupList,\n options?: BlockBlobCommitBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blocks,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n commitBlockListOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block\n * blob\n * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted\n * blocks, or both lists together.\n * @param options The options parameters.\n */\n getBlockList(\n listType: BlockListType,\n options?: BlockBlobGetBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n listType,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getBlockListOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst uploadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobUploadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobUploadExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.blobType2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst putBlobFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n 
Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags,\n Parameters.transactionalContentMD5,\n Parameters.blobType2,\n Parameters.copySourceBlobProperties\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst stageBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst stageBlockFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst commitBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobCommitBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders\n }\n },\n requestBody: Parameters.blocks,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n 
Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlockList,\n headersMapper: Mappers.BlockBlobGetBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp25,\n Parameters.listType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js new file mode 100644 index 00000000..17f5d4ca --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js @@ -0,0 +1,769 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a Container. */ +export class Container { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. 
+ */ + delete(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. + */ + getAccessPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + const operationArguments = { + sourceContainerName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. 
+ */ + listBlobHierarchySegment(delimiter, options) { + const operationArguments = { + delimiter, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const createOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.access, + Parameters.defaultEncryptionScope, + Parameters.preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer +}; +const getPropertiesOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const deleteOperationSpec = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.ContainerDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerDeleteExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer +}; +const setMetadataOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetMetadataExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp6 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + 
headersMapper: Mappers.ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders + } + }, + requestBody: Parameters.containerAcl, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.access, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerRestoreHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRestoreExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp8 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.deletedContainerName, + Parameters.deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenameHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenameExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp9 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.sourceContainerName, + Parameters.sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders + } + }, + requestBody: Parameters.body, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp4, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: 
Mappers.ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1 + ], + isXML: true, + serializer: xmlSerializer +}; +const renewLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2 + ], + isXML: true, + serializer: xmlSerializer +}; +const breakLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod + ], + isXML: true, + serializer: xmlSerializer +}; +const changeLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, 
+ Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsFlatSegmentResponse, + headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsHierarchySegmentResponse, + headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1, + Parameters.delimiter + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.restype1], + urlParameters: [Parameters.url], + headerParameters: [Parameters.version, Parameters.accept1], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=container.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js.map new file mode 100644 index 00000000..1e990106 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"container.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/container.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAwCnD,sCAAsC;AACtC,MAAM,OAAO,SAAS;IAGpB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAA8C;QAE9C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACgB,CAAC;IAC/C,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;OAGG;IACH,WAAW,CACT,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,eAAe,CACb,OAAgD;QAEhD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;IACjD,CAAC;IAED;;;;OAIG;IACH,eAAe,CACb,OAAgD;QAEhD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;IACjD,CAAC;IAED;;;OAGG;IACH,OAAO,CACL,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACgB,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,mBAA2B,EAC3B,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,mBAAmB;YACnB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CACT,aAAqB,EACrB,oBAA4B,EAC5B,IAA8B,EAC9B,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,oBAAoB;YACpB,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,WAAW,CACT,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,YAAY,CACV,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;IAC9C,CAAC;IAED;;;;;OAKG;IACH,YAAY,CACV,OAAe,EACf,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;IAC9C,CAAC;IAED;;;;;OAKG;IACH,UAAU,CACR,OAAe,EACf,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,UAAU,CACR,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;IAC5C,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAA4C;
QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;OAGG;IACH,mBAAmB,CACjB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,gCAAgC,CACgB,CAAC;IACrD,CAAC;IAED;;;;;;;OAOG;IACH,wBAAwB,CACtB,SAAiB,EACjB,OAAyD;QAEzD,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACgB,CAAC;IAC1D,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACgB,CAAC;IAChD,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AAEzE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,QAAQ,CAAC;IACnE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,8BAA8B;KAC1C;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,6BAA6B;SACrD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,sCAAsC;SAC9D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,QAAQ,CAAC;IACnE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,QAAQ,CAAC;IACnE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;KAC7B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,SAAS,EAAE,kBAAkB,EAAE;qBAC3D;iBACF;gBACD,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,kBAAkB;aACnC;YACD,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;KACF;I
ACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;KACF;IACD,WAAW,EAAE,UAAU,CAAC,YAAY;IACpC,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;KAC7B;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,oBAAoB;QAC/B,UAAU,CAAC,uBAAuB;KACnC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,aAAa;KACzB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,IAAI;IAC5B,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,oBAAoB;KAChC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iBAAiB;YACrC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAA
E;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;KACpB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;KACvB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;KAC5B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,gCAAgC,GAA2B;IAC/D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,4BAA4B;YAChD,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;IACpE,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iCAAiC;YACrD,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,iDAAiD;SACzE;KACF;IACD,eAAe,EAAE;Q
ACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;KACrB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,uCAAuC;SAC/D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IACvD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC;IAC1D,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n ContainerCreateOptionalParams,\n ContainerCreateResponse,\n ContainerGetPropertiesOptionalParams,\n ContainerGetPropertiesResponse,\n ContainerDeleteOptionalParams,\n ContainerDeleteResponse,\n ContainerSetMetadataOptionalParams,\n ContainerSetMetadataResponse,\n ContainerGetAccessPolicyOptionalParams,\n ContainerGetAccessPolicyResponse,\n ContainerSetAccessPolicyOptionalParams,\n ContainerSetAccessPolicyResponse,\n ContainerRestoreOptionalParams,\n ContainerRestoreResponse,\n ContainerRenameOptionalParams,\n ContainerRenameResponse,\n ContainerSubmitBatchOptionalParams,\n ContainerSubmitBatchResponse,\n ContainerFilterBlobsOptionalParams,\n ContainerFilterBlobsResponse,\n ContainerAcquireLeaseOptionalParams,\n ContainerAcquireLeaseResponse,\n ContainerReleaseLeaseOptionalParams,\n ContainerReleaseLeaseResponse,\n ContainerRenewLeaseOptionalParams,\n ContainerRenewLeaseResponse,\n ContainerBreakLeaseOptionalParams,\n ContainerBreakLeaseResponse,\n ContainerChangeLeaseOptionalParams,\n ContainerChangeLeaseResponse,\n ContainerListBlobFlatSegmentOptionalParams,\n ContainerListBlobFlatSegmentResponse,\n ContainerListBlobHierarchySegmentOptionalParams,\n ContainerListBlobHierarchySegmentResponse,\n ContainerGetAccountInfoResponse\n} from \"../models\";\n\n/** Class representing a Container. */\nexport class Container {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Container class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * creates a new container under the specified account. If the container with the same name already\n * exists, the operation fails\n * @param options The options parameters.\n */\n create(\n options?: ContainerCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * returns all user-defined metadata and system properties for the specified container. 
The data\n * returned does not include the container's list of blobs\n * @param options The options parameters.\n */\n getProperties(\n options?: ContainerGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * operation marks the specified container for deletion. The container and any blobs contained within\n * it are later deleted during garbage collection\n * @param options The options parameters.\n */\n delete(\n options?: ContainerDeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * operation sets one or more user-defined name-value pairs for the specified container.\n * @param options The options parameters.\n */\n setMetadata(\n options?: ContainerSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the permissions for the specified container. The permissions indicate whether container data\n * may be accessed publicly.\n * @param options The options parameters.\n */\n getAccessPolicy(\n options?: ContainerGetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * sets the permissions for the specified container. The permissions indicate whether blobs in a\n * container may be accessed publicly.\n * @param options The options parameters.\n */\n setAccessPolicy(\n options?: ContainerSetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * Restores a previously-deleted container.\n * @param options The options parameters.\n */\n restore(\n options?: ContainerRestoreOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n restoreOperationSpec\n ) as Promise;\n }\n\n /**\n * Renames an existing container.\n * @param sourceContainerName Required. 
Specifies the name of the container to rename.\n * @param options The options parameters.\n */\n rename(\n sourceContainerName: string,\n options?: ContainerRenameOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceContainerName,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renameOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ContainerSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given\n * search expression. Filter blobs searches within the given container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ContainerFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n acquireLease(\n options?: ContainerAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: ContainerReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: ContainerRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n breakLease(\n options?: ContainerBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: ContainerChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param options The options parameters.\n */\n listBlobFlatSegment(\n options?: ContainerListBlobFlatSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobFlatSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix\n * element in the response body that acts as a placeholder for all blobs whose names begin with the\n * same substring up to the appearance of the delimiter character. 
The delimiter may be a single\n * character or a string.\n * @param options The options parameters.\n */\n listBlobHierarchySegment(\n delimiter: string,\n options?: ContainerListBlobHierarchySegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n delimiter,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobHierarchySegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.access,\n Parameters.defaultEncryptionScope,\n Parameters.preventEncryptionScopeOverride\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerDeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetMetadataExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp6\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince\n ],\n 
isXML: true,\n serializer: xmlSerializer\n};\nconst getAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: {\n name: \"Sequence\",\n element: {\n type: { name: \"Composite\", className: \"SignedIdentifier\" }\n }\n },\n serializedName: \"SignedIdentifiers\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\"\n },\n headersMapper: Mappers.ContainerGetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders\n }\n },\n requestBody: Parameters.containerAcl,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.access,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst restoreOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerRestoreHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRestoreExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp8\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.deletedContainerName,\n Parameters.deletedContainerVersion\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renameOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenameHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenameExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp9\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.sourceContainerName,\n Parameters.sourceLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ContainerSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [\n 
Parameters.timeoutInSeconds,\n Parameters.comp4,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ContainerFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders\n }\n },\n 
queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobFlatSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsFlatSegmentResponse,\n headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobHierarchySegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsHierarchySegmentResponse,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1,\n Parameters.delimiter\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js new file mode 100644 index 00000000..eca26879 --- /dev/null +++ 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js @@ -0,0 +1,14 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export * from "./service"; +export * from "./container"; +export * from "./blob"; +export * from "./pageBlob"; +export * from "./appendBlob"; +export * from "./blockBlob"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js.map new file mode 100644 index 00000000..63770151 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,WAAW,CAAC;AAC1B,cAAc,aAAa,CAAC;AAC5B,cAAc,QAAQ,CAAC;AACvB,cAAc,YAAY,CAAC;AAC3B,cAAc,cAAc,CAAC;AAC7B,cAAc,aAAa,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./service\";\nexport * from \"./container\";\nexport * from \"./blob\";\nexport * from \"./pageBlob\";\nexport * from \"./appendBlob\";\nexport * from \"./blockBlob\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js new file mode 100644 index 00000000..a014a5ad --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js @@ -0,0 +1,494 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a PageBlob. */ +export class PageBlob { + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + const operationArguments = { + contentLength, + blobContentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. 
+ */ + uploadPages(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. + */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + const operationArguments = { + sourceUrl, + sourceRange, + contentLength, + range, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. + */ + getPageRanges(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. + */ + getPageRangesDiff(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + } + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + const operationArguments = { + blobContentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. 
+ */ + updateSequenceNumber(sequenceNumberAction, options) { + const operationArguments = { + sequenceNumberAction, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyIncremental(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType, + Parameters.blobContentLength, + Parameters.blobSequenceNumber + ], + isXML: true, + serializer: xmlSerializer +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + 
Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo + ], + mediaType: "binary", + serializer +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobClearPagesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobClearPagesExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.pageWrite1 + ], + isXML: true, + serializer: xmlSerializer +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.sourceUrl, + Parameters.sourceRange, + Parameters.sourceContentCrc64, + Parameters.range1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; 
+const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20, + Parameters.prevsnapshot + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.prevSnapshotUrl + ], + isXML: true, + serializer: xmlSerializer +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobResizeHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobResizeExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.blobContentLength + ], + isXML: true, + serializer: xmlSerializer +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobSequenceNumber, + Parameters.sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.copySource + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=pageBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js.map new file mode 100644 index 00000000..8877c193 --- /dev/null 
+++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pageBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/pageBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAwBnD,qCAAqC;AACrC,MAAM,OAAO,QAAQ;IAGnB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;;OAMG;IACH,MAAM,CACJ,aAAqB,EACrB,iBAAyB,EACzB,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,iBAAiB;YACjB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACe,CAAC;IACvC,CAAC;IAED;;;;;OAKG;IACH,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACe,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,UAAU,CACR,aAAqB,EACrB,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACe,CAAC;IAC3C,CAAC;IAED;;;;;;;;;;OAUG;IACH,kBAAkB,CAChB,SAAiB,EACjB,WAAmB,EACnB,aAAqB,EACrB,KAAa,EACb,OAAkD;QAElD,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,WAAW;YACX,aAAa;YACb,KAAK;YACL,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACe,CAAC;IACnD,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACe,CAAC;IAC9C,CAAC;IAED;;;;OAIG;IACH,iBAAiB,CACf,OAAiD;QAEjD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACe,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,MAAM,CACJ,iBAAyB,EACzB,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,iBAAiB;YACjB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACe,CAAC;IACvC,CAAC;IAED;;;;;;OAMG;IACH,oBAAoB,CAClB,oBAA8C,EAC9C,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,oBAAoB;YACpB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACe,CAAC;IACrD,CAAC;IAED;;;;;;;;;;;OAWG;IACH,eAAe,CACb,UAAkB,EAClB,OAA+C;QAE/C,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACe,CAAC;IAChD,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe
;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,kBAAkB;KAC9B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,iCAAiC;QAC5C,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,uBAAuB;KACnC;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iCAAiC;QAC5C,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,UAAU;KACtB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,+BAA+B,GAA2B;IAC9D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,iCAAiC;SACzD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,0CAA0C;SAClE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,iCAAiC;QAC5C,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,QAAQ;YAC5B,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SA
AS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,8BAA8B,GAA2B;IAC7D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,QAAQ;YAC5B,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,yCAAyC;SACjE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,YAAY;KACxB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,gBAAgB,CAAC;IAC/D,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;KAC7B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;IAChE,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,gBAAgB,CAAC;IAC/D,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,oBAAoB;KAChC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,uCAAuC;SAC/D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,UAAU;KACtB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n PageBlobCreateOptionalParams,\n PageBlobCreateResponse,\n PageBlobUploadPagesOptionalParams,\n PageBlobUploadPagesResponse,\n 
PageBlobClearPagesOptionalParams,\n PageBlobClearPagesResponse,\n PageBlobUploadPagesFromURLOptionalParams,\n PageBlobUploadPagesFromURLResponse,\n PageBlobGetPageRangesOptionalParams,\n PageBlobGetPageRangesResponse,\n PageBlobGetPageRangesDiffOptionalParams,\n PageBlobGetPageRangesDiffResponse,\n PageBlobResizeOptionalParams,\n PageBlobResizeResponse,\n SequenceNumberActionType,\n PageBlobUpdateSequenceNumberOptionalParams,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobCopyIncrementalOptionalParams,\n PageBlobCopyIncrementalResponse\n} from \"../models\";\n\n/** Class representing a PageBlob. */\nexport class PageBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class PageBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create operation creates a new page blob.\n * @param contentLength The length of the request.\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n blobContentLength: number,\n options?: PageBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n uploadPages(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: PageBlobUploadPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Clear Pages operation clears a set of pages from a page blob\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n clearPages(\n contentLength: number,\n options?: PageBlobClearPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n clearPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a\n * URL\n * @param sourceUrl Specify a URL to the copy source.\n * @param sourceRange Bytes of source data in the specified range. The length of this range should\n * match the ContentLength header and x-ms-range/Range destination range header.\n * @param contentLength The length of the request.\n * @param range The range of bytes to which the source range would be written. 
The range should be 512\n * aligned and range-end is required.\n * @param options The options parameters.\n */\n uploadPagesFromURL(\n sourceUrl: string,\n sourceRange: string,\n contentLength: number,\n range: string,\n options?: PageBlobUploadPagesFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n sourceRange,\n contentLength,\n range,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a\n * page blob\n * @param options The options parameters.\n */\n getPageRanges(\n options?: PageBlobGetPageRangesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were\n * changed between target blob and previous snapshot.\n * @param options The options parameters.\n */\n getPageRangesDiff(\n options?: PageBlobGetPageRangesDiffOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesDiffOperationSpec\n ) as Promise;\n }\n\n /**\n * Resize the Blob\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n resize(\n blobContentLength: number,\n options?: PageBlobResizeOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n resizeOperationSpec\n ) as Promise;\n }\n\n /**\n * Update the sequence number of the blob\n * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request.\n * This property applies to page blobs only. This property indicates how the service should modify the\n * blob's sequence number\n * @param options The options parameters.\n */\n updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n options?: PageBlobUpdateSequenceNumberOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sequenceNumberAction,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n updateSequenceNumberOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob.\n * The snapshot is copied such that only the differential changes between the previously copied\n * snapshot are transferred to the destination. The copied snapshots are complete copies of the\n * original snapshot and can be read or copied from as usual. This API is supported since REST version\n * 2016-05-31.\n * @param copySource Specifies the name of the source page blob snapshot. 
This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyIncremental(\n copySource: string,\n options?: PageBlobCopyIncrementalOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyIncrementalOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType,\n Parameters.blobContentLength,\n Parameters.blobSequenceNumber\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo\n ],\n mediaType: \"binary\",\n serializer\n};\nconst clearPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobClearPagesHeaders\n },\n default: {\n bodyMapper: 
Mappers.StorageError,\n headersMapper: Mappers.PageBlobClearPagesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.pageWrite1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.sourceUrl,\n Parameters.sourceRange,\n Parameters.sourceContentCrc64,\n Parameters.range1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesDiffOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20,\n 
Parameters.prevsnapshot\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.prevSnapshotUrl\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst resizeOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobResizeHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobResizeExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.blobContentLength\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst updateSequenceNumberOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobSequenceNumber,\n Parameters.sequenceNumberAction\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyIncrementalOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.PageBlobCopyIncrementalHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.copySource\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js new file mode 100644 index 00000000..106bca82 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js @@ -0,0 +1,345 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a Service. */ +export class Service { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + const operationArguments = { + blobServiceProperties, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + const operationArguments = { + keyInfo, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. 
+ */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders + } + }, + requestBody: Parameters.blobServiceProperties, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getPropertiesOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceProperties, + headersMapper: Mappers.ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceStatistics, + headersMapper: Mappers.ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders + } + }, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListContainersSegmentResponse, + headersMapper: Mappers.ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.include + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const 
getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.UserDelegationKey, + headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders + } + }, + requestBody: Parameters.keyInfo, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp3 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getAccountInfoOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.restype1], + urlParameters: [Parameters.url], + headerParameters: [Parameters.version, Parameters.accept1], + isXML: true, + serializer: xmlSerializer +}; +const submitBatchOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders + } + }, + requestBody: Parameters.body, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const filterBlobsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: Mappers.ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=service.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js.map new file mode 100644 index 00000000..2826430f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"service.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/service.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAsBnD,oCAAoC;AACpC,MAAM,OAAO,OAAO;IAGlB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;OAKG;IACH,aAAa,CACX,qBAA4C,EAC5C,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,qBAAqB;YACrB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,aAAa,CACX,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;IAC7C,CAAC;IAED;;;OAGG;IACH,qBAAqB,CACnB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACc,CAAC;IACrD,CAAC;IAED;;;;;OAKG;IACH,oBAAoB,CAClB,OAAgB,EAChB,OAAmD;QAEnD,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACc,CAAC;IACpD,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACc,CAAC;IAC9C,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CACT,aAAqB,EACrB,oBAA4B,EAC5B,IAA8B,EAC9B,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,oBAAoB;YACpB,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACc,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,WAAW,CACT,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACc,CAAC;IAC3C,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AAEzE,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,qBAAqB;IAC7C,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,gBAAgB;KAC5B;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,qBAAqB;YACzC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,gBAAgB;KAC5B;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,qBAAqB;YACzC,aAAa,EAAE,OAAO,CAAC,2BA
A2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;IACjE,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,6BAA6B;YACjD,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,OAAO;KACnB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;IAChE,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iBAAiB;YACrC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2CAA2C;SACnE;KACF;IACD,WAAW,EAAE,UAAU,CAAC,OAAO;IAC/B,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IACvD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC;IAC1D,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,IAAI;IAC5B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,KAAK,CAAC;IAChE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,oBAAoB;KAChC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iBAAiB;YACrC,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from 
\"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobServiceProperties,\n ServiceSetPropertiesOptionalParams,\n ServiceSetPropertiesResponse,\n ServiceGetPropertiesOptionalParams,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsOptionalParams,\n ServiceGetStatisticsResponse,\n ServiceListContainersSegmentOptionalParams,\n ServiceListContainersSegmentResponse,\n KeyInfo,\n ServiceGetUserDelegationKeyOptionalParams,\n ServiceGetUserDelegationKeyResponse,\n ServiceGetAccountInfoResponse,\n ServiceSubmitBatchOptionalParams,\n ServiceSubmitBatchResponse,\n ServiceFilterBlobsOptionalParams,\n ServiceFilterBlobsResponse\n} from \"../models\";\n\n/** Class representing a Service. */\nexport class Service {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Service class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * Sets properties for a storage account's Blob service endpoint, including properties for Storage\n * Analytics and CORS (Cross-Origin Resource Sharing) rules\n * @param blobServiceProperties The StorageService properties.\n * @param options The options parameters.\n */\n setProperties(\n blobServiceProperties: BlobServiceProperties,\n options?: ServiceSetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobServiceProperties,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the properties of a storage account's Blob service, including properties for Storage Analytics\n * and CORS (Cross-Origin Resource Sharing) rules.\n * @param options The options parameters.\n */\n getProperties(\n options?: ServiceGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only available on the\n * secondary location endpoint when read-access geo-redundant replication is enabled for the storage\n * account.\n * @param options The options parameters.\n */\n getStatistics(\n options?: ServiceGetStatisticsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getStatisticsOperationSpec\n ) as Promise;\n }\n\n /**\n * The List Containers Segment operation returns a list of the containers under the specified account\n * @param options The options parameters.\n */\n listContainersSegment(\n options?: ServiceListContainersSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listContainersSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves a user delegation key for the Blob service. 
This is only a valid operation when using\n * bearer token authentication.\n * @param keyInfo Key information\n * @param options The options parameters.\n */\n getUserDelegationKey(\n keyInfo: KeyInfo,\n options?: ServiceGetUserDelegationKeyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n keyInfo,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getUserDelegationKeyOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ServiceSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a\n * given search expression. 
Filter blobs searches across all containers within a storage account but\n * can be scoped within the expression to a single container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ServiceFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst setPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ServiceSetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders\n }\n },\n requestBody: Parameters.blobServiceProperties,\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceProperties,\n headersMapper: Mappers.ServiceGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getStatisticsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceStatistics,\n headersMapper: Mappers.ServiceGetStatisticsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listContainersSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListContainersSegmentResponse,\n headersMapper: Mappers.ServiceListContainersSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.include\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getUserDelegationKeyOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: Mappers.UserDelegationKey,\n headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders\n },\n default: {\n 
bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders\n }\n },\n requestBody: Parameters.keyInfo,\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp3\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ServiceGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ServiceSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ServiceFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js new file mode 100644 index 00000000..dfcaa045 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js @@ -0,0 +1,27 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import { Service, Container, Blob, PageBlob, AppendBlob, BlockBlob } from "./operations"; +import { StorageClientContext } from "./storageClientContext"; +export class StorageClient extends StorageClientContext { + /** + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. 
+ * @param options The parameter options + */ + constructor(url, options) { + super(url, options); + this.service = new Service(this); + this.container = new Container(this); + this.blob = new Blob(this); + this.pageBlob = new PageBlob(this); + this.appendBlob = new AppendBlob(this); + this.blockBlob = new BlockBlob(this); + } +} +//# sourceMappingURL=storageClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map new file mode 100644 index 00000000..1163d8e5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"storageClient.js","sourceRoot":"","sources":["../../../../../src/generated/src/storageClient.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EACL,OAAO,EACP,SAAS,EACT,IAAI,EACJ,QAAQ,EACR,UAAU,EACV,SAAS,EACV,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAG9D,MAAM,OAAO,aAAc,SAAQ,oBAAoB;IACrD;;;;;OAKG;IACH,YAAY,GAAW,EAAE,OAAqC;QAC5D,KAAK,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACpB,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,IAAI,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC;QAC3B,IAAI,CAAC,QAAQ,GAAG,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnC,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;IACvC,CAAC;CAQF","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n Service,\n Container,\n Blob,\n PageBlob,\n AppendBlob,\n BlockBlob\n} from \"./operations\";\nimport { StorageClientContext } from \"./storageClientContext\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nexport class StorageClient extends StorageClientContext {\n /**\n * Initializes a new instance of the StorageClient class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n super(url, options);\n this.service = new Service(this);\n this.container = new Container(this);\n this.blob = new Blob(this);\n this.pageBlob = new PageBlob(this);\n this.appendBlob = new AppendBlob(this);\n this.blockBlob = new BlockBlob(this);\n }\n\n service: Service;\n container: Container;\n blob: Blob;\n pageBlob: PageBlob;\n appendBlob: AppendBlob;\n blockBlob: BlockBlob;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js new file mode 100644 index 00000000..b47e192a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js @@ -0,0 +1,39 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +import * as coreHttp from "@azure/core-http"; +const packageName = "azure-storage-blob"; +const packageVersion = "12.10.0"; +export class StorageClientContext extends coreHttp.ServiceClient { + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url, options) { + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + super(undefined, options); + this.requestContentType = "application/json; charset=utf-8"; + this.baseUri = options.endpoint || "{url}"; + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2021-06-08"; + } +} +//# sourceMappingURL=storageClientContext.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map new file mode 100644 index 00000000..98f18f9d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map @@ -0,0 +1 @@ +{"version":3,"file":"storageClientContext.js","sourceRoot":"","sources":["../../../../../src/generated/src/storageClientContext.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAG7C,MAAM,WAAW,GAAG,oBAAoB,CAAC;AACzC,MAAM,cAAc,GAAG,SAAS,CAAC;AAEjC,MAAM,OAAO,oBAAqB,SAAQ,QAAQ,CAAC,aAAa;IAI9D;;;;;OAKG;IACH,YAAY,GAAW,EAAE,OAAqC;QAC5D,IAAI,GAAG,KAAK,SAAS,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;SACzC;QAED,0CAA0C;QAC1C,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;YACtB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,wBAAwB,EAAE,CAAC;YAC7D,OAAO,CAAC,SAAS,GAAG,GAAG,WAAW,IAAI,cAAc,IAAI,gBAAgB,EAAE,CAAC;SAC5E;QAED,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;QAE1B,IAAI,CAAC,kBAAkB,GAAG,iCAAiC,CAAC;QAE5D,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC;QAE3C,wBAAwB;QACxB,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QAEf,0CAA0C;QAC1C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,YAAY,CAAC;IACjD,CAAC;CACF","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nconst packageName = \"azure-storage-blob\";\nconst packageVersion = \"12.10.0\";\n\nexport class StorageClientContext extends coreHttp.ServiceClient {\n url: string;\n version: string;\n\n /**\n * Initializes a new instance of the StorageClientContext class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n if (url === undefined) {\n throw new Error(\"'url' cannot be null\");\n }\n\n // Initializing default values for options\n if (!options) {\n options = {};\n }\n\n if (!options.userAgent) {\n const 
defaultUserAgent = coreHttp.getDefaultUserAgentValue();\n options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;\n }\n\n super(undefined, options);\n\n this.requestContentType = \"application/json; charset=utf-8\";\n\n this.baseUri = options.endpoint || \"{url}\";\n\n // Parameter assignments\n this.url = url;\n\n // Assigning values to Constant parameters\n this.version = options.version || \"2021-06-08\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js new file mode 100644 index 00000000..bdaf7224 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=generatedModels.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map new file mode 100644 index 00000000..e9600aff --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map @@ -0,0 +1 @@ +{"version":3,"file":"generatedModels.js","sourceRoot":"","sources":["../../../src/generatedModels.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Tags } from \".\";\nimport { BlobTags, BlobPropertiesInternal as BlobProperties } from \"./generated/src/models\";\n\nexport {\n AccessPolicy,\n AccessTier,\n AccountKind,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlHeaders,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockHeaders,\n AppendBlobCreateHeaders,\n ArchiveStatus,\n BlobDeleteImmutabilityPolicyHeaders,\n BlobDeleteImmutabilityPolicyResponse,\n BlobImmutabilityPolicyMode,\n BlobAbortCopyFromURLHeaders,\n BlobCopyFromURLHeaders,\n BlobCopySourceTags,\n BlobCreateSnapshotHeaders,\n BlobDeleteHeaders,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobGetPropertiesHeaders,\n BlobGetPropertiesResponse as BlobGetPropertiesResponseModel,\n BlobPropertiesInternal as BlobProperties,\n BlobUndeleteResponse,\n BlobHttpHeaders as BlobHTTPHeaders,\n BlobSetHttpHeadersHeaders as BlobSetHTTPHeadersHeaders,\n BlobSetHttpHeadersResponse as BlobSetHTTPHeadersResponse,\n BlobSetImmutabilityPolicyHeaders,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldHeaders,\n BlobSetLegalHoldResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLHeaders,\n BlobStartCopyFromURLResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobSetMetadataHeaders,\n BlobSetTierHeaders,\n BlobSetTierResponse,\n BlobSetTagsHeaders,\n BlobDownloadHeaders,\n BlobDownloadResponse as BlobDownloadResponseModel,\n BlobType,\n BlobTags,\n BlobUndeleteHeaders,\n Block,\n BlockBlobCommitBlockListHeaders,\n BlockBlobUploadHeaders,\n BlockBlobUploadResponse,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListHeaders,\n BlockBlobStageBlockFromURLHeaders,\n BlockBlobStageBlockHeaders,\n BlockList,\n BlockListType,\n BlockBlobGetBlockListResponse,\n BlobServiceProperties,\n BlobServiceStatistics,\n BlobGetTagsHeaders,\n BlobTag,\n 
ContainerCreateHeaders,\n ContainerCreateResponse,\n ContainerDeleteHeaders,\n ContainerDeleteResponse,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesHeaders,\n ContainerBreakLeaseOptionalParams,\n ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerGetPropertiesResponse,\n ContainerProperties,\n ContainerSetMetadataResponse,\n ContainerSetAccessPolicyHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataHeaders,\n CopyStatusType,\n CorsRule,\n CpkInfo,\n DeleteSnapshotsOptionType,\n EncryptionAlgorithmType,\n GeoReplication,\n GeoReplicationStatusType,\n LeaseAccessConditions,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n ListContainersSegmentResponse,\n FilterBlobItem as FilterBlobItemModel,\n FilterBlobSegment as FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n Logging,\n Metrics,\n ModifiedAccessConditions as ModifiedAccessConditionsModel,\n PublicAccessType,\n PageBlobCreateResponse,\n PageBlobUploadPagesResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobClearPagesHeaders,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalHeaders,\n PageBlobCreateHeaders,\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesResponse as PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesDiffResponse as PageBlobGetPageRangesDiffResponseModel,\n PageBlobResizeHeaders,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberHeaders,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLHeaders,\n PageBlobUploadPagesHeaders,\n PageBlobCopyIncrementalResponse,\n SequenceNumberActionType,\n RehydratePriority,\n RetentionPolicy,\n AppendPositionAccessConditions,\n ServiceGetUserDelegationKeyHeaders,\n ServiceSubmitBatchHeaders,\n ServiceGetAccountInfoHeaders,\n ServiceGetPropertiesHeaders,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsHeaders,\n SequenceNumberAccessConditions,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentHeaders,\n ServiceListContainersSegmentResponse,\n ServiceSetPropertiesHeaders,\n SkuName,\n StaticWebsite,\n ContainerItem,\n ServiceSubmitBatchResponse as ServiceSubmitBatchResponseModel,\n ServiceSubmitBatchOptionalParams as ServiceSubmitBatchOptionalParamsModel,\n SignedIdentifier as SignedIdentifierModel,\n SyncCopyStatusType,\n UserDelegationKey as UserDelegationKeyModel,\n ContainerEncryptionScope,\n BlobQueryHeaders,\n BlobQueryResponse as BlobQueryResponseModel,\n ContainerRestoreResponse as ContainerUndeleteResponse,\n ContainerRestoreHeaders as ContainerUndeleteHeaders,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobPutBlobFromUrlHeaders,\n ContainerRenameResponse,\n ContainerRenameHeaders,\n} from \"./generated/src/models\";\n\n// Following definitions are to avoid breaking change.\nexport interface BlobPrefix {\n name: string;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsFlatSegmentResponseModel {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegmentModel;\n continuationToken?: string;\n}\n\nexport interface BlobFlatListSegmentModel {\n blobItems: BlobItemInternal[];\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsHierarchySegmentResponseModel {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n 
segment: BlobHierarchyListSegmentModel;\n continuationToken?: string;\n}\n\nexport interface BlobHierarchyListSegmentModel {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItemInternal[];\n}\n\n/** An Azure Storage blob */\nexport interface BlobItemInternal {\n name: string;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n /** Properties of a blob */\n properties: BlobProperties;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n /** Blob tags */\n blobTags?: BlobTags;\n /** Dictionary of */\n objectReplicationMetadata?: { [propertyName: string]: string };\n /** Inactive root blobs which have any versions would have such tag with value true. */\n hasVersionsOnly?: boolean;\n}\n\n/**\n * Blob info from a {@link BlobServiceClient.findBlobsByTags}\n */\nexport interface FilterBlobItem {\n /**\n * Blob Name.\n */\n name: string;\n\n /**\n * Container Name.\n */\n containerName: string;\n\n /**\n * Blob Tags.\n */\n tags?: Tags;\n\n /**\n * Tag value.\n *\n * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags.\n */\n tagValue: string;\n}\n\n/**\n * Segment response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface FilterBlobSegment {\n serviceEndpoint: string;\n where: string;\n blobs: FilterBlobItem[];\n continuationToken?: string;\n}\n\nexport interface PageRangeInfo {\n start: number;\n end: number;\n isClear: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js new file mode 100644 index 00000000..93dc89c1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { RestError } from "@azure/core-http"; +export * from "./BlobServiceClient"; +export * from "./Clients"; +export * from "./ContainerClient"; +export * from "./BlobLeaseClient"; +export * from "./BlobBatch"; +export * from "./BlobBatchClient"; +export * from "./BatchResponse"; +export * from "./StorageBrowserPolicyFactory"; +export * from "./credentials/AnonymousCredential"; +export * from "./credentials/Credential"; +export { BlockBlobTier, PremiumPageBlobTier, } from "./models"; +export * from "./Pipeline"; +export * from "./policies/AnonymousCredentialPolicy"; +export * from "./policies/CredentialPolicy"; +export * from "./StorageRetryPolicyFactory"; +export * from "./generatedModels"; +export { RestError }; +export { logger } from "./log"; +//# sourceMappingURL=index.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map new file mode 100644 index 00000000..c9629ebb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAE7C,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,+BAA+B,CAAC;AAC9C,cAAc,mCAAmC,CAAC;AAClD,cAAc,0BAA0B,CAAC;AAGzC,OAAO,EACL,aAAa,EAEb,mBAAmB,GAOpB,MAAM,UAAU,CAAC;AAClB,cAAc,YAAY,CAAC;AAC3B,cAAc,sCAAsC,CAAC;AACrD,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6BAA6B,CAAC;AAE5C,cAAc,mBAAmB,CAAC;AAClC,OAAO,EAAE,SAAS,EAAE,CAAC;AAMrB,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RestError } from \"@azure/core-http\";\n\nexport * from \"./BlobServiceClient\";\nexport * from \"./Clients\";\nexport * from \"./ContainerClient\";\nexport * from \"./BlobLeaseClient\";\nexport * from \"./BlobBatch\";\nexport * from \"./BlobBatchClient\";\nexport * from \"./BatchResponse\";\nexport * from \"./StorageBrowserPolicyFactory\";\nexport * from \"./credentials/AnonymousCredential\";\nexport * from \"./credentials/Credential\";\nexport { SasIPRange } from \"./sas/SasIPRange\";\nexport { Range } from \"./Range\";\nexport {\n BlockBlobTier,\n BlobImmutabilityPolicy,\n PremiumPageBlobTier,\n Tags,\n TagConditions,\n ContainerRequestConditions,\n HttpAuthorization,\n ModificationConditions,\n MatchConditions,\n} from \"./models\";\nexport * from \"./Pipeline\";\nexport * from \"./policies/AnonymousCredentialPolicy\";\nexport * from \"./policies/CredentialPolicy\";\nexport * from \"./StorageRetryPolicyFactory\";\nexport { CommonOptions } from \"./StorageClient\";\nexport * from \"./generatedModels\";\nexport { RestError };\nexport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n PageList,\n} from \"./PageBlobRangeResponse\";\nexport { logger } from \"./log\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js new file mode 100644 index 00000000..41b1b1de --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { RestError } from "@azure/core-http"; +export * from "./BlobServiceClient"; +export * from "./Clients"; +export * from "./ContainerClient"; +export * from "./BlobLeaseClient"; +export * from "./sas/AccountSASPermissions"; +export * from "./sas/AccountSASResourceTypes"; +export * from "./sas/AccountSASServices"; +export * from "./sas/AccountSASSignatureValues"; +export * from "./BlobBatch"; +export * from "./BlobBatchClient"; +export * from "./BatchResponse"; +export * from "./sas/BlobSASPermissions"; +export * from "./sas/BlobSASSignatureValues"; +export * from "./StorageBrowserPolicyFactory"; +export * from "./sas/ContainerSASPermissions"; +export * from "./credentials/AnonymousCredential"; +export * from "./credentials/Credential"; +export * from "./credentials/StorageSharedKeyCredential"; +export { BlockBlobTier, PremiumPageBlobTier, StorageBlobAudience, } from "./models"; +export * from "./Pipeline"; +export * from "./policies/AnonymousCredentialPolicy"; +export * from "./policies/CredentialPolicy"; +export * from "./StorageRetryPolicyFactory"; +export * from "./policies/StorageSharedKeyCredentialPolicy"; +export * from "./sas/SASQueryParameters"; +export * from "./generatedModels"; +export { RestError }; +export { logger } from "./log"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js.map new file mode 100644 index 00000000..d7ec1197 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAG7C,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,6BAA6B,CAAC;AAC5C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,0BAA0B,CAAC;AACzC,cAAc,iCAAiC,CAAC;AAChD,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,0BAA0B,CAAC;AACzC,cAAc,8BAA8B,CAAC;AAC7C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,mCAAmC,CAAC;AAClD,cAAc,0BAA0B,CAAC;AACzC,cAAc,0CAA0C,CAAC;AAGzD,OAAO,EACL,aAAa,EACb,mBAAmB,EAUnB,mBAAmB,GACpB,MAAM,UAAU,CAAC;AAClB,cAAc,YAAY,CAAC;AAC3B,cAAc,sCAAsC,CAAC;AACrD,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6CAA6C,CAAC;AAC5D,cAAc,0BAA0B,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAYlC,OAAO,EAAE,SAAS,EAAE,CAAC;AAMrB,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RestError } from \"@azure/core-http\";\n\nexport { PollerLike, PollOperationState } from \"@azure/core-lro\";\nexport * from \"./BlobServiceClient\";\nexport * from \"./Clients\";\nexport * from \"./ContainerClient\";\nexport * from \"./BlobLeaseClient\";\nexport * from \"./sas/AccountSASPermissions\";\nexport * from \"./sas/AccountSASResourceTypes\";\nexport * from \"./sas/AccountSASServices\";\nexport * from \"./sas/AccountSASSignatureValues\";\nexport * from \"./BlobBatch\";\nexport * from \"./BlobBatchClient\";\nexport * from \"./BatchResponse\";\nexport * from \"./sas/BlobSASPermissions\";\nexport * from \"./sas/BlobSASSignatureValues\";\nexport * from \"./StorageBrowserPolicyFactory\";\nexport * from \"./sas/ContainerSASPermissions\";\nexport * from \"./credentials/AnonymousCredential\";\nexport * from \"./credentials/Credential\";\nexport * from \"./credentials/StorageSharedKeyCredential\";\nexport 
{ SasIPRange } from \"./sas/SasIPRange\";\nexport { Range } from \"./Range\";\nexport {\n BlockBlobTier,\n PremiumPageBlobTier,\n Tags,\n BlobDownloadResponseParsed,\n BlobImmutabilityPolicy,\n ObjectReplicationPolicy,\n ObjectReplicationRule,\n ObjectReplicationStatus,\n BlobQueryArrowField,\n BlobQueryArrowFieldType,\n HttpAuthorization,\n StorageBlobAudience,\n} from \"./models\";\nexport * from \"./Pipeline\";\nexport * from \"./policies/AnonymousCredentialPolicy\";\nexport * from \"./policies/CredentialPolicy\";\nexport * from \"./StorageRetryPolicyFactory\";\nexport * from \"./policies/StorageSharedKeyCredentialPolicy\";\nexport * from \"./sas/SASQueryParameters\";\nexport { CommonOptions } from \"./StorageClient\";\nexport * from \"./generatedModels\";\nexport {\n AppendBlobRequestConditions,\n BlobRequestConditions,\n Metadata,\n PageBlobRequestConditions,\n TagConditions,\n ContainerRequestConditions,\n ModificationConditions,\n MatchConditions,\n ModifiedAccessConditions,\n} from \"./models\";\nexport { RestError };\nexport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n PageList,\n} from \"./PageBlobRangeResponse\";\nexport { logger } from \"./log\";\nexport {\n BlobBeginCopyFromUrlPollState,\n CopyPollerBlobClient,\n} from \"./pollers/BlobStartCopyFromUrlPoller\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js new file mode 100644 index 00000000..a746f21c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +/** + * The `@azure/logger` configuration for this package. + */ +export const logger = createClientLogger("storage-blob"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js.map new file mode 100644 index 00000000..839d2f0c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,cAAc,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n */\nexport const logger = createClientLogger(\"storage-blob\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js new file mode 100644 index 00000000..2945123d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { EncryptionAlgorithmAES25 } from "./utils/constants"; +/** + * Represents the access tier on a blob. 
+ * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +export var BlockBlobTier; +(function (BlockBlobTier) { + /** + * Optimized for storing data that is accessed frequently. + */ + BlockBlobTier["Hot"] = "Hot"; + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + BlockBlobTier["Cool"] = "Cool"; + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + BlockBlobTier["Archive"] = "Archive"; +})(BlockBlobTier || (BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +export var PremiumPageBlobTier; +(function (PremiumPageBlobTier) { + /** + * P4 Tier. + */ + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. + */ + PremiumPageBlobTier["P80"] = "P80"; +})(PremiumPageBlobTier || (PremiumPageBlobTier = {})); +export function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). +} +export function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } +} +/** + * Defines the known cloud audiences for Storage. + */ +export var StorageBlobAudience; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. 
+ */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(StorageBlobAudience || (StorageBlobAudience = {})); +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js.map new file mode 100644 index 00000000..713fa16e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAWlC,OAAO,EAAE,wBAAwB,EAAE,MAAM,mBAAmB,CAAC;AAyF7D;;;GAGG;AACH,MAAM,CAAN,IAAY,aAcX;AAdD,WAAY,aAAa;IACvB;;OAEG;IACH,4BAAW,CAAA;IACX;;OAEG;IACH,8BAAa,CAAA;IACb;;;OAGG;IACH,oCAAmB,CAAA;AACrB,CAAC,EAdW,aAAa,KAAb,aAAa,QAcxB;AAED;;;;GAIG;AACH,MAAM,CAAN,IAAY,mBA6CX;AA7CD,WAAY,mBAAmB;IAC7B;;OAEG;IACH,gCAAS,CAAA;IACT;;OAEG;IACH,gCAAS,CAAA;IACT;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;AACb,CAAC,EA7CW,mBAAmB,KAAnB,mBAAmB,QA6C9B;AAED,MAAM,UAAU,YAAY,CAC1B,IAA8D;IAE9D,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,OAAO,IAAkB,CAAC,CAAC,qGAAqG;AAClI,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,GAAwB,EAAE,OAAgB;IAC7E,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE;QACnB,MAAM,IAAI,UAAU,CAAC,2DAA2D,CAAC,CAAC;KACnF;IAED,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,mBAAmB,EAAE;QACnC,GAAG,CAAC,mBAAmB,GAAG,wBAAwB,CAAC;KACpD;AACH,CAAC;AA2HD;;GAEG;AACH,MAAM,CAAN,IAAY,mBASX;AATD,WAAY,mBAAmB;IAC7B;;OAEG;IACH,gFAAyD,CAAA;IACzD;;OAEG;IACH,yFAAkE,CAAA;AACpE,CAAC,EATW,mBAAmB,KAAnB,mBAAmB,QAS9B","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\nimport {\n LeaseAccessConditions,\n SequenceNumberAccessConditions,\n AppendPositionAccessConditions,\n AccessTier,\n CpkInfo,\n BlobDownloadResponseModel,\n} from \"./generatedModels\";\nimport { EncryptionAlgorithmAES25 } from \"./utils/constants\";\n\n/**\n * Blob tags.\n */\nexport type Tags = Record;\n\n/**\n * A map of name-value pairs to associate with the resource.\n */\nexport interface Metadata {\n /**\n * A name-value pair.\n */\n [propertyName: string]: string;\n}\n\n/**\n * standard HTTP conditional headers and tags condition.\n */\nexport interface ModifiedAccessConditions\n extends MatchConditions,\n ModificationConditions,\n TagConditions {}\n\n/**\n * standard HTTP conditional headers, tags condition and lease condition\n */\nexport interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions {}\n\n/**\n * Conditions to add to the creation of this page blob.\n */\nexport interface PageBlobRequestConditions\n extends BlobRequestConditions,\n SequenceNumberAccessConditions {}\n\n/**\n * Conditions to add to the creation of this append blob.\n */\nexport interface AppendBlobRequestConditions\n extends BlobRequestConditions,\n AppendPositionAccessConditions {}\n\n/**\n * Specifies HTTP options for conditional requests based on modification time.\n */\nexport interface ModificationConditions {\n /**\n * Specify this header value to operate only on a blob if it has been modified since the\n * specified date/time.\n */\n ifModifiedSince?: Date;\n /**\n * Specify this header value to operate only on a blob if it has not 
been modified since the\n * specified date/time.\n */\n ifUnmodifiedSince?: Date;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on ETag matching.\n */\nexport interface MatchConditions {\n /**\n * Specify an ETag value to operate only on blobs with a matching value.\n */\n ifMatch?: string;\n /**\n * Specify an ETag value to operate only on blobs without a matching value.\n */\n ifNoneMatch?: string;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on blob tags.\n */\nexport interface TagConditions {\n /**\n * Optional SQL statement to apply to the tags of the blob.\n */\n tagConditions?: string;\n}\n\n/**\n * Conditions to meet for the container.\n */\nexport interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions {}\n\n/**\n * Represents the access tier on a blob.\n * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.}\n */\nexport enum BlockBlobTier {\n /**\n * Optimized for storing data that is accessed frequently.\n */\n Hot = \"Hot\",\n /**\n * Optimized for storing data that is infrequently accessed and stored for at least 30 days.\n */\n Cool = \"Cool\",\n /**\n * Optimized for storing data that is rarely accessed and stored for at least 180 days\n * with flexible latency requirements (on the order of hours).\n */\n Archive = \"Archive\",\n}\n\n/**\n * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts.\n * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here}\n * for detailed information on the corresponding IOPS and throughput per PageBlobTier.\n */\nexport enum PremiumPageBlobTier {\n /**\n * P4 Tier.\n */\n P4 = \"P4\",\n /**\n * P6 Tier.\n */\n P6 = \"P6\",\n /**\n * P10 Tier.\n */\n P10 = \"P10\",\n /**\n * P15 Tier.\n */\n P15 = \"P15\",\n /**\n * P20 Tier.\n */\n P20 = \"P20\",\n /**\n * P30 Tier.\n */\n P30 = \"P30\",\n /**\n * P40 Tier.\n */\n P40 = \"P40\",\n /**\n * P50 Tier.\n */\n P50 = \"P50\",\n /**\n * P60 Tier.\n */\n P60 = \"P60\",\n /**\n * P70 Tier.\n */\n P70 = \"P70\",\n /**\n * P80 Tier.\n */\n P80 = \"P80\",\n}\n\nexport function toAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string | undefined\n): AccessTier | undefined {\n if (tier === undefined) {\n return undefined;\n }\n\n return tier as AccessTier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).\n}\n\nexport function ensureCpkIfSpecified(cpk: CpkInfo | undefined, isHttps: boolean): void {\n if (cpk && !isHttps) {\n throw new RangeError(\"Customer-provided encryption key must be used over HTTPS.\");\n }\n\n if (cpk && !cpk.encryptionAlgorithm) {\n cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;\n }\n}\n\n/**\n * Specifies the Replication Status of a blob. This is used when a storage account has\n * Object Replication Policy(s) applied. 
See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}.\n */\nexport type ObjectReplicationStatus = \"complete\" | \"failed\";\n\n/**\n * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob.\n * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}.\n */\nexport interface ObjectReplicationRule {\n /**\n * The Object Replication Rule ID.\n */\n ruleId: string;\n\n /**\n * The Replication Status\n */\n replicationStatus: ObjectReplicationStatus;\n}\n\n/**\n * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}.\n * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the\n * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses\n * (e.g. {@link BlobProperties.ObjectReplicationDestinationPolicyId}.\n */\nexport interface ObjectReplicationPolicy {\n /**\n * The Object Replication Policy ID.\n */\n policyId: string;\n\n /**\n * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID.\n */\n rules: ObjectReplicationRule[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadResponseParsed extends BlobDownloadResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * The type of a {@link BlobQueryArrowField}.\n */\nexport type BlobQueryArrowFieldType =\n | \"int64\"\n | \"bool\"\n | \"timestamp[ms]\"\n | \"string\"\n | \"double\"\n | \"decimal\";\n\n/**\n * Describe a field in {@link BlobQueryArrowConfiguration}.\n */\nexport interface BlobQueryArrowField {\n /**\n * The type of the field.\n */\n type: BlobQueryArrowFieldType;\n\n /**\n * The name of the field.\n */\n name?: string;\n\n /**\n * The precision of the field. Required if type is \"decimal\".\n */\n precision?: number;\n\n /**\n * The scale of the field. 
Required if type is is \"decimal\".\n */\n scale?: number;\n}\n\n/**\n * Describe immutable policy for blob.\n */\nexport interface BlobImmutabilityPolicy {\n /**\n * Specifies the date time when the blobs immutability policy is set to expire.\n */\n expiriesOn?: Date;\n /**\n * Specifies the immutability policy mode to set on the blob.\n */\n policyMode?: BlobImmutabilityPolicyMode;\n}\n\n/**\n * Represents authentication information in Authorization, ProxyAuthorization,\n * WWW-Authenticate, and Proxy-Authenticate header values.\n */\nexport interface HttpAuthorization {\n /**\n * The scheme to use for authorization.\n */\n scheme: string;\n\n /**\n * the credentials containing the authentication information of the user agent for the resource being requested.\n */\n value: string;\n}\n\n/**\n * Defines the known cloud audiences for Storage.\n */\nexport enum StorageBlobAudience {\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Storage.\n */\n StorageOAuthScopes = \"https://storage.azure.com/.default\",\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Disk.\n */\n DiskComputeOAuthScopes = \"https://disk.compute.azure.com/.default\",\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js new file mode 100644 index 00000000..12270dbc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CredentialPolicy } from "./CredentialPolicy"; +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +export class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
+ /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} +//# sourceMappingURL=AnonymousCredentialPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js.map new file mode 100644 index 00000000..ecb1efb5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AnonymousCredentialPolicy.js","sourceRoot":"","sources":["../../../../src/policies/AnonymousCredentialPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;;GAGG;AACH,MAAM,OAAO,yBAA0B,SAAQ,gBAAgB;IAC7D;;;;OAIG;IACH,wGAAwG;IACxG,uEAAuE;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources\n * or for use with Shared Access Signatures (SAS).\n */\nexport class AnonymousCredentialPolicy extends CredentialPolicy {\n /**\n * Creates an instance of AnonymousCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js new file mode 100644 index 00000000..a2f1fccc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy } from "@azure/core-http"; +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +export class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). 
+ return request; + } +} +//# sourceMappingURL=CredentialPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js.map new file mode 100644 index 00000000..1c1ba7e1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"CredentialPolicy.js","sourceRoot":"","sources":["../../../../src/policies/CredentialPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,iBAAiB,EAAsC,MAAM,kBAAkB,CAAC;AAEzF;;;GAGG;AACH,MAAM,OAAgB,gBAAiB,SAAQ,iBAAiB;IAC9D;;;;OAIG;IACI,WAAW,CAAC,OAAoB;QACrC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;IACjE,CAAC;IAED;;;;;OAKG;IACO,WAAW,CAAC,OAAoB;QACxC,4EAA4E;QAC5E,qCAAqC;QACrC,OAAO,OAAO,CAAC;IACjB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BaseRequestPolicy, HttpOperationResponse, WebResource } from \"@azure/core-http\";\n\n/**\n * Credential policy used to sign HTTP(S) requests before sending. This is an\n * abstract class.\n */\nexport abstract class CredentialPolicy extends BaseRequestPolicy {\n /**\n * Sends out request.\n *\n * @param request -\n */\n public sendRequest(request: WebResource): Promise {\n return this._nextPolicy.sendRequest(this.signRequest(request));\n }\n\n /**\n * Child classes must implement this method with request signing. This method\n * will be executed in {@link sendRequest}.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n // Child classes must override this method with request signing. This method\n // will be executed in sendRequest().\n return request;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js new file mode 100644 index 00000000..a272017b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js @@ -0,0 +1,245 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { URLBuilder } from "@azure/core-http"; +import { BaseRequestPolicy, } from "@azure/core-http"; +import { delay } from "@azure/core-http"; +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. 
+ * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? 
_a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +export function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} +//# sourceMappingURL=StorageBearerTokenChallengeAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js.map new file mode 100644 index 00000000..2992be83 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageBearerTokenChallengeAuthenticationPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageBearerTokenChallengeAuthenticationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAiD,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC7F,OAAO,EACL,iBAAiB,GAIlB,MAAM,kBAAkB,CAAC;AAG1B,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEzC;;GAEG;AACH,MAAM,SAAS,GAAG;IAChB,YAAY,EAAE,WAAW;IACzB;;OAEG;IACH,eAAe,EAAE;QACf;;WAEG;QACH,aAAa,EAAE,eAAe;KAC/B;CACF,CAAC;AAiCF,sDAAsD;AACtD,MAAM,sBAAsB,GAAuB;IACjD,uBAAuB,EAAE,IAAI;IAC7B,iBAAiB,EAAE,IAAI;IACvB,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC,EAAE,oCAAoC;CACvE,CAAC;AAEF;;;;;;;;;;;;GAYG;AACH,KAAK,UAAU,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB;IAEnB,4EAA4E;IAC5E,eAAe;IACf,KAAK,UAAU,iBAAiB;QAC9B,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;aAC/B;YAAC,WAAM;gBACN,OAAO,IAAI,CAAC;aACb;SACF;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;YAE1C,6CAA6C;YAC7C,IAAI,UAAU,KAAK,IAAI,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,OAAO,UAAU,CAAC;SACnB;IACH,CAAC;IAED,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;QACrB,MAAM,KAAK,CAAC,iBAAiB,CAAC,CAAC;QAE/B,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;KACnC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;IAErC,MAAM,OAAO,mCACR,sBAAsB,GACtB,kBAAkB,CACtB,CAAC;IAEF;;;OAGG;IACH,MAAM,MAAM,GAAG;QACb;;WAEG;QACH,IAAI,YAAY;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;QAChC,CAAC;QACD;;;WAGG;QACH,IAAI,aAAa;;YACf,OAAO,CACL,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCAAI,CAAC,CAAC,GAAG,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1E,CAAC;QACJ,CAAC;QACD;;;WAGG;QACH,IAAI,WAAW;YACb,OAAO,CACL,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1F,CAAC;QACJ,CAAC;KACF,CAAC;IAEF;;;OAGG;IACH,SAAS,OAAO,CAA
C,eAAgC;;QAC/C,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;YACxB,yDAAyD;YACzD,MAAM,iBAAiB,GAAG,GAAgC,EAAE,CAC1D,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;YAE/C,wEAAwE;YACxE,6CAA6C;YAC7C,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;YACzB,+DAA+D;YAC/D,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCAAI,IAAI,CAAC,GAAG,EAAE,CACxC;iBACE,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;gBACf,OAAO,KAAK,CAAC;YACf,CAAC,CAAC;iBACD,KAAK,CAAC,CAAC,MAAM,EAAE,EAAE;gBAChB,sEAAsE;gBACtE,qEAAqE;gBACrE,mBAAmB;gBACnB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;gBACb,MAAM,MAAM,CAAC;YACf,CAAC,CAAC,CAAC;SACN;QAED,OAAO,aAAqC,CAAC;IAC/C,CAAC;IAED,OAAO,KAAK,EAAE,YAA6B,EAAwB,EAAE;QACnE,EAAE;QACF,gBAAgB;QAChB,+DAA+D;QAC/D,6CAA6C;QAC7C,+DAA+D;QAC/D,yCAAyC;QACzC,6DAA6D;QAC7D,YAAY;QACZ,EAAE;QAEF,IAAI,MAAM,CAAC,WAAW;YAAE,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;SACvB;QAED,OAAO,KAAoB,CAAC;IAC9B,CAAC,CAAC;AACJ,CAAC;AACD;;;GAGG;AACH,SAAS,YAAY,CAAC,QAA+B;IACnD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC;IAC3D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE;QACxC,OAAO,SAAS,CAAC;KAClB;IACD,OAAO;AACT,CAAC;AAUD;;;;;GAKG;AACH,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,eAAe,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC1D,MAAM,cAAc,GAAG,GAAG,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;IAChF,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CACpD,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CACnE,CAAC;IACF,mCAAmC;IACnC,OAAO,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,iCAAM,CAAC,GAAK,CAAC,EAAG,EAAE,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,aAAa;AAEb;;;;;;GAMG;AAEH,MAAM,UAAU,+CAA+C,CAC7D,UAA2B,EAC3B,MAAyB;IAEzB,wFAAwF;IACxF,IAAI,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAErD,MAAM,+CAAgD,SAAQ,iBAAiB;QAC7E,YAAmB,UAAyB,EAAE,OAA6B;YACzE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC7B,CAAC;QAEM,KAAK,CAAC,WAAW,CAAC,WAA4B;YACnD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;gBACzD,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;aACH;YAED,MAAM,gBAAgB,GAAG,QAAQ,CAAC;YAClC,MAAM,KAAK,GAAG,CACZ,MAAM,gBAAgB,CAAC;gBACrB,WAAW,EAAE,WAAW,CAAC,WAAW;gBACpC,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;iBAC3C;aACF,CAAC,CACH,CAAC,KAAK,CAAC;YACR,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,UAAU,KAAK,EAAE,CAAC,CAAC;YAEpF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;YAEjE,IAAI,CAAA,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,MAAM,MAAK,GAAG,EAAE;gBAC5B,MAAM,SAAS,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;gBACzC,IAAI,SAAS,EAAE;oBACb,MAAM,aAAa,GAAc,cAAc,CAAC,SAAS,CAAC,CAAC;oBAC3D,MAAM,eAAe,GAAG,aAAa,CAAC,WAAW,GAAG,SAAS,CAAC,YAAY,CAAC;oBAC3E,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAC;oBACxE,MAAM,YAAY,GAAG,aAAa,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;oBACzD,MAAM,QAAQ,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;oBACjC,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,UAAU,EAAE,eAAe,CAAC,CAAC;oBAE5E,MAAM,iBAAiB,GAAG,CACxB,MAAM,oBAAoB,CAAC;wBACzB,WAAW,EAAE,WAAW,CAAC,WAAW;wBACpC,cAAc,EAAE;4BACd,cAAc,EAAE,WAAW,CAAC,cAAc;yBAC3C;wBACD,QAAQ,EAAE,QAAQ;qBACnB,CAAC,CACH,CAAC,KAAK,CAAC;oBAER,QAAQ,GAAG,oBAAoB,CAAC;oBAChC,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,SAAS,CAAC,eAAe,CAAC,aAAa,EACvC,UAAU,iBAAiB,EAAE,CAC9B,CAAC;oBACF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;iBAClD;aACF;YAED,OAAO,QAAQ,CAAC;QAClB,CAAC;KACF;IAED,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,+CAA+C,CAAC,UAAU,EAAE,OAA
O,CAAC,CAAC;QAClF,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential, URLBuilder } from \"@azure/core-http\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"@azure/core-http\";\nimport { HttpOperationResponse } from \"@azure/core-http\";\nimport { WebResourceLike } from \"@azure/core-http\";\nimport { delay } from \"@azure/core-http\";\n\n/**\n * A set of constants used internally when processing requests.\n */\nconst Constants = {\n DefaultScope: \"/.default\",\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n },\n};\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nconst DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler 
represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. 
All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n/**\n * We will retrieve the challenge only if the response status code was 401,\n * and if the response contained the header \"WWW-Authenticate\" with a non-empty value.\n */\nfunction getChallenge(response: HttpOperationResponse): string | undefined {\n const challenge = response.headers.get(\"WWW-Authenticate\");\n if (response.status === 401 && challenge) {\n return challenge;\n }\n return;\n}\n\n/**\n * Challenge structure\n */\ninterface Challenge {\n authorization_uri: string;\n resource_id: string;\n}\n\n/**\n * Converts: `Bearer a=\"b\" c=\"d\"`.\n * Into: `[ { a: 'b', c: 'd' }]`.\n *\n * @internal\n */\nfunction parseChallenge(challenge: string): any {\n const bearerChallenge = challenge.slice(\"Bearer \".length);\n const challengeParts = `${bearerChallenge.trim()} `.split(\" \").filter((x) => x);\n const keyValuePairs = challengeParts.map((keyValue) =>\n (([key, value]) => ({ [key]: value }))(keyValue.trim().split(\"=\"))\n );\n // Key-value pairs to plain object:\n return keyValuePairs.reduce((a, b) => ({ ...a, ...b }), {});\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\n\nexport function storageBearerTokenChallengeAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n let getToken = createTokenCycler(credential, scopes);\n\n class StorageBearerTokenChallengeAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const getTokenInternal = getToken;\n const token = (\n await getTokenInternal({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n })\n ).token;\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n\n const response = await this._nextPolicy.sendRequest(webResource);\n\n if (response?.status === 401) {\n const challenge = getChallenge(response);\n if (challenge) {\n const challengeInfo: Challenge = parseChallenge(challenge);\n const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope;\n const parsedAuthUri = 
URLBuilder.parse(challengeInfo.authorization_uri);\n const pathSegments = parsedAuthUri.getPath()!.split(\"/\");\n const tenantId = pathSegments[1];\n const getTokenForChallenge = createTokenCycler(credential, challengeScopes);\n\n const tokenForChallenge = (\n await getTokenForChallenge({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n tenantId: tenantId,\n })\n ).token;\n\n getToken = getTokenForChallenge;\n webResource.headers.set(\n Constants.HeaderConstants.AUTHORIZATION,\n `Bearer ${tokenForChallenge}`\n );\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return response;\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js new file mode 100644 index 00000000..ad35d504 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, isNode, } from "@azure/core-http"; +import { HeaderConstants, URLConstants } from "../utils/constants"; +import { setURLParameter } from "../utils/utils.common"; +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +export class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. 
+ * + * @param request - + */ + async sendRequest(request) { + if (isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=StorageBrowserPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map new file mode 100644 index 00000000..0916fcf8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageBrowserPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageBrowserPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EAEjB,MAAM,GAIP,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,eAAe,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AACnE,OAAO,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAExD;;;;;;;;;;GAUG;AACH,MAAM,OAAO,oBAAqB,SAAQ,iBAAiB;IACzD;;;;OAIG;IACH,wGAAwG;IACxG,uEAAuE;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,IAAI,MAAM,EAAE;YACV,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,MAAM,EAAE;YACrF,OAAO,CAAC,GAAG,GAAG,eAAe,CAC3B,OAAO,CAAC,GAAG,EACX,YAAY,CAAC,UAAU,CAAC,sBAAsB,EAC9C,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAChC,CAAC;SACH;QAED,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;QAE/C,oFAAoF;QACpF,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,cAAc,CAAC,CAAC;QAEvD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants, URLConstants } from \"../utils/constants\";\nimport { setURLParameter } from \"../utils/utils.common\";\n\n/**\n * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:\n *\n * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.\n * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL\n * thus avoid the browser cache.\n *\n * 2. Remove cookie header for security\n *\n * 3. Remove content-length header to avoid browsers warning\n */\nexport class StorageBrowserPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of StorageBrowserPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n return this._nextPolicy.sendRequest(request);\n }\n\n if (request.method.toUpperCase() === \"GET\" || request.method.toUpperCase() === \"HEAD\") {\n request.url = setURLParameter(\n request.url,\n URLConstants.Parameters.FORCE_BROWSER_NO_CACHE,\n new Date().getTime().toString()\n );\n }\n\n request.headers.remove(HeaderConstants.COOKIE);\n\n // According to XHR standards, content-length should be fully controlled by browsers\n request.headers.remove(HeaderConstants.CONTENT_LENGTH);\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js new file mode 100644 index 00000000..04003738 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js @@ -0,0 +1,214 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +import { BaseRequestPolicy, } from "@azure/core-http"; +import { URLConstants } from "../utils/constants"; +import { delay, setURLHost, setURLParameter } from "../utils/utils.common"; +import { logger } from "../log"; +/** + * A factory method used to generated a RetryPolicy factory. + * + * @param retryOptions - + */ +export function NewRetryPolicyFactory(retryOptions) { + return { + create: (nextPolicy, options) => { + return new StorageRetryPolicy(nextPolicy, options, retryOptions); + }, + }; +} +/** + * RetryPolicy types. + */ +export var StorageRetryPolicyType; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(StorageRetryPolicyType || (StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const RETRY_ABORT_ERROR = new AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +export class StorageRetryPolicy extends BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? 
Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + } +} +//# sourceMappingURL=StorageRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map new file mode 100644 index 00000000..11f74f2f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageRetryPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAErD,OAAO,EAEL,iBAAiB,GAOlB,MAAM,kBAAkB,CAAC;AAG1B,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAC3E,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC;;;;GAIG;AACH,MAAM,UAAU,qBAAqB,CAAC,YAAkC;IACtE,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAsB,EAAE;YACvF,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACnE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,CAAN,IAAY,sBASX;AATD,WAAY,sBAAsB;IAChC;;OAEG;IACH,iFAAW,CAAA;IACX;;OAEG;IACH,qEAAK,CAAA;AACP,CAAC,EATW,sBAAsB,KAAtB,sBAAsB,QASjC;AAED,wCAAwC;AACxC,MAAM,qBAAqB,GAAwB;IACjD,iBAAiB,EAAE,GAAG,GAAG,IAAI;IAC7B,QAAQ,EAAE,CAAC;IACX,cAAc,EAAE,CAAC,GAAG,IAAI;IACxB,eAAe,EAAE,sBAAsB,CAAC,WAAW;IACnD,aAAa,EAAE,EAAE;IACjB,cAAc,EAAE,SAAS,EAAE,2CAA2C;CACvE,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;AAEvE;;GAEG;AACH,MAAM,OAAO,kBAAmB,SAAQ,iBAAiB;IAMvD;;;;;;OAMG;IACH,YACE,UAAyB,EACzB,OAA6B,EAC7B,eAAoC,qBAAqB;QAEzD,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAE3B,2BAA2B;QAC3B,IAAI,CAAC,YAAY,GAAG;YAClB,eAAe,EAAE,YAAY,CAAC,eAAe;gBAC3C,CAAC,CAAC,YAAY,CAAC,eAAe;gBAC9B,CAAC,CAAC,qBAAqB,CAAC,eAAe;YAEzC,QAAQ,EACN,YAAY,CAAC,QAAQ,IAAI,YAAY,CAAC,QAAQ,IAAI,CAAC;gBACjD,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,QAAQ,CAAC;gBACnC,CAAC,CAAC,qBAAqB,CAAC,QAAQ;YAEpC,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;gBAC7D,CAAC,CAAC,YAAY,CAAC,cAAc;gBAC7B,CAAC,CAAC,qBAAqB,CAAC,cAAc;YAE1C,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;gBAC7D,CAAC,CAAC,IAAI,CAAC,GAAG,CACN,YAAY,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB;oBAC5B,CAAC,CAAC,YAAY,CAAC,iBAAiB;oBAChC,CAAC,CAAC,qBAAqB,CAAC,iBAAkB,CAC7C;gBACH,CAAC,CAAC,qBAAqB,CAAC,cAAc;YAE1C,iBAAiB,EACf,YAAY,CAAC,iBAAiB,IAAI,YAAY,CAAC,iBAAiB,IAAI,CAAC;gBACnE,CAAC,CAAC,YAAY,CAAC,iBAAiB;gBAChC,CAAC,CAAC,qBAAqB,CAAC,iBAAiB;YAE7C,aAAa,EAAE,YAAY,CAAC,aAAa;gBACvC,CAAC,CAAC,YAAY,CAAC,aAAa;gBAC5B,CAAC,CAAC,qBAAqB,CAAC,aAAa;SACxC,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;IACpD,CAAC;IAED;;;;;;;;;OASG;IACO,KAAK,CAAC,kBAAkB,CAChC,OAAoB,EACpB,eAAwB,EACxB,OAAe;QAEf,MAAM,UAAU,GAAgB,OAAO,CAAC,KAAK,EAAE,CAAC;QAEhD,MAAM,cAAc,GAClB,eAAe;YACf,CAAC,IAAI,CAAC,YAAY,CAAC,aA
Aa;YAChC,CAAC,CAAC,OAAO,CAAC,MAAM,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,IAAI,OAAO,CAAC,MAAM,KAAK,SAAS,CAAC;YACxF,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC;QAEpB,IAAI,CAAC,cAAc,EAAE;YACnB,UAAU,CAAC,GAAG,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,aAAc,CAAC,CAAC;SAC/E;QAED,kEAAkE;QAClE,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE;YACpC,UAAU,CAAC,GAAG,GAAG,eAAe,CAC9B,UAAU,CAAC,GAAG,EACd,YAAY,CAAC,UAAU,CAAC,OAAO,EAC/B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,cAAe,GAAG,IAAI,CAAC,CAAC,QAAQ,EAAE,CAChE,CAAC;SACH;QAED,IAAI,QAA2C,CAAC;QAChD,IAAI;YACF,MAAM,CAAC,IAAI,CAAC,2BAA2B,OAAO,IAAI,cAAc,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC;YAC9F,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC1D,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE;gBACxD,OAAO,QAAQ,CAAC;aACjB;YAED,eAAe,GAAG,eAAe,IAAI,CAAC,CAAC,cAAc,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,CAAC,CAAC;SACnF;QAAC,OAAO,GAAQ,EAAE;YACjB,MAAM,CAAC,KAAK,CAAC,uCAAuC,GAAG,CAAC,OAAO,WAAW,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC;YACtF,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,CAAC,EAAE;gBAC7D,MAAM,GAAG,CAAC;aACX;SACF;QAED,MAAM,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;QAC/D,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,eAAe,EAAE,EAAE,OAAO,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;;OAOG;IACO,WAAW,CACnB,cAAuB,EACvB,OAAe,EACf,QAAgC,EAChC,GAAe;QAEf,IAAI,OAAO,IAAI,IAAI,CAAC,YAAY,CAAC,QAAS,EAAE;YAC1C,MAAM,CAAC,IAAI,CACT,2BAA2B,OAAO,gBAAgB,IAAI,CAAC,YAAY;iBAChE,QAAS,mBAAmB,CAChC,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QAED,iFAAiF;QACjF,uBAAuB;QACvB,MAAM,eAAe,GAAG;YACtB,WAAW;YACX,iBAAiB;YACjB,cAAc;YACd,YAAY;YACZ,QAAQ;YACR,WAAW;YACX,SAAS;YACT,OAAO;YACP,oBAAoB,EAAE,2DAA2D;SAClF,CAAC;QACF,IAAI,GAAG,EAAE;YACP,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IACE,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;oBAC/C,GAAG,CAAC,OAAO,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;oBAClD,CAAC,GAAG,CAAC,IAAI,IAAI,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,WAAW,EAAE,KAAK,cAAc,CAAC,EAClE;oBACA,MAAM,CAAC,IAAI,CAAC,8BAA8B,cAAc,qBAAqB,CAAC,CAAC;oBAC/E,OAAO,IAAI,CAAC;iBACb;aACF;SACF;QAED,kFAAkF;QAClF,gFAAgF;QAChF,gEAAgE;QAChE,IAAI,QAAQ,IAAI,GAAG,EAAE;YACnB,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;YACzE,IAAI,CAAC,cAAc,IAAI,UAAU,KAAK,GAAG,EAAE;gBACzC,MAAM,CAAC,IAAI,CAAC,qDAAqD,CAAC,CAAC;gBACnE,OAAO,IAAI,CAAC;aACb;YAED,0CAA0C;YAC1C,IAAI,UAAU,KAAK,GAAG,IAAI,UAAU,KAAK,GAAG,EAAE;gBAC5C,MAAM,CAAC,IAAI,CAAC,2CAA2C,UAAU,GAAG,CAAC,CAAC;gBACtE,OAAO,IAAI,CAAC;aACb;SACF;QAED,IAAI,CAAA,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,IAAI,MAAK,aAAa,KAAI,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,OAAO,CAAC,UAAU,CAAC,iCAAiC,CAAC,CAAA,EAAE;YAC7F,MAAM,CAAC,IAAI,CACT,iFAAiF,CAClF,CAAC;YACF,OAAO,IAAI,CAAC;SACb;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;OAMG;IACK,KAAK,CAAC,KAAK,CAAC,cAAuB,EAAE,OAAe,EAAE,WAA6B;QACzF,IAAI,aAAa,GAAW,CAAC,CAAC;QAE9B,IAAI,cAAc,EAAE;YAClB,QAAQ,IAAI,CAAC,YAAY,CAAC,eAAe,EAAE;gBACzC,KAAK,sBAAsB,CAAC,WAAW;oBACrC,aAAa,GAAG,IAAI,CAAC,GAAG,CACtB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,cAAe,EAClE,IAAI,CAAC,YAAY,CAAC,iBAAkB,CACrC,CAAC;oBACF,MAAM;gBACR,KAAK,sBAAsB,CAAC,KAAK;oBAC/B,aAAa,GAAG,IAAI,CAAC,YAAY,CAAC,cAAe,CAAC;oBAClD,MAAM;aACT;SACF;aAAM;YACL,aAAa,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC;SACtC;QAED,MAAM,CAAC,IAAI,CAAC,0BAA0B,aAAa,IAAI,CAAC,CAAC;QACzD,OAAO,KAAK,CAAC,aAAa,EAAE,WAAW,EAAE,iBAAiB,CAAC,CAAC;IAC9D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError } from 
\"@azure/abort-controller\";\n\nimport {\n AbortSignalLike,\n BaseRequestPolicy,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RestError,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { StorageRetryOptions } from \"../StorageRetryPolicyFactory\";\nimport { URLConstants } from \"../utils/constants\";\nimport { delay, setURLHost, setURLParameter } from \"../utils/utils.common\";\nimport { logger } from \"../log\";\n\n/**\n * A factory method used to generated a RetryPolicy factory.\n *\n * @param retryOptions -\n */\nexport function NewRetryPolicyFactory(retryOptions?: StorageRetryOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy => {\n return new StorageRetryPolicy(nextPolicy, options, retryOptions);\n },\n };\n}\n\n/**\n * RetryPolicy types.\n */\nexport enum StorageRetryPolicyType {\n /**\n * Exponential retry. Retry time delay grows exponentially.\n */\n EXPONENTIAL,\n /**\n * Linear retry. Retry time delay grows linearly.\n */\n FIXED,\n}\n\n// Default values of StorageRetryOptions\nconst DEFAULT_RETRY_OPTIONS: StorageRetryOptions = {\n maxRetryDelayInMs: 120 * 1000,\n maxTries: 4,\n retryDelayInMs: 4 * 1000,\n retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,\n secondaryHost: \"\",\n tryTimeoutInMs: undefined, // Use server side default timeout strategy\n};\n\nconst RETRY_ABORT_ERROR = new AbortError(\"The operation was aborted.\");\n\n/**\n * Retry policy with exponential retry and linear retry implemented.\n */\nexport class StorageRetryPolicy extends BaseRequestPolicy {\n /**\n * RetryOptions.\n */\n private readonly retryOptions: StorageRetryOptions;\n\n /**\n * Creates an instance of RetryPolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param retryOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryOptions: StorageRetryOptions = DEFAULT_RETRY_OPTIONS\n ) {\n super(nextPolicy, options);\n\n // Initialize retry options\n this.retryOptions = {\n retryPolicyType: retryOptions.retryPolicyType\n ? retryOptions.retryPolicyType\n : DEFAULT_RETRY_OPTIONS.retryPolicyType,\n\n maxTries:\n retryOptions.maxTries && retryOptions.maxTries >= 1\n ? Math.floor(retryOptions.maxTries)\n : DEFAULT_RETRY_OPTIONS.maxTries,\n\n tryTimeoutInMs:\n retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0\n ? retryOptions.tryTimeoutInMs\n : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,\n\n retryDelayInMs:\n retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0\n ? Math.min(\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs!\n )\n : DEFAULT_RETRY_OPTIONS.retryDelayInMs,\n\n maxRetryDelayInMs:\n retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,\n\n secondaryHost: retryOptions.secondaryHost\n ? retryOptions.secondaryHost\n : DEFAULT_RETRY_OPTIONS.secondaryHost,\n };\n }\n\n /**\n * Sends request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n return this.attemptSendRequest(request, false, 1);\n }\n\n /**\n * Decide and perform next retry. Won't mutate request parameter.\n *\n * @param request -\n * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then\n * the resource was not found. This may be due to replication delay. 
So, in this\n * case, we'll never try the secondary again for this operation.\n * @param attempt - How many retries has been attempted to performed, starting from 1, which includes\n * the attempt will be performed by this method call.\n */\n protected async attemptSendRequest(\n request: WebResource,\n secondaryHas404: boolean,\n attempt: number\n ): Promise {\n const newRequest: WebResource = request.clone();\n\n const isPrimaryRetry =\n secondaryHas404 ||\n !this.retryOptions.secondaryHost ||\n !(request.method === \"GET\" || request.method === \"HEAD\" || request.method === \"OPTIONS\") ||\n attempt % 2 === 1;\n\n if (!isPrimaryRetry) {\n newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost!);\n }\n\n // Set the server-side timeout query parameter \"timeout=[seconds]\"\n if (this.retryOptions.tryTimeoutInMs) {\n newRequest.url = setURLParameter(\n newRequest.url,\n URLConstants.Parameters.TIMEOUT,\n Math.floor(this.retryOptions.tryTimeoutInMs! / 1000).toString()\n );\n }\n\n let response: HttpOperationResponse | undefined;\n try {\n logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? \"Primary\" : \"Secondary\"}`);\n response = await this._nextPolicy.sendRequest(newRequest);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {\n return response;\n }\n\n secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);\n } catch (err: any) {\n logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) {\n throw err;\n }\n }\n\n await this.delay(isPrimaryRetry, attempt, request.abortSignal);\n return this.attemptSendRequest(request, secondaryHas404, ++attempt);\n }\n\n /**\n * Decide whether to retry according to last HTTP response and retry counters.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param response -\n * @param err -\n */\n protected shouldRetry(\n isPrimaryRetry: boolean,\n attempt: number,\n response?: HttpOperationResponse,\n err?: RestError\n ): boolean {\n if (attempt >= this.retryOptions.maxTries!) {\n logger.info(\n `RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions\n .maxTries!}, no further try.`\n );\n return false;\n }\n\n // Handle network failures, you may need to customize the list when you implement\n // your own http client\n const retriableErrors = [\n \"ETIMEDOUT\",\n \"ESOCKETTIMEDOUT\",\n \"ECONNREFUSED\",\n \"ECONNRESET\",\n \"ENOENT\",\n \"ENOTFOUND\",\n \"TIMEOUT\",\n \"EPIPE\",\n \"REQUEST_SEND_ERROR\", // For default xhr based http client provided in ms-rest-js\n ];\n if (err) {\n for (const retriableError of retriableErrors) {\n if (\n err.name.toUpperCase().includes(retriableError) ||\n err.message.toUpperCase().includes(retriableError) ||\n (err.code && err.code.toString().toUpperCase() === retriableError)\n ) {\n logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`);\n return true;\n }\n }\n }\n\n // If attempt was against the secondary & it returned a StatusNotFound (404), then\n // the resource was not found. This may be due to replication delay. So, in this\n // case, we'll never try the secondary again for this operation.\n if (response || err) {\n const statusCode = response ? response.status : err ? 
err.statusCode : 0;\n if (!isPrimaryRetry && statusCode === 404) {\n logger.info(`RetryPolicy: Secondary access with 404, will retry.`);\n return true;\n }\n\n // Server internal error or server timeout\n if (statusCode === 503 || statusCode === 500) {\n logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`);\n return true;\n }\n }\n\n if (err?.code === \"PARSE_ERROR\" && err?.message.startsWith(`Error \"Error: Unclosed root tag`)) {\n logger.info(\n \"RetryPolicy: Incomplete XML response likely due to service timeout, will retry.\"\n );\n return true;\n }\n\n return false;\n }\n\n /**\n * Delay a calculated time between retries.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param abortSignal -\n */\n private async delay(isPrimaryRetry: boolean, attempt: number, abortSignal?: AbortSignalLike) {\n let delayTimeInMs: number = 0;\n\n if (isPrimaryRetry) {\n switch (this.retryOptions.retryPolicyType) {\n case StorageRetryPolicyType.EXPONENTIAL:\n delayTimeInMs = Math.min(\n (Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs!,\n this.retryOptions.maxRetryDelayInMs!\n );\n break;\n case StorageRetryPolicyType.FIXED:\n delayTimeInMs = this.retryOptions.retryDelayInMs!;\n break;\n }\n } else {\n delayTimeInMs = Math.random() * 1000;\n }\n\n logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`);\n return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js new file mode 100644 index 00000000..ac9b2ce5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HeaderConstants } from "../utils/constants"; +import { getURLPath, getURLQueries } from "../utils/utils.common"; +import { CredentialPolicy } from "./CredentialPolicy"; +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +export class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. 
+ * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } +} +//# sourceMappingURL=StorageSharedKeyCredentialPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map new file mode 100644 index 00000000..a421e659 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"StorageSharedKeyCredentialPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageSharedKeyCredentialPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAClE,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;GAEG;AACH,MAAM,OAAO,gCAAiC,SAAQ,gBAAgB;IAMpE;;;;;OAKG;IACH,YACE,UAAyB,EACzB,OAA6B,EAC7B,OAAmC;QAEnC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAED;;;;OAIG;IACO,WAAW,CAAC,OAAoB;QACxC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;QAEzE,IACE,OAAO,CAAC,IAAI;YACZ,CAAC,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAK,OAAO,CAAC,IAAe,KAAK,SAAS,CAAC;YAC5E,OAAO,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EACvB;YACA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,cAAc,EAAE,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;SACtF;QAED,MAAM,YAAY,GAChB;YACE,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE;YAC5B,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,cAAc,CAAC;YAClE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,WAAW,CAAC;YAC/D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,YAAY,CAAC;YAChE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,CAAC;YACxD,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,iBAAiB,CAAC;YACrE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,QAAQ,CAAC;YAC5D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,aAAa,CAAC;YACjE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,mBAAmB,CAAC;YACvE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC;SAC1D,CAAC,IAAI,CAAC,IAAI,CAAC;YACZ,IAAI;YACJ,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC;YAC3C,IAAI,CAAC,8BAA8B,CAAC,OAAO,CAAC,CAAC;QAE/C,MAAM,SAAS,GAAW,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;QACvE,OAAO,CAAC,OAAO,CAAC,GAAG,CACjB,eAAe,CAAC,aAAa,EAC7B,aAAa,IAAI,CAAC,OAAO,CAAC,WAAW,IAAI,SAAS,EAAE,CACrD,CAAC;QAEF,uCAAuC;QACvC,0DAA0D;QAC1D,mEAAmE;QACnE,+EAA+E;QAC/E,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;;;;OAMG;IACK,oBAAoB,CAAC,OAAoB,EAAE,UAAkB;QACnE,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;QAE9C,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,EAAE,CAAC;SACX;QAED,0EAA0E;QAC1E,sEAAsE;QACtE,yFAAyF;QACzF,IAAI,UAAU,KAAK,eAAe,CAAC,cAAc,IAAI,KAAK,KAAK,GAAG,EAAE;YAClE,OAAO,EAAE,CAAC;SACX;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;;;;;;OAYG;IACK,6BAA6B,CAAC,OAAoB;QACxD,IAAI,YAAY,GAAG,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,EAAE;YACjE,OAAO,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,eAAe,CAAC,kBAAkB,CAAC,CAAC;QACjF,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAU,EAAE;YACjC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAClE,CAAC,CAAC,CAAC;QAEH,2BAA2B;QAC3B,YAAY,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE;YACzD,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;gBACjF,OAAO,KAAK,CAAC;aACd;YACD,OAAO,IAAI,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,IAAI,gCAAgC,GAAW,EAAE,CAAC;QAClD,YAAY,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE;YAC9B,gCAAgC,IAAI,GAAG,MAAM,CAAC,IAAI;iBAC/C,WAAW,EAAE;iBACb,SAAS,EAAE,IAAI,MAAM,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC;QAChD,CAAC,CAAC,CAAC;QAEH,OAAO,gCAAgC,CAAC;IAC1C,CAAC;IAED;;;;OAIG;IACK,8BAA8B,CAAC,OAAoB;QACzD,MAAM,IAAI,GAAG,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC;QAE5C,IAAI,2BAA2B,GAAW,EAAE,CAAC;QAC7C,2BAA2B,IAAI,IAAI,IAAI,CAAC,OAAO,CAAC,WAAW,GAAG,IAAI,EAAE,CAAC;QAErE,MAAM,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC3C,MAAM,gBAAgB,GAA8B,EAAE,CAAC;Q
ACvD,IAAI,OAAO,EAAE;YACX,MAAM,SAAS,GAAa,EAAE,CAAC;YAC/B,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;gBACzB,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;oBACtD,MAAM,YAAY,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC;oBACvC,gBAAgB,CAAC,YAAY,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;oBAC9C,SAAS,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;iBAC9B;aACF;YAED,SAAS,CAAC,IAAI,EAAE,CAAC;YACjB,KAAK,MAAM,GAAG,IAAI,SAAS,EAAE;gBAC3B,2BAA2B,IAAI,KAAK,GAAG,IAAI,kBAAkB,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC;aACxF;SACF;QAED,OAAO,2BAA2B,CAAC;IACrC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions, WebResource } from \"@azure/core-http\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { HeaderConstants } from \"../utils/constants\";\nimport { getURLPath, getURLQueries } from \"../utils/utils.common\";\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.\n */\nexport class StorageSharedKeyCredentialPolicy extends CredentialPolicy {\n /**\n * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy\n */\n private readonly factory: StorageSharedKeyCredential;\n\n /**\n * Creates an instance of StorageSharedKeyCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n * @param factory -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n factory: StorageSharedKeyCredential\n ) {\n super(nextPolicy, options);\n this.factory = factory;\n }\n\n /**\n * Signs request.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());\n\n if (\n request.body &&\n (typeof request.body === \"string\" || (request.body as Buffer) !== undefined) &&\n request.body.length > 0\n ) {\n request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));\n }\n\n const stringToSign: string =\n [\n request.method.toUpperCase(),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),\n this.getHeaderValueToSign(request, HeaderConstants.DATE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.RANGE),\n ].join(\"\\n\") +\n \"\\n\" +\n this.getCanonicalizedHeadersString(request) +\n this.getCanonicalizedResourceString(request);\n\n const signature: string = this.factory.computeHMACSHA256(stringToSign);\n request.headers.set(\n HeaderConstants.AUTHORIZATION,\n `SharedKey ${this.factory.accountName}:${signature}`\n );\n\n // console.log(`[URL]:${request.url}`);\n // console.log(`[HEADERS]:${request.headers.toString()}`);\n // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);\n // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);\n return request;\n }\n\n /**\n * Retrieve header value according to shared key sign 
rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n *\n * @param request -\n * @param headerName -\n */\n private getHeaderValueToSign(request: WebResource, headerName: string): string {\n const value = request.headers.get(headerName);\n\n if (!value) {\n return \"\";\n }\n\n // When using version 2015-02-21 or later, if Content-Length is zero, then\n // set the Content-Length part of the StringToSign to an empty string.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n if (headerName === HeaderConstants.CONTENT_LENGTH && value === \"0\") {\n return \"\";\n }\n\n return value;\n }\n\n /**\n * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:\n * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.\n * 2. Convert each HTTP header name to lowercase.\n * 3. Sort the headers lexicographically by header name, in ascending order.\n * Each header may appear only once in the string.\n * 4. Replace any linear whitespace in the header value with a single space.\n * 5. Trim any whitespace around the colon in the header.\n * 6. Finally, append a new-line character to each canonicalized header in the resulting list.\n * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.\n *\n * @param request -\n */\n private getCanonicalizedHeadersString(request: WebResource): string {\n let headersArray = request.headers.headersArray().filter((value) => {\n return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);\n });\n\n headersArray.sort((a, b): number => {\n return a.name.toLowerCase().localeCompare(b.name.toLowerCase());\n });\n\n // Remove duplicate headers\n headersArray = headersArray.filter((value, index, array) => {\n if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {\n return false;\n }\n return true;\n });\n\n let canonicalizedHeadersStringToSign: string = \"\";\n headersArray.forEach((header) => {\n canonicalizedHeadersStringToSign += `${header.name\n .toLowerCase()\n .trimRight()}:${header.value.trimLeft()}\\n`;\n });\n\n return canonicalizedHeadersStringToSign;\n }\n\n /**\n * Retrieves the webResource canonicalized resource string.\n *\n * @param request -\n */\n private getCanonicalizedResourceString(request: WebResource): string {\n const path = getURLPath(request.url) || \"/\";\n\n let canonicalizedResourceString: string = \"\";\n canonicalizedResourceString += `/${this.factory.accountName}${path}`;\n\n const queries = getURLQueries(request.url);\n const lowercaseQueries: { [key: string]: string } = {};\n if (queries) {\n const queryKeys: string[] = [];\n for (const key in queries) {\n if (Object.prototype.hasOwnProperty.call(queries, key)) {\n const lowercaseKey = key.toLowerCase();\n lowercaseQueries[lowercaseKey] = queries[key];\n queryKeys.push(lowercaseKey);\n }\n }\n\n queryKeys.sort();\n for (const key of queryKeys) {\n canonicalizedResourceString += `\\n${key}:${decodeURIComponent(lowercaseQueries[key])}`;\n }\n }\n\n return canonicalizedResourceString;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js new file mode 100644 index 00000000..f5b173bb --- /dev/null +++ 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, HttpHeaders, isNode, } from "@azure/core-http"; +import { HeaderConstants } from "../utils/constants"; +/** + * TelemetryPolicy is a policy used to tag user-agent header for every requests. + */ +export class TelemetryPolicy extends BaseRequestPolicy { + /** + * Creates an instance of TelemetryPolicy. + * @param nextPolicy - + * @param options - + * @param telemetry - + */ + constructor(nextPolicy, options, telemetry) { + super(nextPolicy, options); + this.telemetry = telemetry; + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (isNode) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=TelemetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map new file mode 100644 index 00000000..5b4b98bf --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TelemetryPolicy.js","sourceRoot":"","sources":["../../../../src/policies/TelemetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EACjB,WAAW,EAEX,MAAM,GAIP,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AAErD;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,iBAAiB;IAMpD;;;;;OAKG;IACH,YAAY,UAAyB,EAAE,OAA6B,EAAE,SAAiB;QACrF,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;IAC7B,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,IAAI,MAAM,EAAE;YACV,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACrC;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;gBACpD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;aACjE;SACF;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpHeaders,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants } from \"../utils/constants\";\n\n/**\n * TelemetryPolicy is a policy used to tag user-agent header for every requests.\n */\nexport class TelemetryPolicy extends BaseRequestPolicy {\n /**\n * Telemetry string.\n */\n public readonly telemetry: string;\n\n /**\n * Creates an instance of TelemetryPolicy.\n * @param nextPolicy -\n * @param options -\n * @param telemetry -\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, telemetry: string) {\n super(nextPolicy, options);\n this.telemetry = telemetry;\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n if (!request.headers.get(HeaderConstants.USER_AGENT)) {\n request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);\n }\n }\n\n return 
this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js new file mode 100644 index 00000000..52034245 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js @@ -0,0 +1,130 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { delay } from "@azure/core-http"; +import { Poller } from "@azure/core-lro"; +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. + * + * @hidden + */ +export class BlobBeginCopyFromUrlPoller extends Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return delay(this.intervalInMs); + } +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? 
+ options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } + catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} +//# sourceMappingURL=BlobStartCopyFromUrlPoller.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map new file mode 100644 index 00000000..b41e8693 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobStartCopyFromUrlPoller.js","sourceRoot":"","sources":["../../../../src/pollers/BlobStartCopyFromUrlPoller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AACzC,OAAO,EAAqC,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAsE5E;;;;;GAKG;AACH,MAAM,OAAO,0BAA2B,SAAQ,MAG/C;IAGC,YAAY,OAA0C;QACpD,MAAM,EACJ,UAAU,EACV,UAAU,EACV,YAAY,GAAG,KAAK,EACpB,UAAU,EACV,UAAU,EACV,uBAAuB,GACxB,GAAG,OAAO,CAAC;QAEZ,IAAI,KAAgD,CAAC;QAErD,IAAI,UAAU,EAAE;YACd,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC;SACtC;QAED,MAAM,SAAS,GAAG,qCAAqC,iCAClD,KAAK,KACR,UAAU;YACV,UAAU;YACV,uBAAuB,IACvB,CAAC;QAEH,KAAK,CAAC,SAAS,CAAC,CAAC;QAEjB,IAAI,OAAO,UAAU,KAAK,UAAU,EAAE;YACpC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;SAC7B;QAED,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAEM,KAAK;QACV,OAAO,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;IAClC,CAAC;CACF;AAED;;;;;GAKG;AACH,MAAM,MAAM,GAAgD,KAAK,UAAU,MAAM,CAE/E,OAAO,GAAG,EAAE;IAEZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACzB,MAAM,EAAE,MAAM,EAAE,GAAG,KAAK,CAAC;IACzB,IAAI,KAAK,CAAC,WAAW,EAAE;QACrB,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;KACrD;IAED,IAAI,CAAC,MAAM,EAAE;QACX,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;QACzB,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;KACrD;IAED,sFAAsF;IACtF,MAAM,KAAK,CAAC,UAAU,CAAC,gBAAgB,CAAC,MAAM,EAAE;QAC9C,WAAW,EAAE,OAAO,CAAC,WAAW;KACjC,CAAC,CAAC;IACH,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IAEzB,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,MAAM,GAAgD,KAAK,UAAU,MAAM,CAE/E,OAAO,GAAG,EAAE;IAEZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACzB,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,uBAAuB,EAAE,GAAG,KAAK,CAAC;IAElE,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;QACpB,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,gBAAgB,CAAC,UAAU,EAAE,uBAAuB,CAAC,CAAC;QAEtF,4BAA4B;QAC5B,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAC7B,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,EAAE;YACnC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;YACtB,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;SAC1B;KACF;SAAM,IAAI,CAAC,KAAK,CAA
C,WAAW,EAAE;QAC7B,IAAI;YACF,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;YAC1F,MAAM,EAAE,UAAU,EAAE,YAAY,EAAE,GAAG,MAAM,CAAC;YAC5C,MAAM,gBAAgB,GAAG,KAAK,CAAC,YAAY,CAAC;YAC5C,IAAI,YAAY,EAAE;gBAChB,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC;aACnC;YACD,IACE,UAAU,KAAK,SAAS;gBACxB,YAAY,KAAK,gBAAgB;gBACjC,OAAO,OAAO,CAAC,YAAY,KAAK,UAAU,EAC1C;gBACA,2CAA2C;gBAC3C,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;aAC7B;iBAAM,IAAI,UAAU,KAAK,SAAS,EAAE;gBACnC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;gBACtB,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;aAC1B;iBAAM,IAAI,UAAU,KAAK,QAAQ,EAAE;gBAClC,KAAK,CAAC,KAAK,GAAG,IAAI,KAAK,CACrB,kCAAkC,MAAM,CAAC,qBAAqB,IAAI,SAAS,GAAG,CAC/E,CAAC;gBACF,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;aAC1B;SACF;QAAC,OAAO,GAAQ,EAAE;YACjB,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC;YAClB,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;SAC1B;KACF;IAED,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,QAAQ,GAAkD,SAAS,QAAQ;IAG/E,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE;QAC1D,2FAA2F;QAC3F,IAAI,GAAG,KAAK,YAAY,EAAE;YACxB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,KAAK,CAAC;IACf,CAAC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF;;;GAGG;AACH,SAAS,qCAAqC,CAC5C,KAAoC;IAEpC,OAAO;QACL,KAAK,oBAAO,KAAK,CAAE;QACnB,MAAM;QACN,QAAQ;QACR,MAAM;KACP,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { delay } from \"@azure/core-http\";\nimport { PollOperation, PollOperationState, Poller } from \"@azure/core-lro\";\nimport { BlobClient, BlobStartCopyFromURLOptions, BlobBeginCopyFromURLResponse } from \"../Clients\";\n\n/**\n * Defines the operations from a {@link BlobClient} that are needed for the poller\n * returned by {@link BlobClient.beginCopyFromURL} to work.\n */\nexport type CopyPollerBlobClient = Pick & {\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptions\n ): Promise;\n};\n\n/**\n * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}.\n *\n * This state is passed into the user-specified `onProgress` callback\n * whenever copy progress is detected.\n */\nexport interface BlobBeginCopyFromUrlPollState\n extends PollOperationState {\n /**\n * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}.\n */\n readonly blobClient: CopyPollerBlobClient;\n /**\n * The copyId that identifies the in-progress blob copy.\n */\n copyId?: string;\n /**\n * the progress of the blob copy as reported by the service.\n */\n copyProgress?: string;\n /**\n * The source URL provided in {@link BlobClient.beginCopyFromURL}.\n */\n copySource: string;\n /**\n * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call.\n * This is exposed for the poller and should not be modified directly.\n */\n readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * The PollOperation responsible for:\n * - performing the initial startCopyFromURL\n * - checking the copy status via getProperties\n * - cancellation via abortCopyFromURL\n * @hidden\n */\nexport interface BlobBeginCopyFromURLPollOperation\n extends PollOperation {}\n\n/**\n * The set of options used to configure the poller.\n * This is an internal interface populated by {@link BlobClient.beginCopyFromURL}.\n *\n * @hidden\n */\nexport interface BlobBeginCopyFromUrlPollerOptions {\n blobClient: CopyPollerBlobClient;\n copySource: string;\n intervalInMs?: number;\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n resumeFrom?: string;\n 
startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * This is the poller returned by {@link BlobClient.beginCopyFromURL}.\n * This can not be instantiated directly outside of this package.\n *\n * @hidden\n */\nexport class BlobBeginCopyFromUrlPoller extends Poller<\n BlobBeginCopyFromUrlPollState,\n BlobBeginCopyFromURLResponse\n> {\n public intervalInMs: number;\n\n constructor(options: BlobBeginCopyFromUrlPollerOptions) {\n const {\n blobClient,\n copySource,\n intervalInMs = 15000,\n onProgress,\n resumeFrom,\n startCopyFromURLOptions,\n } = options;\n\n let state: BlobBeginCopyFromUrlPollState | undefined;\n\n if (resumeFrom) {\n state = JSON.parse(resumeFrom).state;\n }\n\n const operation = makeBlobBeginCopyFromURLPollOperation({\n ...state,\n blobClient,\n copySource,\n startCopyFromURLOptions,\n });\n\n super(operation);\n\n if (typeof onProgress === \"function\") {\n this.onProgress(onProgress);\n }\n\n this.intervalInMs = intervalInMs;\n }\n\n public delay(): Promise {\n return delay(this.intervalInMs);\n }\n}\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst cancel: BlobBeginCopyFromURLPollOperation[\"cancel\"] = async function cancel(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n) {\n const state = this.state;\n const { copyId } = state;\n if (state.isCompleted) {\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n if (!copyId) {\n state.isCancelled = true;\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n await state.blobClient.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n });\n state.isCancelled = true;\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst update: BlobBeginCopyFromURLPollOperation[\"update\"] = async function update(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n): Promise {\n const state = this.state;\n const { blobClient, copySource, startCopyFromURLOptions } = state;\n\n if (!state.isStarted) {\n state.isStarted = true;\n const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions);\n\n // copyId is needed to abort\n state.copyId = result.copyId;\n if (result.copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n } else if (!state.isCompleted) {\n try {\n const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal });\n const { copyStatus, copyProgress } = result;\n const prevCopyProgress = state.copyProgress;\n if (copyProgress) {\n state.copyProgress = copyProgress;\n }\n if (\n copyStatus === \"pending\" &&\n copyProgress !== prevCopyProgress &&\n typeof options.fireProgress === \"function\"\n ) {\n // trigger in setTimeout, or swallow error?\n options.fireProgress(state);\n } else if (copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n } else if (copyStatus === \"failed\") {\n state.error = new Error(\n `Blob copy failed with reason: \"${result.copyStatusDescription || \"unknown\"}\"`\n );\n state.isCompleted = true;\n }\n } catch (err: any) {\n state.error = err;\n 
state.isCompleted = true;\n }\n }\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst toString: BlobBeginCopyFromURLPollOperation[\"toString\"] = function toString(\n this: BlobBeginCopyFromURLPollOperation\n) {\n return JSON.stringify({ state: this.state }, (key, value) => {\n // remove blobClient from serialized state since a client can't be hydrated from this info.\n if (key === \"blobClient\") {\n return undefined;\n }\n return value;\n });\n};\n\n/**\n * Creates a poll operation given the provided state.\n * @hidden\n */\nfunction makeBlobBeginCopyFromURLPollOperation(\n state: BlobBeginCopyFromUrlPollState\n): BlobBeginCopyFromURLPollOperation {\n return {\n state: { ...state },\n cancel,\n toString,\n update,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js new file mode 100644 index 00000000..14107386 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to create blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. 
+ * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} +//# sourceMappingURL=AccountSASPermissions.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js.map new file mode 100644 index 00000000..c1e00171 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASPermissions.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASPermissions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,qBAAqB;IAAlC;QA4GE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,kBAAa,GAAY,KAAK,CAAC;QAEtC;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,0BAAqB,GAAY,KAAK,CAAC;QAE9C;;WAEG;QACI,oBAAe,GAAY,KAAK,CAAC;IA0D1C,CAAC;IApOC;;;;OAIG;IACI,MAAM,CAAC,KAAK,CAAC,WAAmB;QACrC,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;QAE1D,KAAK,MAAM,CAAC,IAAI,WAAW,EAAE;YAC3B,QAAQ,CAAC,EAAE;gBACT,KAAK,GAAG;oBACN,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACnC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC3C,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACrC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACnD,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC7C,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,EAAE,CAAC,CAAC;aAC9D;SACF;QAED,OAAO,qBAAqB,CAAC;IAC/B,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,IAAI,CAAC,cAAyC;QAC1D,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;QAC1D,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;SACnC
;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;YACxB,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;SACpC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;YAChC,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;SAC5C;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;SACnC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;YAC1B,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;SACtC;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;YACxC,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;SACpD;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;YAClC,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;SAC9C;QACD,OAAO,qBAAqB,CAAC;IAC/B,CAAC;IAmED;;;;;;;;;OASG;IACI,QAAQ;QACb,iFAAiF;QACjF,wFAAwF;QACxF,iFAAiF;QACjF,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the\n * values are set, this should be serialized with toString and set as the permissions field on an\n * {@link AccountSASSignatureValues} object. 
It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class AccountSASPermissions {\n /**\n * Parse initializes the AccountSASPermissions fields from a string.\n *\n * @param permissions -\n */\n public static parse(permissions: string): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n\n for (const c of permissions) {\n switch (c) {\n case \"r\":\n accountSASPermissions.read = true;\n break;\n case \"w\":\n accountSASPermissions.write = true;\n break;\n case \"d\":\n accountSASPermissions.delete = true;\n break;\n case \"x\":\n accountSASPermissions.deleteVersion = true;\n break;\n case \"l\":\n accountSASPermissions.list = true;\n break;\n case \"a\":\n accountSASPermissions.add = true;\n break;\n case \"c\":\n accountSASPermissions.create = true;\n break;\n case \"u\":\n accountSASPermissions.update = true;\n break;\n case \"p\":\n accountSASPermissions.process = true;\n break;\n case \"t\":\n accountSASPermissions.tag = true;\n break;\n case \"f\":\n accountSASPermissions.filter = true;\n break;\n case \"i\":\n accountSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n accountSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission character: ${c}`);\n }\n }\n\n return accountSASPermissions;\n }\n\n /**\n * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n if (permissionLike.read) {\n accountSASPermissions.read = true;\n }\n if (permissionLike.write) {\n accountSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n accountSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n accountSASPermissions.deleteVersion = true;\n }\n if (permissionLike.filter) {\n accountSASPermissions.filter = true;\n }\n if (permissionLike.tag) {\n accountSASPermissions.tag = true;\n }\n if (permissionLike.list) {\n accountSASPermissions.list = true;\n }\n if (permissionLike.add) {\n accountSASPermissions.add = true;\n }\n if (permissionLike.create) {\n accountSASPermissions.create = true;\n }\n if (permissionLike.update) {\n accountSASPermissions.update = true;\n }\n if (permissionLike.process) {\n accountSASPermissions.process = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n accountSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n accountSASPermissions.permanentDelete = true;\n }\n return accountSASPermissions;\n }\n\n /**\n * Permission to read resources and list queues and tables granted.\n */\n public read: boolean = false;\n\n /**\n * Permission to write resources granted.\n */\n public write: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public delete: boolean = false;\n\n /**\n * Permission to delete versions granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n public list: boolean = false;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n public add: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public create: boolean = false;\n\n 
/**\n * Permissions to update messages and table entities granted.\n */\n public update: boolean = false;\n\n /**\n * Permission to get and delete messages granted.\n */\n public process: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Permission to filter blobs.\n */\n public filter: boolean = false;\n\n /**\n * Permission to set immutability policy.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Produces the SAS permissions string for an Azure Storage account.\n * Call this method to set AccountSASSignatureValues Permissions field.\n *\n * Using this method will guarantee the resource types are in\n * an order accepted by the service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n // The order of the characters should be as specified here to ensure correctness:\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n // Use a string array instead of string concatenating += operator for performance\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.filter) {\n permissions.push(\"f\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.update) {\n permissions.push(\"u\");\n }\n if (this.process) {\n permissions.push(\"p\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like an account SAS permission.\n * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface AccountSASPermissionsLike {\n /**\n * Permission to read resources and list queues and tables granted.\n */\n read?: boolean;\n\n /**\n * Permission to write resources granted.\n */\n write?: boolean;\n\n /**\n * Permission to delete blobs and files granted.\n */\n delete?: boolean;\n\n /**\n * Permission to delete versions granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n list?: boolean;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n add?: boolean;\n\n /**\n * Permission to create blobs and files granted.\n */\n create?: boolean;\n\n /**\n * Permissions to update messages and table entities granted.\n */\n update?: boolean;\n\n /**\n * Permission to get and delete messages granted.\n */\n process?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Permission to filter blobs.\n */\n filter?: boolean;\n\n /**\n * Permission to set immutability policy.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js new file mode 100644 index 00000000..7780df0f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +export class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } + } + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } +} +//# sourceMappingURL=AccountSASResourceTypes.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js.map new file mode 100644 index 00000000..23f49a59 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASResourceTypes.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASResourceTypes.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,uBAAuB;IAApC;QA6BE;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,cAAS,GAAY,KAAK,CAAC;QAElC;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;IAqBjC,CAAC;IA9DC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,aAAqB;QACvC,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAE9D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;YAC7B,QAAQ,CAAC,EAAE;gBACT,KAAK,GAAG;oBACN,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,SAAS,GAAG,IAAI,CAAC;oBACzC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,0BAA0B,CAAC,EAAE,CAAC,CAAC;aACvD;SACF;QAED,OAAO,uBAAuB,CAAC;IACjC,CAAC;IAiBD;;;;;OAKG;IACI,QAAQ;QACb,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACzB;QACD,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACzB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACzB;QACD,OAAO,aAAa,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAChC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the\n * values are set, this should be serialized with toString and set as the resources field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but\n * the order of the resources is particular and this class guarantees correctness.\n */\nexport class AccountSASResourceTypes {\n /**\n * Creates an {@link AccountSASResourceTypes} from the specified resource types string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid resource type.\n *\n * @param resourceTypes -\n */\n public static parse(resourceTypes: string): AccountSASResourceTypes {\n const accountSASResourceTypes = new AccountSASResourceTypes();\n\n for (const c of resourceTypes) {\n switch (c) {\n case \"s\":\n accountSASResourceTypes.service = true;\n break;\n case \"c\":\n accountSASResourceTypes.container = true;\n break;\n case \"o\":\n accountSASResourceTypes.object = true;\n break;\n default:\n throw new RangeError(`Invalid resource type: ${c}`);\n }\n }\n\n return accountSASResourceTypes;\n }\n\n /**\n * Permission to access service level APIs granted.\n */\n public service: boolean = false;\n\n /**\n * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.\n */\n public container: boolean = false;\n\n /**\n * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.\n */\n public object: boolean = false;\n\n /**\n * Converts the given resource types to a string.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n const resourceTypes: string[] = [];\n if (this.service) {\n resourceTypes.push(\"s\");\n }\n if (this.container) {\n resourceTypes.push(\"c\");\n }\n if (this.object) {\n resourceTypes.push(\"o\");\n }\n return resourceTypes.join(\"\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js new file mode 100644 index 00000000..2e6f1e74 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +export class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. 
+ * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; + } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} +//# sourceMappingURL=AccountSASServices.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js.map new file mode 100644 index 00000000..1a36f199 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASServices.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASServices.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,kBAAkB;IAA/B;QAgCE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;IAsBhC,CAAC;IAvEC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,QAAgB;QAClC,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QAEpD,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE;YACxB,QAAQ,CAAC,EAAE;gBACT,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,EAAE,CAAC,CAAC;aAC3D;SACF;QAED,OAAO,kBAAkB,CAAC;IAC5B,CAAC;IAsBD;;;OAGG;IACI,QAAQ;QACb,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that service. Once all the\n * values are set, this should be serialized with toString and set as the services field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but\n * the order of the services is particular and this class guarantees correctness.\n */\nexport class AccountSASServices {\n /**\n * Creates an {@link AccountSASServices} from the specified services string. 
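For reference, the vendored `AccountSASResourceTypes` and `AccountSASServices` helpers shown above are typically used to normalize the `resourceTypes` and `services` flag strings of an account SAS. A minimal sketch, assuming the package's usual public exports from `@azure/storage-blob`; the input strings are placeholders:

```typescript
import { AccountSASResourceTypes, AccountSASServices } from "@azure/storage-blob";

// parse() validates each character (throwing a RangeError on unknown flags, as the
// switch statements above show); toString() re-emits the flags in the order the
// storage service expects, regardless of the input order.
const services = AccountSASServices.parse("fqb").toString();          // "bqf" (blob, queue, file)
const resourceTypes = AccountSASResourceTypes.parse("ocs").toString(); // "sco"
```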
This method will throw an\n * Error if it encounters a character that does not correspond to a valid service.\n *\n * @param services -\n */\n public static parse(services: string): AccountSASServices {\n const accountSASServices = new AccountSASServices();\n\n for (const c of services) {\n switch (c) {\n case \"b\":\n accountSASServices.blob = true;\n break;\n case \"f\":\n accountSASServices.file = true;\n break;\n case \"q\":\n accountSASServices.queue = true;\n break;\n case \"t\":\n accountSASServices.table = true;\n break;\n default:\n throw new RangeError(`Invalid service character: ${c}`);\n }\n }\n\n return accountSASServices;\n }\n\n /**\n * Permission to access blob resources granted.\n */\n public blob: boolean = false;\n\n /**\n * Permission to access file resources granted.\n */\n public file: boolean = false;\n\n /**\n * Permission to access queue resources granted.\n */\n public queue: boolean = false;\n\n /**\n * Permission to access table resources granted.\n */\n public table: boolean = false;\n\n /**\n * Converts the given services to a string.\n *\n */\n public toString(): string {\n const services: string[] = [];\n if (this.blob) {\n services.push(\"b\");\n }\n if (this.table) {\n services.push(\"t\");\n }\n if (this.queue) {\n services.push(\"q\");\n }\n if (this.file) {\n services.push(\"f\");\n }\n return services.join(\"\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js new file mode 100644 index 00000000..ceaf07c9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AccountSASPermissions } from "./AccountSASPermissions"; +import { AccountSASResourceTypes } from "./AccountSASResourceTypes"; +import { AccountSASServices } from "./AccountSASServices"; +import { ipRangeToString } from "./SasIPRange"; +import { SASQueryParameters } from "./SASQueryParameters"; +import { SERVICE_VERSION } from "../utils/constants"; +import { truncatedISO8061Date } from "../utils/utils.common"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +export function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); +} +//# sourceMappingURL=AccountSASSignatureValues.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map new file mode 100644 index 00000000..7e87510c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASSignatureValues.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASSignatureValues.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,uBAAuB,EAAE,MAAM,2BAA2B,CAAC;AACpE,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAE1D,OAAO,EAAc,eAAe,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAe,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AAmE7D;;;;;;;;;;GAUG;AACH,MAAM,UAAU,iCAAiC,CAC/C,yBAAoD,EACpD,mBAA+C;IAE/C,MAAM,OAAO,GAAG,yBAAyB,CAAC,OAAO;QAC/C,CAAC,CAAC,yBAAyB,CAAC,OAAO;QACnC,CAAC,CAAC,eAAe,CAAC;IAEpB,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,qBAAqB;QAC3D,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,aAAa;QACnD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,eAAe;QACrD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,GAAG;QACzC,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,MAAM;QAC5C,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IAAI,yBAAyB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;QACvE,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;KAC/F;IAED,MAAM,iBAAiB,GAAG,qBAAqB,CAAC,KAAK,CACnD,yBAAyB,CAAC,WAAW,CAAC,QAAQ,EAAE,CACjD,CAAC;IACF,MAAM,cAAc,GAAG,kBAAkB,CAAC,KAAK,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;IAC/F,MAAM,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACvD,yBAAyB,CAAC,aAAa,CACxC,CAAC,QAAQ,EAAE,CAAC;IAEb,IAAI,YAAoB,CAAC;IAEzB,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,YAAY,GAAG;YACb,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;YACnB,yBAAyB,CAAC,QAAQ;gBAChC,CAAC,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;gBACjE,CAAC,CAAC,EAAE;YACN,oBAAoB,CAAC,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;YAChE,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;YAC5E,OAAO;YACP,yBAAyB,CAAC,eAAe,CAAC,CAAC,CAAC,yBAAyB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;YAC1F,EAAE,EAAE,uDAAuD;SAC5D,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACd;SAAM;QACL,YAAY,GAAG;YACb,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;YACnB,yBAAyB,CAAC,QAAQ;gBAChC,CAAC,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;gBACjE,CAAC,CAAC,EAAE;YACN,oBAAoB,CAAC,yB
AAyB,CAAC,SAAS,EAAE,KAAK,CAAC;YAChE,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;YAC5E,OAAO;YACP,EAAE,EAAE,uDAAuD;SAC5D,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACd;IAED,MAAM,SAAS,GAAW,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAE9E,OAAO,IAAI,kBAAkB,CAC3B,OAAO,EACP,SAAS,EACT,iBAAiB,CAAC,QAAQ,EAAE,EAC5B,cAAc,EACd,mBAAmB,EACnB,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,SAAS,EACnC,yBAAyB,CAAC,OAAO,EACjC,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,yBAAyB,CAAC,eAAe,CAC1C,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccountSASPermissions } from \"./AccountSASPermissions\";\nimport { AccountSASResourceTypes } from \"./AccountSASResourceTypes\";\nimport { AccountSASServices } from \"./AccountSASServices\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once\n * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation\n * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters}\n * exist because the former is mutable and a logical representation while the latter is immutable and used to generate\n * actual REST requests.\n *\n * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1\n * for more conceptual information on SAS\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n * for descriptions of the parameters, including which are required\n */\nexport interface AccountSASSignatureValues {\n /**\n * If not provided, this defaults to the service version targeted by this version of the library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * The time after which the SAS will no longer work.\n */\n expiresOn: Date;\n\n /**\n * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help\n * constructing the permissions string.\n */\n permissions: AccountSASPermissions;\n\n /**\n * Optional. IP range allowed.\n */\n ipRange?: SasIPRange;\n\n /**\n * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to\n * construct this value.\n */\n services: string;\n\n /**\n * The values that indicate the resource types accessible with this SAS. Please refer\n * to {@link AccountSASResourceTypes} to construct this value.\n */\n resourceTypes: string;\n\n /**\n * Optional. 
Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual\n * REST request.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @param accountSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateAccountSASQueryParameters(\n accountSASSignatureValues: AccountSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n const version = accountSASSignatureValues.version\n ? accountSASSignatureValues.version\n : SERVICE_VERSION;\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'x' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'y' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 't' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.filter &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 'f' permission.\");\n }\n\n if (accountSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n const parsedPermissions = AccountSASPermissions.parse(\n accountSASSignatureValues.permissions.toString()\n );\n const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();\n const parsedResourceTypes = AccountSASResourceTypes.parse(\n accountSASSignatureValues.resourceTypes\n ).toString();\n\n let stringToSign: string;\n\n if (version >= \"2020-12-06\") {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : \"\",\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n } else {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? 
ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n }\n\n const signature: string = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n version,\n signature,\n parsedPermissions.toString(),\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.protocol,\n accountSASSignatureValues.startsOn,\n accountSASSignatureValues.expiresOn,\n accountSASSignatureValues.ipRange,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n accountSASSignatureValues.encryptionScope\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js new file mode 100644 index 00000000..ea435c13 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
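For reference, `generateAccountSASQueryParameters` (shown in the hunks above) combines these helpers with a shared-key credential to build the signed account-SAS query string. A hedged sketch with placeholder account name and key; the `StorageSharedKeyCredential` constructor is assumed to take `(accountName, accountKey)` as in the published `@azure/storage-blob` API:

```typescript
import {
  AccountSASPermissions,
  AccountSASResourceTypes,
  AccountSASServices,
  StorageSharedKeyCredential,
  generateAccountSASQueryParameters,
} from "@azure/storage-blob";

const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");

const sasToken = generateAccountSASQueryParameters(
  {
    permissions: AccountSASPermissions.parse("rwl"),               // read, write, list
    services: AccountSASServices.parse("b").toString(),            // blob service only
    resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),              // valid for one hour
  },
  credential
).toString();
// Append `?${sasToken}` to the storage account URL to authorize requests with it.
```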
+ * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. 
+ * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} +//# sourceMappingURL=BlobSASPermissions.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js.map new file mode 100644 index 00000000..c3768ea7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobSASPermissions.js","sourceRoot":"","sources":["../../../../src/sas/BlobSASPermissions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,kBAAkB;IAA/B;QAiGE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,kBAAa,GAAY,KAAK,CAAC;QAEtC;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,0BAAqB,GAAY,KAAK,CAAC;QAE9C;;WAEG;QACI,oBAAe,GAAY,KAAK,CAAC;IA6C1C,CAAC;IAlMC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,WAAmB;QACrC,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QAEpD,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;YAC9B,QAAQ,IAAI,EAAE;gBACZ,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;oBACxC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;oBAClC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBAChD,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC1C,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,uBAAuB,IAAI,EAAE,CAAC,CAAC;aACvD;SACF;QAED,OAAO,kBAAkB,CAAC;IAC5B,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,IAAI,CAAC,cAAsC;QACvD,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QACpD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;SAChC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;SAC/B;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;YACxB,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;SACjC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;YAChC,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;SACzC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;SAC/B;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;SAChC;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;YAC1B,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;SACnC;QACD,IAAI,cAAc,CAAC,qBAAqB
,EAAE;YACxC,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;SACjD;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;YAClC,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;SAC3C;QACD,OAAO,kBAAkB,CAAC;IAC5B,CAAC;IAyDD;;;;;OAKG;IACI,QAAQ;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting\n * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all\n * the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class BlobSASPermissions {\n /**\n * Creates a {@link BlobSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n blobSASPermissions.read = true;\n break;\n case \"a\":\n blobSASPermissions.add = true;\n break;\n case \"c\":\n blobSASPermissions.create = true;\n break;\n case \"w\":\n blobSASPermissions.write = true;\n break;\n case \"d\":\n blobSASPermissions.delete = true;\n break;\n case \"x\":\n blobSASPermissions.deleteVersion = true;\n break;\n case \"t\":\n blobSASPermissions.tag = true;\n break;\n case \"m\":\n blobSASPermissions.move = true;\n break;\n case \"e\":\n blobSASPermissions.execute = true;\n break;\n case \"i\":\n blobSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n blobSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission: ${char}`);\n }\n }\n\n return blobSASPermissions;\n }\n\n /**\n * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n if (permissionLike.read) {\n blobSASPermissions.read = true;\n }\n if (permissionLike.add) {\n blobSASPermissions.add = true;\n }\n if (permissionLike.create) {\n blobSASPermissions.create = true;\n }\n if (permissionLike.write) {\n blobSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n blobSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n blobSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n blobSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n blobSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n blobSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n blobSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n blobSASPermissions.permanentDelete = true;\n }\n return blobSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Converts the given permissions to a string. 
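For reference, `BlobSASPermissions` (shown above) accepts either a permission string via `parse()` or a plain flags object via `from()`, and `toString()` always re-serializes the flags in the service-mandated order. A small sketch, assuming the package's usual public export:

```typescript
import { BlobSASPermissions } from "@azure/storage-blob";

// Characters may arrive in any order; toString() normalizes them.
const fromString = BlobSASPermissions.parse("wcr").toString();                        // "rcw"

// A plain BlobSASPermissionsLike object works as well.
const fromObject = BlobSASPermissions.from({ read: true, delete: true }).toString();  // "rd"
```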
Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * @returns A string which represents the BlobSASPermissions\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Blob SAS permission.\n * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface BlobSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js new file mode 100644 index 00000000..f7d47f9e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js @@ -0,0 +1,555 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BlobSASPermissions } from "./BlobSASPermissions"; +import { ContainerSASPermissions } from "./ContainerSASPermissions"; +import { StorageSharedKeyCredential } from "../credentials/StorageSharedKeyCredential"; +import { UserDelegationKeyCredential } from "../credentials/UserDelegationKeyCredential"; +import { ipRangeToString } from "./SasIPRange"; +import { SASQueryParameters } from "./SASQueryParameters"; +import { SERVICE_VERSION } from "../utils/constants"; +import { truncatedISO8061Date } from "../utils/utils.common"; +export function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? 
sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. 
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. 
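For reference, the shared-key branches of `generateBlobSASQueryParameters` shown above produce a container- or blob-scoped SAS: the signed resource becomes "b" when a `blobName` is present and "c" otherwise. A hedged sketch with placeholder account, container, and blob names:

```typescript
import {
  BlobSASPermissions,
  StorageSharedKeyCredential,
  generateBlobSASQueryParameters,
} from "@azure/storage-blob";

const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");

const blobSAS = generateBlobSASQueryParameters(
  {
    containerName: "my-container",
    blobName: "readme.md",                             // omit for a container-level ("c") SAS
    permissions: BlobSASPermissions.parse("r"),        // read-only
    expiresOn: new Date(Date.now() + 15 * 60 * 1000),  // valid for 15 minutes
  },
  credential
).toString();
```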
For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); + } + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; +} +//# sourceMappingURL=BlobSASSignatureValues.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map new file mode 100644 index 00000000..38f9b76d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BlobSASSignatureValues.js","sourceRoot":"","sources":["../../../../src/sas/BlobSASSignatureValues.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAE1D,OAAO,EAAE,uBAAuB,EAAE,MAAM,2BAA2B,CAAC;AACpE,OAAO,EAAE,0BAA0B,EAAE,MAAM,2CAA2C,CAAC;AACvF,OAAO,EAAE,2BAA2B,EAAE,MAAM,4CAA4C,CAAC;AACzF,OAAO,EAAE,eAAe,EAAc,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAe,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AA4O7D,MAAM,UAAU,8BAA8B,CAC5C,sBAA8C,EAC9C,sCAAsF,EACtF,WAAoB;IAEpB,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC;IAElG,MAAM,mBAAmB,GACvB,sCAAsC,YAAY,0BAA0B;QAC1E,CAAC,CAAC,sCAAsC;QACxC,CAAC,CAAC,SAAS,CAAC;IAChB,IAAI,2BAAoE,CAAC;IAEzE,IAAI,mBAAmB,KAAK,SAAS,IAAI,WAAW,KAAK,SAAS,EAAE;QAClE,2BAA2B,GAAG,IAAI,2BAA2B,CAC3D,WAAW,EACX,sCAA2D,CAC5D,CAAC;KACH;IAED,IAAI,mBAAmB,KAAK,SAAS,IAAI,2BAA2B,KAAK,SAAS,EAAE;QAClF,MAAM,SAAS,CAAC,gEAAgE,CAAC,CAAC;KACnF;IAED,8DAA8D;IAC9D,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;YACrC,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;SAC5F;aAAM;YACL,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;SACH;KACF;IAED,gEAAgE;IAChE,gGAAgG;IAChG,yHAAyH;IACzH,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;YACrC,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;SAC5F;aAAM;YACL,8HAA8H;YAC9H,IAAI,OAAO,IAAI,YAAY,EAAE;gBAC3B,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;aACH;iBAAM;gBACL,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;aACH;SACF;KACF;IAED,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;YACrC,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;SAC5F;aAAM;YACL,MAAM,IAAI,UAAU,CAClB,kGAAkG,CACnG,CAAC;SACH;KACF;IAED,MAAM,IAAI,UAAU,CAAC,oCAAoC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C;IAE/C,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,CAAC,CAAC,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;QACA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;KAChB;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,sBAAsB,CAAC,UAAU;QACjC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;QAC1F,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;KAC7E,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI
,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C;IAE/C,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,CAAC,CAAC,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;QACA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,sBAAsB,CAAC,UAAU;QACjC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;QAC1F,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;KAC7E,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C;IAE/C,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,CAAC,CAAC,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;QACA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IAC
vD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,sBAAsB,CAAC,UAAU;QACjC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;QAC1F,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;KAC7E,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,SAAS,EACT,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD;IAExD,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,sEAAsE;IACtE,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;QAC5E,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;YAC1D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;YAC3F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;YAC3D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;YAC5F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY;QACnC,sBAAsB,CAAC,kBAAkB;QACzC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,WAAW;KACnC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IA
C9E,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,CAC9C,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD;IAExD,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,sEAAsE;IACtE,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;QAC5E,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;YAC1D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;YAC3F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;YAC3D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;YAC5F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;QACT,sBAAsB,CAAC,aAAa;QACpC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY;QACnC,sBAAsB,CAAC,kBAAkB;QACzC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,WAAW;KACnC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAC9E,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,CACrC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD;IAExD,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,sEAAsE;IACtE,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;QAC5E,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAA
E;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;YAC1D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;YAC3F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;YAC3D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;YAC5F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;QACT,sBAAsB,CAAC,aAAa;QACpC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,YAAY;QACnC,sBAAsB,CAAC,kBAAkB;QACzC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,WAAW;KACnC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAC9E,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED,SAAS,gBAAgB,CAAC,WAAmB,EAAE,aAAqB,EAAE,QAAiB;IACrF,2CAA2C;IAC3C,oDAAoD;IACpD,MAAM,QAAQ,GAAa,CAAC,SAAS,WAAW,IAAI,aAAa,EAAE,CAAC,CAAC;IACrE,IAAI,QAAQ,EAAE;QACZ,QAAQ,CAAC,IAAI,CAAC,IAAI,QAAQ,EAAE,CAAC,CAAC;KAC/B;IACD,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC3B,CAAC;AAED,SAAS,wCAAwC,CAC/C,sBAA8C;IAE9C,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC;IAClG,IAAI,sBAAsB,CAAC,YAAY,IAAI,OAAO,GAAG,YAAY,EAAE;QACjE,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,YAAY,EAAE;QACxF,MAAM,UAAU,CAAC,wDAAwD,CAAC,CAAC;KAC5E;IAED,IAAI,sBAAsB,CAAC,SAAS,IAAI,OAAO,GAAG,YAAY,EAAE;QAC9D,MAAM,UAAU,CAAC,+DAA+D,CAAC,CAAC;KACnF;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,SAAS,EAAE;QACrF,MAAM,UAAU,CAAC,qDAAqD,CAAC,CAAC;KACzE;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,qBAAqB;QACxD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,aAAa;QAChD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,eAAe;QAClD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,GAAG;QACtC,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IAED,IACE,OAAO,GAAG,YAAY;QACtB,sBAAsB,CAAC,WAAW;QAClC,CAAC,sBAAsB,CAAC,WAAW,CAAC,IAAI,IAAI,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,EACvF;QACA,MAAM,UAAU,CAAC,6EAA6E,CAAC,CAAC;KACjG;IAED,IACE,OAAO,GAAG,YAAY;QACtB,sBAAsB,CAAC,WAAW;QACjC,sBAAsB,CAAC,WAAuC,CAAC,YAAY,EAC5E;QACA,MAAM,UAAU,CAAC,sEAAsE,
CAAC,CAAC;KAC1F;IAED,IACE,OAAO,GAAG,YAAY;QACtB,CAAC,sBAAsB,CAAC,0BAA0B,IAAI,sBAAsB,CAAC,aAAa,CAAC,EAC3F;QACA,MAAM,UAAU,CACd,mGAAmG,CACpG,CAAC;KACH;IAED,IAAI,sBAAsB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;QACpE,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;KAC/F;IAED,sBAAsB,CAAC,OAAO,GAAG,OAAO,CAAC;IACzC,OAAO,sBAAsB,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { BlobSASPermissions } from \"./BlobSASPermissions\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\nimport { ContainerSASPermissions } from \"./ContainerSASPermissions\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { UserDelegationKeyCredential } from \"../credentials/UserDelegationKeyCredential\";\nimport { ipRangeToString, SasIPRange } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs.\n */\nexport interface BlobSASSignatureValues {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource\n * being accessed for help constructing the permissions string.\n */\n permissions?: BlobSASPermissions | ContainerSASPermissions;\n\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * The name of the container the SAS user may access.\n */\n containerName: string;\n\n /**\n * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided.\n */\n blobName?: string;\n\n /**\n * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09.\n */\n snapshotTime?: string;\n\n /**\n * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10.\n */\n versionId?: string;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n\n /**\n * Optional. 
Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user\n * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will\n * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission\n * check for the user specified in this value will be performed. This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n\n /**\n * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to\n * correlate SAS generation with storage resource access. This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * Fill in the required details before running the following snippets.\n *\n * Example usage:\n *\n * ```js\n * // Generate service level SAS for a container\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using an identifier:\n *\n * ```js\n * // Generate service level SAS for a container with identifier\n * // startsOn & permissions are optional when identifier is provided\n * const identifier = \"unique-id\";\n * await containerClient.setAccessPolicy(undefined, [\n * {\n * accessPolicy: {\n * expiresOn: new Date(new Date().valueOf() + 86400), // Date type\n * permissions: ContainerSASPermissions.parse(\"racwdl\").toString(),\n * startsOn: new Date() // Date type\n * },\n * id: identifier\n * }\n * ]);\n *\n * const containerSAS = generateBlobSASQueryParameters(\n * {\n * containerName, // Required\n * identifier // Required\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using a blob name:\n *\n * ```js\n * // Generate service level SAS for a blob\n * const blobSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * blobName, // Required\n * permissions: BlobSASPermissions.parse(\"racwd\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type\n * cacheControl: \"cache-control-override\", // Optional\n * contentDisposition: \"content-disposition-override\", // Optional\n * contentEncoding: \"content-encoding-override\", // Optional\n * contentLanguage: \"content-language-override\", // Optional\n * contentType: \"content-type-override\", // Optional\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters;\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required.\n *\n * Example usage:\n *\n * ```js\n * // Generate user delegation SAS for a container\n * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn, // Optional. Date type\n * expiresOn, // Required. Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2018-11-09\" // Must greater than or equal to 2018-11-09 to generate user delegation SAS\n * },\n * userDelegationKey, // UserDelegationKey\n * accountName\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`\n * @param accountName -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKey: UserDelegationKey,\n accountName: string\n): SASQueryParameters;\n\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredentialOrUserDelegationKey: StorageSharedKeyCredential | UserDelegationKey,\n accountName?: string\n): SASQueryParameters {\n const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;\n\n const sharedKeyCredential =\n sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential\n ? 
sharedKeyCredentialOrUserDelegationKey\n : undefined;\n let userDelegationKeyCredential: UserDelegationKeyCredential | undefined;\n\n if (sharedKeyCredential === undefined && accountName !== undefined) {\n userDelegationKeyCredential = new UserDelegationKeyCredential(\n accountName,\n sharedKeyCredentialOrUserDelegationKey as UserDelegationKey\n );\n }\n\n if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {\n throw TypeError(\"Invalid sharedKeyCredential, userDelegationKey or accountName.\");\n }\n\n // Version 2020-12-06 adds support for encryptionscope in SAS.\n if (version >= \"2020-12-06\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential);\n } else {\n return generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n\n // Version 2019-12-12 adds support for the blob tags permission.\n // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string\n if (version >= \"2018-11-09\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);\n } else {\n // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId.\n if (version >= \"2020-02-10\") {\n return generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n } else {\n return generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n }\n\n if (version >= \"2015-04-05\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);\n } else {\n throw new RangeError(\n \"'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.\"\n );\n }\n }\n\n throw new RangeError(\"'version' must be >= '2015-04-05'.\");\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20150405(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl ? 
blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n undefined,\n undefined,\n undefined,\n blobSASSignatureValues.encryptionScope\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-02-10.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.encryptionScope\n );\n}\n\nfunction getCanonicalName(accountName: string, containerName: string, blobName?: string): string {\n // Container: \"/blob/account/containerName\"\n // Blob: \"/blob/account/containerName/blobName\"\n const elements: string[] = [`/blob/${accountName}/${containerName}`];\n if (blobName) {\n elements.push(`/${blobName}`);\n }\n return elements.join(\"\");\n}\n\nfunction SASSignatureValuesSanityCheckAndAutofill(\n blobSASSignatureValues: BlobSASSignatureValues\n): BlobSASSignatureValues {\n const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION;\n if (blobSASSignatureValues.snapshotTime && version < \"2018-11-09\") {\n throw RangeError(\"'version' must be >= '2018-11-09' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {\n throw RangeError(\"Must provide 'blobName' when providing 'snapshotTime'.\");\n }\n\n if (blobSASSignatureValues.versionId && version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {\n throw RangeError(\"Must provide 'blobName' when providing 'versionId'.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'x' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'y' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when providing 't' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)\n ) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission.\");\n }\n\n if (\n version < \"2021-04-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions as ContainerSASPermissions).filterByTags\n ) {\n throw RangeError(\"'version' must be >= '2021-04-10' when providing the 'f' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)\n ) {\n throw RangeError(\n \"'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'.\"\n );\n }\n\n if (blobSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n blobSASSignatureValues.version = version;\n return blobSASSignatureValues;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js new file mode 100644 index 00000000..d25bfd44 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js @@ -0,0 +1,221 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
+ * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; + } + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } +} +//# sourceMappingURL=ContainerSASPermissions.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map new file mode 100644 index 00000000..068b7ab8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"ContainerSASPermissions.js","sourceRoot":"","sources":["../../../../src/sas/ContainerSASPermissions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;GAMG;AACH,MAAM,OAAO,uBAAuB;IAApC;QA6GE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,kBAAa,GAAY,KAAK,CAAC;QAEtC;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,0BAAqB,GAAY,KAAK,CAAC;QAE9C;;WAEG;QACI,oBAAe,GAAY,KAAK,CAAC;QAExC;;WAEG;QACI,iBAAY,GAAY,KAAK,CAAC;IAqDvC,CAAC;IAhOC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,WAAmB;QACrC,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAE9D,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;YAC9B,QAAQ,IAAI,EAAE;gBACZ,KAAK,GAAG;oBACN,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACrC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC7C,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACrD,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC/C,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;oBAC5C,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,sBAAsB,IAAI,EAAE,CAAC,CAAC;aACtD;SACF;QAED,OAAO,uBAAuB,CAAC;IACjC,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,IAAI,CAAC,cAA2C;QAC5D,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAC9D,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;SACpC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;SACvC;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;YACxB,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;SACtC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;SACvC;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;YAChC,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;SAC9C;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;SACpC;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;YAC1B,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;SACxC;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;YACxC,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;SACtD;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;YAClC,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;SAChD;QACD,IAAI,cAAc,CAAC,YAAY,EAAE;YAC/B,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;SAC7C;QACD,OAAO,uBAAuB,CAAC;IACjC,CAAC;IAmED;;;;;;;OAOG;IACI,QAAQ;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,I
AAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,YAAY,EAAE;YACrB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.\n * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.\n * Once all the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class ContainerSASPermissions {\n /**\n * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n containerSASPermissions.read = true;\n break;\n case \"a\":\n containerSASPermissions.add = true;\n break;\n case \"c\":\n containerSASPermissions.create = true;\n break;\n case \"w\":\n containerSASPermissions.write = true;\n break;\n case \"d\":\n containerSASPermissions.delete = true;\n break;\n case \"l\":\n containerSASPermissions.list = true;\n break;\n case \"t\":\n containerSASPermissions.tag = true;\n break;\n case \"x\":\n containerSASPermissions.deleteVersion = true;\n break;\n case \"m\":\n containerSASPermissions.move = true;\n break;\n case \"e\":\n containerSASPermissions.execute = true;\n break;\n case \"i\":\n containerSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n containerSASPermissions.permanentDelete = true;\n break;\n case \"f\":\n containerSASPermissions.filterByTags = true;\n break;\n default:\n throw new RangeError(`Invalid permission ${char}`);\n }\n }\n\n return containerSASPermissions;\n }\n\n /**\n * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n if (permissionLike.read) {\n containerSASPermissions.read = true;\n }\n if (permissionLike.add) {\n containerSASPermissions.add = true;\n }\n if (permissionLike.create) {\n containerSASPermissions.create = true;\n }\n if (permissionLike.write) {\n containerSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n containerSASPermissions.delete = true;\n }\n if (permissionLike.list) {\n containerSASPermissions.list = true;\n }\n if (permissionLike.deleteVersion) {\n containerSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n containerSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n 
containerSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n containerSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n containerSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n containerSASPermissions.permanentDelete = true;\n }\n if (permissionLike.filterByTags) {\n containerSASPermissions.filterByTags = true;\n }\n return containerSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specifies List access granted.\n */\n public list: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n public filterByTags: boolean = false;\n\n /**\n * Converts the given permissions to a string. Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * The order of the characters should be as specified here to ensure correctness.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n if (this.filterByTags) {\n permissions.push(\"f\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Container SAS permission.\n * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface ContainerSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specifies List access granted.\n */\n list?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n 
move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n filterByTags?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js new file mode 100644 index 00000000..c3869d63 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js @@ -0,0 +1,234 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { ipRangeToString } from "./SasIPRange"; +import { truncatedISO8061Date } from "../utils/utils.common"; +/** + * Protocols for generated SAS. + */ +export var SASProtocol; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(SASProtocol || (SASProtocol = {})); +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. 
+ */ +export class SASQueryParameters { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } + } + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + "sktid", + "skt", + "ske", + "sks", + "skv", + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. 
+ * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } +} +//# sourceMappingURL=SASQueryParameters.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map new file mode 100644 index 00000000..7b597a29 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SASQueryParameters.js","sourceRoot":"","sources":["../../../../src/sas/SASQueryParameters.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAc,eAAe,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AAG7D;;GAEG;AACH,MAAM,CAAN,IAAY,WAUX;AAVD,WAAY,WAAW;IACrB;;OAEG;IACH,8BAAe,CAAA;IAEf;;OAEG;IACH,0CAA2B,CAAA;AAC7B,CAAC,EAVW,WAAW,KAAX,WAAW,QAUtB;AA4FD;;;;;;;;GAQG;AACH,MAAM,OAAO,kBAAkB;IAsN7B,YACE,OAAe,EACf,SAAiB,EACjB,oBAAyD,EACzD,QAAiB,EACjB,aAAsB,EACtB,QAAsB,EACtB,QAAe,EACf,SAAgB,EAChB,OAAoB,EACpB,UAAmB,EACnB,QAAiB,EACjB,YAAqB,EACrB,kBAA2B,EAC3B,eAAwB,EACxB,eAAwB,EACxB,WAAoB,EACpB,iBAAqC,EACrC,0BAAmC,EACnC,aAAsB,EACtB,eAAwB;QAExB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,oBAAoB,KAAK,SAAS,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;YAClF,4BAA4B;YAC5B,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;YACpD,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;YACxD,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;YAChD,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,OAAO,CAAC;YACjD,IAAI,CAAC,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;YAClD,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;YAC5D,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,YAAY,CAAC;YACtD,IAAI,CAAC,kBAAkB,GAAG,oBAAoB,CAAC,kBAAkB,CAAC;YAClE,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;YAC5D,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;YAC5D,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;YAEpD,IAAI,oBAAoB,CAAC,iBAAiB,EAAE;gBAC1C,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBACvE,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,eAAe,CAAC;gBAC9E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;gBAC1E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;gBAE1E,IAAI,CAAC,0BAA0B,GAAG,oBAAoB,CAAC,0BAA0B,CAAC;gBAClF,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;aACzD;SACF;aAAM;YACL,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;YACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;YAC3B,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC;YACxC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC;YAC5B,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;YACvC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;YAC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;YACjC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;YAC7C,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;YACvC,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;YACvC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;YAE/B,IAAI,iBAAiB,EAAE;gBACrB,IAAI,CAAC,SAAS,GAAG,iBAAiB,CAAC,cAAc,CAAC;gBAClD,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;gBACvD,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;gBACvD,IAAI,CAAC,eAAe,GAAG,
iBAAiB,CAAC,eAAe,CAAC;gBACzD,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;gBACrD,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;gBAErD,IAAI,CAAC,0BAA0B,GAAG,0BAA0B,CAAC;gBAC7D,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;aACpC;SACF;IACH,CAAC;IA1JD;;;;OAIG;IACH,IAAW,OAAO;QAChB,IAAI,IAAI,CAAC,YAAY,EAAE;YACrB,OAAO;gBACL,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,GAAG;gBAC1B,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,KAAK;aAC/B,CAAC;SACH;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IA+ID;;;OAGG;IACI,QAAQ;QACb,MAAM,MAAM,GAAa;YACvB,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,IAAI;YACJ,KAAK;YACL,OAAO;YACP,OAAO;YACP,KAAK;YACL,KAAK;YACL,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,OAAO;YACP,MAAM;SACP,CAAC;QACF,MAAM,OAAO,GAAa,EAAE,CAAC;QAE7B,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE;YAC1B,QAAQ,KAAK,EAAE;gBACb,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;oBAC3D,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACvE,CAAC;oBACF,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACzE,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,SAAS,CACzD,CAAC;oBACF,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;oBAC9D,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,OAAO,EAAE,mBAAmB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;gBACR,KAAK,OAAO,EAAE,mBAAmB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;oBAClE,MAAM;gBACR,KAAK,KAAK,EAAE,wBAAwB;oBAClC,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,cAAc,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACnF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK,EAAE,yBAAyB;oBACnC,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACrF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK,EAAE,qBAAqB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,KAAK,EAAE,qBAAqB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;oBAChE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,kBAAkB,CAAC,CAAC;oBACtE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,
IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;gBACR,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,0BAA0B,CAAC,CAAC;oBAC9E,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;aACT;SACF;QACD,OAAO,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;;OAMG;IACK,uBAAuB,CAAC,OAAiB,EAAE,GAAW,EAAE,KAAc;QAC5E,IAAI,CAAC,KAAK,EAAE;YACV,OAAO;SACR;QAED,GAAG,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC;QAC9B,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;QAClC,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,GAAG,GAAG,IAAI,KAAK,EAAE,CAAC,CAAC;SACjC;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * Protocols for generated SAS.\n */\nexport enum SASProtocol {\n /**\n * Protocol that allows HTTPS only\n */\n Https = \"https\",\n\n /**\n * Protocol that allows both HTTPS and HTTP\n */\n HttpsAndHttp = \"https,http\",\n}\n\n/**\n * Options to construct {@link SASQueryParameters}.\n */\nexport interface SASQueryParametersOptions {\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n permissions?: string;\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n services?: string;\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n resourceTypes?: string;\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n protocol?: SASProtocol;\n /**\n * Optional. The start time for this SAS token.\n */\n startsOn?: Date;\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n expiresOn?: Date;\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n resource?: string;\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n cacheControl?: string;\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n contentDisposition?: string;\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n contentEncoding?: string;\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n contentLanguage?: string;\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n contentType?: string;\n /**\n * User delegation key properties.\n */\n userDelegationKey?: UserDelegationKey;\n /**\n * Authorized AAD Object ID in GUID format. 
The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}.\n * This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly\n * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}\n * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should\n * be taken here in case there are existing query parameters, which might affect the appropriate means of appending\n * these query parameters).\n *\n * NOTE: Instances of this class are immutable.\n */\nexport class SASQueryParameters {\n /**\n * The storage API version.\n */\n public readonly version: string;\n\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n public readonly protocol?: SASProtocol;\n\n /**\n * Optional. The start time for this SAS token.\n */\n public readonly startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n public readonly expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n public readonly permissions?: string;\n\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n public readonly services?: string;\n\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n public readonly resourceTypes?: string;\n\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n public readonly identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n public readonly encryptionScope?: string;\n\n /**\n * Optional. 
Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n public readonly resource?: string;\n\n /**\n * The signature for the SAS token.\n */\n public readonly signature: string;\n\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n public readonly cacheControl?: string;\n\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n public readonly contentDisposition?: string;\n\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n public readonly contentEncoding?: string;\n\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n public readonly contentLanguage?: string;\n\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n public readonly contentType?: string;\n\n /**\n * Inner value of getter ipRange.\n */\n private readonly ipRangeInner?: SasIPRange;\n\n /**\n * The Azure Active Directory object ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedOid?: string;\n\n /**\n * The Azure Active Directory tenant ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedTenantId?: string;\n\n /**\n * The date-time the key is active.\n * Property of user delegation key.\n */\n private readonly signedStartsOn?: Date;\n\n /**\n * The date-time the key expires.\n * Property of user delegation key.\n */\n private readonly signedExpiresOn?: Date;\n\n /**\n * Abbreviation of the Azure Storage service that accepts the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedService?: string;\n\n /**\n * The service version that created the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedVersion?: string;\n\n /**\n * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This is only used for User Delegation SAS.\n */\n public readonly preauthorizedAgentObjectId?: string;\n\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n public readonly correlationId?: string;\n\n /**\n * Optional. 
IP range allowed for this SAS.\n *\n * @readonly\n */\n public get ipRange(): SasIPRange | undefined {\n if (this.ipRangeInner) {\n return {\n end: this.ipRangeInner.end,\n start: this.ipRangeInner.start,\n };\n }\n return undefined;\n }\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param permissions - Representing the storage permissions\n * @param services - Representing the storage services being accessed (only for Account SAS)\n * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS)\n * @param protocol - Representing the allowed HTTP protocol(s)\n * @param startsOn - Representing the start time for this SAS token\n * @param expiresOn - Representing the expiry time for this SAS token\n * @param ipRange - Representing the range of valid IP addresses for this SAS token\n * @param identifier - Representing the signed identifier (only for Service SAS)\n * @param resource - Representing the storage container or blob (only for Service SAS)\n * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS)\n * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS)\n * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS)\n * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS)\n * @param contentType - Representing the content-type header (only for Blob/File Service SAS)\n * @param userDelegationKey - Representing the user delegation key properties\n * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS)\n * @param correlationId - Representing the correlation ID (only for User Delegation SAS)\n * @param encryptionScope -\n */\n constructor(\n version: string,\n signature: string,\n permissions?: string,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n );\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param options - Optional. 
Options to construct the SASQueryParameters.\n */\n constructor(version: string, signature: string, options?: SASQueryParametersOptions);\n\n constructor(\n version: string,\n signature: string,\n permissionsOrOptions?: string | SASQueryParametersOptions,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n ) {\n this.version = version;\n this.signature = signature;\n\n if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== \"string\") {\n // SASQueryParametersOptions\n this.permissions = permissionsOrOptions.permissions;\n this.services = permissionsOrOptions.services;\n this.resourceTypes = permissionsOrOptions.resourceTypes;\n this.protocol = permissionsOrOptions.protocol;\n this.startsOn = permissionsOrOptions.startsOn;\n this.expiresOn = permissionsOrOptions.expiresOn;\n this.ipRangeInner = permissionsOrOptions.ipRange;\n this.identifier = permissionsOrOptions.identifier;\n this.encryptionScope = permissionsOrOptions.encryptionScope;\n this.resource = permissionsOrOptions.resource;\n this.cacheControl = permissionsOrOptions.cacheControl;\n this.contentDisposition = permissionsOrOptions.contentDisposition;\n this.contentEncoding = permissionsOrOptions.contentEncoding;\n this.contentLanguage = permissionsOrOptions.contentLanguage;\n this.contentType = permissionsOrOptions.contentType;\n\n if (permissionsOrOptions.userDelegationKey) {\n this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId;\n this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId;\n this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn;\n this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn;\n this.signedService = permissionsOrOptions.userDelegationKey.signedService;\n this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId;\n this.correlationId = permissionsOrOptions.correlationId;\n }\n } else {\n this.services = services;\n this.resourceTypes = resourceTypes;\n this.expiresOn = expiresOn;\n this.permissions = permissionsOrOptions;\n this.protocol = protocol;\n this.startsOn = startsOn;\n this.ipRangeInner = ipRange;\n this.encryptionScope = encryptionScope;\n this.identifier = identifier;\n this.resource = resource;\n this.cacheControl = cacheControl;\n this.contentDisposition = contentDisposition;\n this.contentEncoding = contentEncoding;\n this.contentLanguage = contentLanguage;\n this.contentType = contentType;\n\n if (userDelegationKey) {\n this.signedOid = userDelegationKey.signedObjectId;\n this.signedTenantId = userDelegationKey.signedTenantId;\n this.signedStartsOn = userDelegationKey.signedStartsOn;\n this.signedExpiresOn = userDelegationKey.signedExpiresOn;\n this.signedService = userDelegationKey.signedService;\n this.signedVersion = userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = preauthorizedAgentObjectId;\n this.correlationId = correlationId;\n }\n }\n }\n\n /**\n * Encodes all SAS query parameters into a string that can be appended to a URL.\n *\n */\n public toString(): string {\n 
const params: string[] = [\n \"sv\",\n \"ss\",\n \"srt\",\n \"spr\",\n \"st\",\n \"se\",\n \"sip\",\n \"si\",\n \"ses\",\n \"skoid\", // Signed object ID\n \"sktid\", // Signed tenant ID\n \"skt\", // Signed key start time\n \"ske\", // Signed key expiry time\n \"sks\", // Signed key service\n \"skv\", // Signed key version\n \"sr\",\n \"sp\",\n \"sig\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"saoid\",\n \"scid\",\n ];\n const queries: string[] = [];\n\n for (const param of params) {\n switch (param) {\n case \"sv\":\n this.tryAppendQueryParameter(queries, param, this.version);\n break;\n case \"ss\":\n this.tryAppendQueryParameter(queries, param, this.services);\n break;\n case \"srt\":\n this.tryAppendQueryParameter(queries, param, this.resourceTypes);\n break;\n case \"spr\":\n this.tryAppendQueryParameter(queries, param, this.protocol);\n break;\n case \"st\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined\n );\n break;\n case \"se\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined\n );\n break;\n case \"sip\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.ipRange ? ipRangeToString(this.ipRange) : undefined\n );\n break;\n case \"si\":\n this.tryAppendQueryParameter(queries, param, this.identifier);\n break;\n case \"ses\":\n this.tryAppendQueryParameter(queries, param, this.encryptionScope);\n break;\n case \"skoid\": // Signed object ID\n this.tryAppendQueryParameter(queries, param, this.signedOid);\n break;\n case \"sktid\": // Signed tenant ID\n this.tryAppendQueryParameter(queries, param, this.signedTenantId);\n break;\n case \"skt\": // Signed key start time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined\n );\n break;\n case \"ske\": // Signed key expiry time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined\n );\n break;\n case \"sks\": // Signed key service\n this.tryAppendQueryParameter(queries, param, this.signedService);\n break;\n case \"skv\": // Signed key version\n this.tryAppendQueryParameter(queries, param, this.signedVersion);\n break;\n case \"sr\":\n this.tryAppendQueryParameter(queries, param, this.resource);\n break;\n case \"sp\":\n this.tryAppendQueryParameter(queries, param, this.permissions);\n break;\n case \"sig\":\n this.tryAppendQueryParameter(queries, param, this.signature);\n break;\n case \"rscc\":\n this.tryAppendQueryParameter(queries, param, this.cacheControl);\n break;\n case \"rscd\":\n this.tryAppendQueryParameter(queries, param, this.contentDisposition);\n break;\n case \"rsce\":\n this.tryAppendQueryParameter(queries, param, this.contentEncoding);\n break;\n case \"rscl\":\n this.tryAppendQueryParameter(queries, param, this.contentLanguage);\n break;\n case \"rsct\":\n this.tryAppendQueryParameter(queries, param, this.contentType);\n break;\n case \"saoid\":\n this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId);\n break;\n case \"scid\":\n this.tryAppendQueryParameter(queries, param, this.correlationId);\n break;\n }\n }\n return queries.join(\"&\");\n }\n\n /**\n * A private helper method used to filter and append query key/value pairs into an array.\n *\n * @param queries -\n * @param key -\n * @param value -\n */\n private tryAppendQueryParameter(queries: string[], key: string, value?: string): void {\n if (!value) {\n return;\n }\n\n key = encodeURIComponent(key);\n value = encodeURIComponent(value);\n if (key.length > 0 && value.length > 0) {\n queries.push(`${key}=${value}`);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js new file mode 100644 index 00000000..9f386b67 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate SasIPRange format string. For example: + * + * "8.8.8.8" or "1.1.1.1-255.255.255.255" + * + * @param ipRange - + */ +export function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; +} +//# sourceMappingURL=SasIPRange.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js.map new file mode 100644 index 00000000..0a67a117 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SasIPRange.js","sourceRoot":"","sources":["../../../../src/sas/SasIPRange.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkBlC;;;;;;GAMG;AACH,MAAM,UAAU,eAAe,CAAC,OAAmB;IACjD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;AACzE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allowed IP range for a SAS.\n */\nexport interface SasIPRange {\n /**\n * Starting IP address in the IP range.\n * If end IP doesn't provide, start IP will the only IP allowed.\n */\n start: string;\n /**\n * Optional. 
IP address that ends the IP range.\n * If not provided, start IP will the only IP allowed.\n */\n end?: string;\n}\n\n/**\n * Generate SasIPRange format string. For example:\n *\n * \"8.8.8.8\" or \"1.1.1.1-255.255.255.255\"\n *\n * @param ipRange -\n */\nexport function ipRangeToString(ipRange: SasIPRange): string {\n return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js new file mode 100644 index 00000000..d2682080 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// In browser, during webpack or browserify bundling, this module will be replaced by 'events' +// https://github.com/Gozala/events +import { EventEmitter } from "events"; +/** + * States for Batch. + */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. + */ +export class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new EventEmitter(); + } + /** + * Add a operation into queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); + } + /** + * Start execute operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); + } + /** + * Get next operation to be executed. Return null when reaching ends. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. 
+ * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } + else { + return; + } + } + } +} +//# sourceMappingURL=Batch.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js.map new file mode 100644 index 00000000..62abe3e6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Batch.js","sourceRoot":"","sources":["../../../../src/utils/Batch.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,8FAA8F;AAC9F,mCAAmC;AACnC,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAOtC;;GAEG;AACH,IAAK,WAGJ;AAHD,WAAK,WAAW;IACd,6CAAI,CAAA;IACJ,+CAAK,CAAA;AACP,CAAC,EAHI,WAAW,KAAX,WAAW,QAGf;AAED;;;;GAIG;AACH,MAAM,OAAO,KAAK;IAqChB;;;OAGG;IACH,YAAmB,cAAsB,CAAC;QAnC1C;;WAEG;QACK,YAAO,GAAW,CAAC,CAAC;QAE5B;;WAEG;QACK,cAAS,GAAW,CAAC,CAAC;QAE9B;;WAEG;QACK,WAAM,GAAW,CAAC,CAAC;QAE3B;;WAEG;QACK,eAAU,GAAgB,EAAE,CAAC;QAErC;;;WAGG;QACK,UAAK,GAAgB,WAAW,CAAC,IAAI,CAAC;QAY5C,IAAI,WAAW,GAAG,CAAC,EAAE;YACnB,MAAM,IAAI,UAAU,CAAC,mCAAmC,CAAC,CAAC;SAC3D;QACD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,YAAY,EAAE,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACI,YAAY,CAAC,SAAoB;QACtC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,KAAK,IAAI,EAAE;YAC9B,IAAI;gBACF,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,MAAM,SAAS,EAAE,CAAC;gBAClB,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,IAAI,CAAC,SAAS,EAAE,CAAC;gBACjB,IAAI,CAAC,eAAe,EAAE,CAAC;aACxB;YAAC,OAAO,KAAU,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;aACnC;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACI,KAAK,CAAC,EAAE;QACb,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAChC,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;SAC1B;QAED,IAAI,CAAC,eAAe,EAAE,CAAC;QAEvB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC3C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YAEnC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;gBACjC,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;gBAC/B,MAAM,CAAC,KAAK,CAAC,CAAC;YAChB,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACK,aAAa;QACnB,IAAI,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;YACxC,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;SACvC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;OAIG;IACK,eAAe;QACrB,IAAI,IAAI,CAAC,KAAK,KAAK,WAAW,CAAC,KAAK,EAAE;YACpC,OAAO;SACR;QAED,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YAC5B,OAAO;SACR;QAED,OAAO,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;YACtC,MAAM,SAAS,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;YACvC,IAAI,SAAS,EAAE;gBACb,SAAS,EAAE,CAAC;aACb;iBAAM;gBACL,OAAO;aACR;SACF;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// In browser, during webpack or browserify bundling, this module will be replaced by 'events'\n// https://github.com/Gozala/events\nimport { EventEmitter } from \"events\";\n\n/**\n * Operation is an async function to be executed and managed by Batch.\n */\nexport declare type Operation = () => Promise;\n\n/**\n * States for Batch.\n */\nenum BatchStates {\n Good,\n Error,\n}\n\n/**\n * Batch provides basic parallel execution with concurrency limits.\n * Will stop execute left operations when one of the executed operation throws an 
error.\n * But Batch cannot cancel ongoing operations, you need to cancel them by yourself.\n */\nexport class Batch {\n /**\n * Concurrency. Must be lager than 0.\n */\n private concurrency: number;\n\n /**\n * Number of active operations under execution.\n */\n private actives: number = 0;\n\n /**\n * Number of completed operations under execution.\n */\n private completed: number = 0;\n\n /**\n * Offset of next operation to be executed.\n */\n private offset: number = 0;\n\n /**\n * Operation array to be executed.\n */\n private operations: Operation[] = [];\n\n /**\n * States of Batch. When an error happens, state will turn into error.\n * Batch will stop execute left operations.\n */\n private state: BatchStates = BatchStates.Good;\n\n /**\n * A private emitter used to pass events inside this class.\n */\n private emitter: EventEmitter;\n\n /**\n * Creates an instance of Batch.\n * @param concurrency -\n */\n public constructor(concurrency: number = 5) {\n if (concurrency < 1) {\n throw new RangeError(\"concurrency must be larger than 0\");\n }\n this.concurrency = concurrency;\n this.emitter = new EventEmitter();\n }\n\n /**\n * Add a operation into queue.\n *\n * @param operation -\n */\n public addOperation(operation: Operation): void {\n this.operations.push(async () => {\n try {\n this.actives++;\n await operation();\n this.actives--;\n this.completed++;\n this.parallelExecute();\n } catch (error: any) {\n this.emitter.emit(\"error\", error);\n }\n });\n }\n\n /**\n * Start execute operations in the queue.\n *\n */\n public async do(): Promise {\n if (this.operations.length === 0) {\n return Promise.resolve();\n }\n\n this.parallelExecute();\n\n return new Promise((resolve, reject) => {\n this.emitter.on(\"finish\", resolve);\n\n this.emitter.on(\"error\", (error) => {\n this.state = BatchStates.Error;\n reject(error);\n });\n });\n }\n\n /**\n * Get next operation to be executed. Return null when reaching ends.\n *\n */\n private nextOperation(): Operation | null {\n if (this.offset < this.operations.length) {\n return this.operations[this.offset++];\n }\n return null;\n }\n\n /**\n * Start execute operations. One one the most important difference between\n * this method with do() is that do() wraps as an sync method.\n *\n */\n private parallelExecute(): void {\n if (this.state === BatchStates.Error) {\n return;\n }\n\n if (this.completed >= this.operations.length) {\n this.emitter.emit(\"finish\");\n return;\n }\n\n while (this.actives < this.concurrency) {\n const operation = this.nextOperation();\n if (operation) {\n operation();\n } else {\n return;\n }\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js new file mode 100644 index 00000000..cfe60cc0 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Readable } from "stream"; +import { AvroReadableFromStream, AvroReader } from "../../../storage-internal-avro/src"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +export class BlobQuickQueryStream extends Readable { + /** + * Creates an instance of BlobQuickQueryStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); + } +} +//# sourceMappingURL=BlobQuickQueryStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map new file mode 100644 index 00000000..db25b58d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BlobQuickQueryStream.js","sourceRoot":"","sources":["../../../../src/utils/BlobQuickQueryStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAKlC,OAAO,EAAE,sBAAsB,EAAE,UAAU,EAAE,MAAM,oCAAoC,CAAC;AAqBxF;;;;GAIG;AACH,MAAM,OAAO,oBAAqB,SAAQ,QAAQ;IAQhD;;;;;OAKG;IACH,YAAmB,MAA6B,EAAE,UAAuC,EAAE;QACzF,KAAK,EAAE,CAAC;QAXF,eAAU,GAAY,IAAI,CAAC;QAYjC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACrC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,sBAAsB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;QAC1E,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;IACrF,CAAC;IAEM,KAAK;QACV,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;gBAChC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAC1B,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;IAEO,KAAK,CAAC,YAAY;QACxB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;QACxB,IAAI,QAAQ,CAAC;QACb,GAAG;YACD,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACtC,IAAI,QAAQ,CAAC,IAAI,EAAE;gBACjB,MAAM;aACP;YACD,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC;YAC3B,MAAM,MAAM,GAAI,GAAW,CAAC,OAAO,CAAC;YACpC,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;gBAC9B,MAAM,KAAK,CAAC,gCAAgC,CAAC,CAAC;aAC/C;YAED,QAAQ,MAAM,EAAE;gBACd,KAAK,0DAA0D;oBAC7D;wBACE,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;wBAC/B,IAAI,IAAI,YAAY,UAAU,KAAK,KAAK,EAAE;4BACxC,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;yBACpD;wBACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE;4BACjC,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;yBACxB;qBACF;oBACD,MAAM;gBACR,KAAK,wDAAwD;oBAC3D;wBACE,MAAM,YAAY,GAAI,GAAW,CAAC,YAAY,CAAC;wBAC/C,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;4BACpC,MAAM,KAAK,CAAC,+CAA+C,CAAC,CAAC;yBAC9D;wBACD,IAAI,IAAI,CAAC,UAAU,EAAE;4BACnB,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC,CAAC;yBAChD;qBACF;oBACD,MAAM;gBACR,KAAK,mDAAmD;oBACtD,IAAI,IAAI,CAAC,UAAU,EAAE;wBACnB,MAAM,UAAU,GAAI,GAAW,CAAC,UAAU,CAAC;wBAC3C,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;4BAClC,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;yBACvD;wBACD,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC,CAAC;qBAC9C;oBACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBACR,KAAK,qDAAqD;oBACxD,IAAI,IAAI,CAAC,OAAO,EAAE;wBAChB,MAAM,KAAK,GAAI,GAAW,CAAC,KAAK,CAAC;wBACjC,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;4BAC9B,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;yBACpD;wBACD,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;wBAC/B,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;4BAC5B,MAAM,KAAK,CAAC,oCAAoC,CAAC,CAAC;yBACnD;wBACD,MAAM,WAAW,GAAI,GAAW,CAAC,WAAW,CAAC;wBAC7C,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;4BACnC,MAAM,KAAK,CAAC,2CAA2C,CAAC,CAAC;yBAC1D;wBACD,MAAM,QAAQ,GAAI,GAAW,CAAC,QAAQ,CAAC;wBACvC,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;4BAChC,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;yBACvD;wBACD,IAAI,CAAC,OAAO,CAAC;4BACX,QAAQ;4BACR,IAAI;4BACJ,OAAO,EAAE,KAAK;4BACd,WAAW;yBACZ,CAAC,CAAC;qBACJ;oBACD,MAAM;gBACR;oBACE,MAAM,KAAK,CAAC,kBAAkB,MAAM,2BAA2B,CAAC,CAAC;aACpE;SACF,QAAQ,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable } from \"stream\";\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TransferProgressEvent } from \"@azure/core-http\";\n\nimport { AvroReadableFromStream, AvroReader } from \"../../../storage-internal-avro/src\";\nimport { BlobQueryError } from \"../Clients\";\n\nexport interface BlobQuickQueryStreamOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the 
@azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.\n */\nexport class BlobQuickQueryStream extends Readable {\n private source: NodeJS.ReadableStream;\n private avroReader: AvroReader;\n private avroIter: AsyncIterableIterator;\n private avroPaused: boolean = true;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private onError?: (error: BlobQueryError) => void;\n\n /**\n * Creates an instance of BlobQuickQueryStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param options -\n */\n public constructor(source: NodeJS.ReadableStream, options: BlobQuickQueryStreamOptions = {}) {\n super();\n this.source = source;\n this.onProgress = options.onProgress;\n this.onError = options.onError;\n this.avroReader = new AvroReader(new AvroReadableFromStream(this.source));\n this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal });\n }\n\n public _read(): void {\n if (this.avroPaused) {\n this.readInternal().catch((err) => {\n this.emit(\"error\", err);\n });\n }\n }\n\n private async readInternal() {\n this.avroPaused = false;\n let avroNext;\n do {\n avroNext = await this.avroIter.next();\n if (avroNext.done) {\n break;\n }\n const obj = avroNext.value;\n const schema = (obj as any).$schema;\n if (typeof schema !== \"string\") {\n throw Error(\"Missing schema in avro record.\");\n }\n\n switch (schema) {\n case \"com.microsoft.azure.storage.queryBlobContents.resultData\":\n {\n const data = (obj as any).data;\n if (data instanceof Uint8Array === false) {\n throw Error(\"Invalid data in avro result record.\");\n }\n if (!this.push(Buffer.from(data))) {\n this.avroPaused = true;\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.progress\":\n {\n const bytesScanned = (obj as any).bytesScanned;\n if (typeof bytesScanned !== \"number\") {\n throw Error(\"Invalid bytesScanned in avro progress record.\");\n }\n if (this.onProgress) {\n this.onProgress({ loadedBytes: bytesScanned });\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.end\":\n if (this.onProgress) {\n const totalBytes = (obj as any).totalBytes;\n if (typeof totalBytes !== \"number\") {\n throw Error(\"Invalid totalBytes in avro end record.\");\n }\n this.onProgress({ loadedBytes: totalBytes });\n }\n this.push(null);\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.error\":\n if (this.onError) {\n const fatal = (obj as any).fatal;\n if (typeof fatal !== \"boolean\") {\n throw Error(\"Invalid fatal in avro error record.\");\n }\n const name = (obj as any).name;\n if (typeof name !== \"string\") {\n throw Error(\"Invalid name in avro error record.\");\n }\n const description = (obj as any).description;\n if (typeof description !== \"string\") {\n throw Error(\"Invalid description in avro error record.\");\n }\n const position = (obj as any).position;\n if (typeof position !== \"number\") {\n throw Error(\"Invalid position in avro error record.\");\n }\n this.onError({\n position,\n name,\n isFatal: fatal,\n description,\n });\n }\n break;\n default:\n throw Error(`Unknown schema ${schema} in avro progress 
record.`);\n }\n } while (!avroNext.done && !this.avroPaused);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js new file mode 100644 index 00000000..d2e1dc67 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var MutexLockStatus; +(function (MutexLockStatus) { + MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; +})(MutexLockStatus || (MutexLockStatus = {})); +/** + * An async mutex lock. + */ +export class Mutex { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); + } + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; + } + else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } +} +Mutex.keys = {}; +Mutex.listeners = {}; +//# sourceMappingURL=Mutex.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js.map new file mode 100644 index 00000000..f4371594 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"Mutex.js","sourceRoot":"","sources":["../../../../src/utils/Mutex.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,IAAK,eAGJ;AAHD,WAAK,eAAe;IAClB,yDAAM,CAAA;IACN,6DAAQ,CAAA;AACV,CAAC,EAHI,eAAe,KAAf,eAAe,QAGnB;AAID;;GAEG;AACH,MAAM,OAAO,KAAK;IAChB;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAW;QAClC,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,EAAE;YACnC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,QAAQ,EAAE;gBAC/E,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;gBACxC,OAAO,EAAE,CAAC;aACX;iBAAM;gBACL,IAAI,CAAC,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE;oBAC3B,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;oBACxC,OAAO,EAAE,CAAC;gBACZ,CAAC,CAAC,CAAC;aACJ;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACI,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,GAAW;QACpC,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,EAAE;YACnC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,MAAM,EAAE;gBAC7C,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;aAC3B;YACD,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACtB,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC,CAAC;IACL,CAAC;IAKO,MAAM,CAAC,aAAa,CAAC,GAAW,EAAE,OAAiB;QACzD,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;YACrC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;SACjC;aAAM;YACL,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;SACnC;IACH,CAAC;IAEO,MAAM,CAAC,eAAe,CAAC,GAAW;QACxC,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YACvE,MAAM,OAAO,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;YAC5C,YAAY,CAAC,GAAG,EAAE;gBAChB,OAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACtB,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;;AAlBc,UAAI,GAAuC,EAAE,CAAC;AAC9C,eAAS,GAAkC,EAAE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nenum MutexLockStatus {\n LOCKED,\n UNLOCKED,\n}\n\ntype Callback = (...args: any[]) => any;\n\n/**\n * An async mutex lock.\n */\nexport class Mutex {\n /**\n * Lock for a specific key. 
If the lock has been acquired by another customer, then\n * will wait until getting the lock.\n *\n * @param key - lock key\n */\n public static async lock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n } else {\n this.onUnlockEvent(key, () => {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n });\n }\n });\n }\n\n /**\n * Unlock a key.\n *\n * @param key -\n */\n public static async unlock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === MutexLockStatus.LOCKED) {\n this.emitUnlockEvent(key);\n }\n delete this.keys[key];\n resolve();\n });\n }\n\n private static keys: { [key: string]: MutexLockStatus } = {};\n private static listeners: { [key: string]: Callback[] } = {};\n\n private static onUnlockEvent(key: string, handler: Callback) {\n if (this.listeners[key] === undefined) {\n this.listeners[key] = [handler];\n } else {\n this.listeners[key].push(handler);\n }\n }\n\n private static emitUnlockEvent(key: string) {\n if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {\n const handler = this.listeners[key].shift();\n setImmediate(() => {\n handler!.call(this);\n });\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js new file mode 100644 index 00000000..2d58e9d2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Readable } from "stream"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. + */ +export class RetriableReadableStream extends Readable { + /** + * Creates an instance of RetriableReadableStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.source.removeAllListeners("data"); + this.source.emit("end"); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? 
undefined : error); + } +} +//# sourceMappingURL=RetriableReadableStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map new file mode 100644 index 00000000..6a24ff32 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map @@ -0,0 +1 @@ +{"version":3,"file":"RetriableReadableStream.js","sourceRoot":"","sources":["../../../../src/utils/RetriableReadableStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAgClC;;;;GAIG;AACH,MAAM,OAAO,uBAAwB,SAAQ,QAAQ;IAWnD;;;;;;;;;OASG;IACH,YACE,MAA6B,EAC7B,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,UAA0C,EAAE;QAE5C,KAAK,CAAC,EAAE,aAAa,EAAE,OAAO,CAAC,aAAa,EAAE,CAAC,CAAC;QAtB1C,YAAO,GAAW,CAAC,CAAC;QAoDpB,sBAAiB,GAAG,CAAC,IAAY,EAAE,EAAE;YAC3C,IAAI,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;gBAClC,IAAI,CAAC,OAAO,CAAC,iBAAiB,GAAG,SAAS,CAAC;gBAC3C,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;gBACpB,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC,MAAM,CAAC,CAAC;gBACvC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACxB,OAAO;aACR;YAED,eAAe;YACf,2EAA2E;YAC3E,KAAK;YACL,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC;YAC3B,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC;aAC5D;YACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;gBACpB,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;aACrB;QACH,CAAC,CAAC;QAEM,4BAAuB,GAAG,CAAC,GAAW,EAAE,EAAE;YAChD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE;gBACpC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBAClB,OAAO;aACR;YAED,eAAe;YACf,kDAAkD;YAClD,kBAAkB;YAClB,+BAA+B;YAC/B,KAAK;YACL,IAAI,CAAC,yBAAyB,EAAE,CAAC;YACjC,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;gBAChC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aACjB;iBAAM,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,GAAG,EAAE;gBAClC,eAAe;gBACf,gEAAgE;gBAChE,KAAK;gBACL,IAAI,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,gBAAgB,EAAE;oBACxC,IAAI,CAAC,OAAO,IAAI,CAAC,CAAC;oBAClB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;yBACrB,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE;wBAClB,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;wBACxB,IAAI,CAAC,sBAAsB,EAAE,CAAC;wBAC9B,OAAO;oBACT,CAAC,CAAC;yBACD,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;wBACf,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;oBACtB,CAAC,CAAC,CAAC;iBACN;qBAAM;oBACL,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,sHACE,IAAI,CAAC,MAAM,GAAG,CAChB,yBAAyB,IAAI,CAAC,GAAG,cAAc,IAAI,CAAC,OAAO,kBACzD,IAAI,CAAC,gBACP,EAAE,CACH,CACF,CAAC;iBACH;aACF;iBAAM;gBACL,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,4FACE,IAAI,CAAC,GACP,sBAAsB,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CACxC,CACF,CAAC;aACH;QACH,CAAC,CAAC;QAnGA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,GAAG,GAAG,MAAM,GAAG,KAAK,GAAG,CAAC,CAAC;QAC9B,IAAI,CAAC,gBAAgB;YACnB,OAAO,CAAC,gBAAgB,IAAI,OAAO,CAAC,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3F,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACrC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QAEvB,IAAI,CAAC,sBAAsB,EAAE,CAAC;IAChC,CAAC;IAEM,KAAK;QACV,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;IACvB,CAAC;IAEO,sBAAsB;QAC5B,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;IACxD,CAAC;IAEO,yBAAyB;QAC/B,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC3D,IAAI,CAAC,MAAM,CAAC,cAAc,CA
AC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QAChE,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;IACpE,CAAC;IA0ED,QAAQ,CAAC,KAAmB,EAAE,QAAiC;QAC7D,iDAAiD;QACjD,IAAI,CAAC,yBAAyB,EAAE,CAAC;QAChC,IAAI,CAAC,MAAmB,CAAC,OAAO,EAAE,CAAC;QAEpC,QAAQ,CAAC,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { TransferProgressEvent } from \"@azure/core-http\";\nimport { Readable } from \"stream\";\n\nexport type ReadableStreamGetter = (offset: number) => Promise;\n\nexport interface RetriableReadableStreamOptions {\n /**\n * Max retry count (greater than or equal to 0), undefined or invalid value means no retry\n */\n maxRetryRequests?: number;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Debug purpose only. Used to inject an unexpected end to existing internal stream,\n * to test stream retry works well or not.\n *\n * When assign it to true, for next incoming \"data\" event of internal stream,\n * RetriableReadableStream will try to emit an \"end\" event to existing internal\n * stream to force it end and start retry from the breaking point.\n * The value will then update to \"undefined\", once the injection works.\n */\n doInjectErrorOnce?: boolean;\n\n /**\n * A threshold, not a limit. Dictates the amount of data that a stream buffers before it stops asking for more data.\n */\n highWaterMark?: number;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.\n */\nexport class RetriableReadableStream extends Readable {\n private start: number;\n private offset: number;\n private end: number;\n private getter: ReadableStreamGetter;\n private source: NodeJS.ReadableStream;\n private retries: number = 0;\n private maxRetryRequests: number;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private options: RetriableReadableStreamOptions;\n\n /**\n * Creates an instance of RetriableReadableStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param getter - A method calling downloading request returning\n * a new ReadableStream from specified offset\n * @param offset - Offset position in original data source to read\n * @param count - How much data in original data source to read\n * @param options -\n */\n public constructor(\n source: NodeJS.ReadableStream,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n super({ highWaterMark: options.highWaterMark });\n this.getter = getter;\n this.source = source;\n this.start = offset;\n this.offset = offset;\n this.end = offset + count - 1;\n this.maxRetryRequests =\n options.maxRetryRequests && options.maxRetryRequests >= 0 ? 
options.maxRetryRequests : 0;\n this.onProgress = options.onProgress;\n this.options = options;\n\n this.setSourceEventHandlers();\n }\n\n public _read(): void {\n this.source.resume();\n }\n\n private setSourceEventHandlers() {\n this.source.on(\"data\", this.sourceDataHandler);\n this.source.on(\"end\", this.sourceErrorOrEndHandler);\n this.source.on(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private removeSourceEventHandlers() {\n this.source.removeListener(\"data\", this.sourceDataHandler);\n this.source.removeListener(\"end\", this.sourceErrorOrEndHandler);\n this.source.removeListener(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private sourceDataHandler = (data: Buffer) => {\n if (this.options.doInjectErrorOnce) {\n this.options.doInjectErrorOnce = undefined;\n this.source.pause();\n this.source.removeAllListeners(\"data\");\n this.source.emit(\"end\");\n return;\n }\n\n // console.log(\n // `Offset: ${this.offset}, Received ${data.length} from internal stream`\n // );\n this.offset += data.length;\n if (this.onProgress) {\n this.onProgress({ loadedBytes: this.offset - this.start });\n }\n if (!this.push(data)) {\n this.source.pause();\n }\n };\n\n private sourceErrorOrEndHandler = (err?: Error) => {\n if (err && err.name === \"AbortError\") {\n this.destroy(err);\n return;\n }\n\n // console.log(\n // `Source stream emits end or error, offset: ${\n // this.offset\n // }, dest end : ${this.end}`\n // );\n this.removeSourceEventHandlers();\n if (this.offset - 1 === this.end) {\n this.push(null);\n } else if (this.offset <= this.end) {\n // console.log(\n // `retries: ${this.retries}, max retries: ${this.maxRetries}`\n // );\n if (this.retries < this.maxRetryRequests) {\n this.retries += 1;\n this.getter(this.offset)\n .then((newSource) => {\n this.source = newSource;\n this.setSourceEventHandlers();\n return;\n })\n .catch((error) => {\n this.destroy(error);\n });\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${\n this.offset - 1\n }, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${\n this.maxRetryRequests\n }`\n )\n );\n }\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: Received more data than original request, data needed offset is ${\n this.end\n }, received offset: ${this.offset - 1}`\n )\n );\n }\n };\n\n _destroy(error: Error | null, callback: (error?: Error) => void): void {\n // remove listener from source and release source\n this.removeSourceEventHandlers();\n (this.source as Readable).destroy();\n\n callback(error === null ? undefined : error);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js new file mode 100644 index 00000000..131ba872 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { DefaultHttpClient } from "@azure/core-http"; +const _defaultHttpClient = new DefaultHttpClient(); +export function getCachedDefaultHttpClient() { + return _defaultHttpClient; +} +//# sourceMappingURL=cache.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js.map new file mode 100644 index 00000000..8156a9f0 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cache.js","sourceRoot":"","sources":["../../../../src/utils/cache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AAGrD,MAAM,kBAAkB,GAAG,IAAI,iBAAiB,EAAE,CAAC;AAEnD,MAAM,UAAU,0BAA0B;IACxC,OAAO,kBAAkB,CAAC;AAC5B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"@azure/core-http\";\nimport { IHttpClient } from \"../Pipeline\";\n\nconst _defaultHttpClient = new DefaultHttpClient();\n\nexport function getCachedDefaultHttpClient(): IHttpClient {\n return _defaultHttpClient;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js new file mode 100644 index 00000000..05cc2602 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js @@ -0,0 +1,198 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const SDK_VERSION = "12.10.0"; +export const SERVICE_VERSION = "2021-06-08"; +export const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +export const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +export const BLOCK_BLOB_MAX_BLOCKS = 50000; +export const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +export const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +export const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +/** + * The OAuth scope to use with Azure Storage. 
+ */ +export const StorageOAuthScopes = "https://storage.azure.com/.default"; +export const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +export const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +export const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", +}; +export const ETagNone = ""; +export const ETagAny = "*"; +export const SIZE_1_MB = 1 * 1024 * 1024; +export const BATCH_MAX_REQUEST = 256; +export const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +export const HTTP_LINE_ENDING = "\r\n"; +export const HTTP_VERSION_1_1 = "HTTP/1.1"; +export const EncryptionAlgorithmAES25 = "AES256"; +export const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +export const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + 
"x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +export const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +export const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +export const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js.map new file mode 100644 index 00000000..06e3c480 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../../src/utils/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,CAAC,MAAM,WAAW,GAAW,SAAS,CAAC;AAC7C,MAAM,CAAC,MAAM,eAAe,GAAW,YAAY,CAAC;AAEpD,MAAM,CAAC,MAAM,gCAAgC,GAAW,GAAG,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,QAAQ;AACnF,MAAM,CAAC,MAAM,gCAAgC,GAAW,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,SAAS;AACrF,MAAM,CAAC,MAAM,qBAAqB,GAAW,KAAK,CAAC;AACnD,MAAM,CAAC,MAAM,+BAA+B,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,MAAM;AAC9E,MAAM,CAAC,MAAM,iCAAiC,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,MAAM;AAChF,MAAM,CAAC,MAAM,mCAAmC,GAAW,CAAC,CAAC;AAC7D;;GAEG;AACH,MAAM,CAAC,MAAM,kBAAkB,GAAsB,oCAAoC,CAAC;AAE1F,MAAM,CAAC,MAAM,YAAY,GAAG;IAC1B,UAAU,EAAE;QACV,sBAAsB,EAAE,GAAG;QAC3B,SAAS,EAAE,KAAK;QAChB,QAAQ,EAAE,UAAU;QACpB,SAAS,EAAE,WAAW;QACtB,OAAO,EAAE,SAAS;KACnB;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAG;IAC/B,aAAa,EAAE,GAAG;IAClB,aAAa,EAAE,GAAG;IAClB,cAAc,EAAE,GAAG;IACnB,kBAAkB,EAAE,GAAG;IACvB,0BAA0B,EAAE,GAAG;CAChC,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAG;IAC7B,aAAa,EAAE,eAAe;IAC9B,oBAAoB,EAAE,QAAQ;IAC9B,gBAAgB,EAAE,kBAAkB;IACpC,UAAU,EAAE,YAAY;IACxB,gBAAgB,EAAE,kBAAkB;IACpC,cAAc,EAAE,gBAAgB;IAChC,WAAW,EAAE,aAAa;IAC1B,yBAAyB,EAAE,2BAA2B;IACtD,YAAY,EAAE,cAAc;IAC5B,MAAM,EAAE,QAAQ;IAChB,IAAI,EAAE,MAAM;IACZ,QAAQ,EAAE,UAAU;IACpB,iBAAiB,EAAE,mBAAmB;IACtC,aAAa,EAAE,eAAe;IAC9B,mBAAmB,EAAE,qBAAqB;IAC1C,kBAAkB,EAAE,OAAO;IAC3B,KAAK,EAAE,OAAO;IACd,UAAU,EAAE,YAAY;IACxB,sBAAsB,EAAE,wBAAwB;IAChD,gBAAgB,EAAE,kBAAkB;IACpC,SAAS,EAAE,WAAW;IACtB,eAAe,EAAE,iBAAiB;IAClC,YAAY,EAAE,cAAc;CAC7B,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAG,EAAE,CAAC;AAC3B,MAAM,CAAC,MAAM,OAAO,GAAG,GAAG,CAAC;AAE3B,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AACzC,MAAM,CAAC,MAAM,iBAAiB,GAAG,GAAG,CAAC;AACrC,MAAM,CAAC,MAAM,0BAA0B,GAAG,CAAC,GAAG,SAAS,CAAC;AACxD,MAAM,CAAC,M
AAM,gBAAgB,GAAG,MAAM,CAAC;AACvC,MAAM,CAAC,MAAM,gBAAgB,GAAG,UAAU,CAAC;AAE3C,MAAM,CAAC,MAAM,wBAAwB,GAAG,QAAQ,CAAC;AAEjD,MAAM,CAAC,MAAM,2BAA2B,GAAG,sNAAsN,CAAC;AAElQ,MAAM,CAAC,MAAM,oCAAoC,GAAG;IAClD,6BAA6B;IAC7B,eAAe;IACf,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,YAAY;IACZ,aAAa;IACb,mBAAmB;IACnB,YAAY;IACZ,wBAAwB;IACxB,WAAW;IACX,iBAAiB;IACjB,iBAAiB;IACjB,+BAA+B;IAC/B,cAAc;IACd,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,kBAAkB;IAClB,aAAa;IACb,eAAe;IACf,MAAM;IACN,eAAe;IACf,QAAQ;IACR,MAAM;IACN,oBAAoB;IACpB,kBAAkB;IAClB,2BAA2B;IAC3B,cAAc;IACd,oBAAoB;IACpB,kBAAkB;IAClB,8BAA8B;IAC9B,qBAAqB;IACrB,kBAAkB;IAClB,mBAAmB;IACnB,YAAY;IACZ,+BAA+B;IAC/B,uBAAuB;IACvB,eAAe;IACf,mBAAmB;IACnB,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,8BAA8B;IAC9B,2BAA2B;IAC3B,mBAAmB;IACnB,qBAAqB;IACrB,yBAAyB;IACzB,yBAAyB;IACzB,iCAAiC;IACjC,+BAA+B;IAC/B,6BAA6B;IAC7B,+BAA+B;IAC/B,4BAA4B;IAC5B,4BAA4B;IAC5B,0BAA0B;IAC1B,uBAAuB;IACvB,wBAAwB;IACxB,yBAAyB;IACzB,2BAA2B;IAC3B,gBAAgB;IAChB,gCAAgC;IAChC,oBAAoB;IACpB,+BAA+B;IAC/B,uBAAuB;IACvB,4BAA4B;IAC5B,qCAAqC;IACrC,2BAA2B;IAC3B,4BAA4B;IAC5B,4BAA4B;IAC5B,4BAA4B;IAC5B,uBAAuB;IACvB,mBAAmB;IACnB,yBAAyB;IACzB,qBAAqB;IACrB,eAAe;IACf,iBAAiB;IACjB,iBAAiB;IACjB,wBAAwB;IACxB,4BAA4B;IAC5B,yBAAyB;IACzB,6BAA6B;IAC7B,eAAe;IACf,yBAAyB;IACzB,sBAAsB;IACtB,+BAA+B;IAC/B,2BAA2B;IAC3B,iCAAiC;IACjC,gBAAgB;IAChB,4BAA4B;IAC5B,cAAc;IACd,qBAAqB;CACtB,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAAG;IACtD,MAAM;IACN,YAAY;IACZ,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,IAAI;IACJ,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,IAAI;IACJ,IAAI;IACJ,SAAS;IACT,QAAQ;IACR,QAAQ;IACR,QAAQ;IACR,SAAS;IACT,SAAS;IACT,eAAe;IACf,WAAW;IACX,cAAc;IACd,KAAK;IACL,OAAO;IACP,KAAK;IACL,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;CACX,CAAC;AAEF,MAAM,CAAC,MAAM,sCAAsC,GAAG,qCAAqC,CAAC;AAC5F,MAAM,CAAC,MAAM,yCAAyC,GACpD,2CAA2C,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const SDK_VERSION: string = \"12.10.0\";\nexport const SERVICE_VERSION: string = \"2021-06-08\";\n\nexport const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES: number = 256 * 1024 * 1024; // 256MB\nexport const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES: number = 4000 * 1024 * 1024; // 4000MB\nexport const BLOCK_BLOB_MAX_BLOCKS: number = 50000;\nexport const DEFAULT_BLOCK_BUFFER_SIZE_BYTES: number = 8 * 1024 * 1024; // 8MB\nexport const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES: number = 4 * 1024 * 1024; // 4MB\nexport const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS: number = 5;\n/**\n * The OAuth scope to use with Azure Storage.\n */\nexport const StorageOAuthScopes: string | string[] = \"https://storage.azure.com/.default\";\n\nexport const URLConstants = {\n Parameters: {\n FORCE_BROWSER_NO_CACHE: \"_\",\n SIGNATURE: \"sig\",\n SNAPSHOT: \"snapshot\",\n VERSIONID: \"versionid\",\n TIMEOUT: \"timeout\",\n },\n};\n\nexport const HTTPURLConnection = {\n HTTP_ACCEPTED: 202,\n HTTP_CONFLICT: 409,\n HTTP_NOT_FOUND: 404,\n HTTP_PRECON_FAILED: 412,\n HTTP_RANGE_NOT_SATISFIABLE: 416,\n};\n\nexport const HeaderConstants = {\n AUTHORIZATION: \"Authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n CONTENT_ENCODING: \"Content-Encoding\",\n CONTENT_ID: \"Content-ID\",\n CONTENT_LANGUAGE: \"Content-Language\",\n CONTENT_LENGTH: \"Content-Length\",\n CONTENT_MD5: \"Content-Md5\",\n CONTENT_TRANSFER_ENCODING: \"Content-Transfer-Encoding\",\n CONTENT_TYPE: \"Content-Type\",\n COOKIE: \"Cookie\",\n DATE: \"date\",\n IF_MATCH: \"if-match\",\n IF_MODIFIED_SINCE: \"if-modified-since\",\n IF_NONE_MATCH: \"if-none-match\",\n IF_UNMODIFIED_SINCE: 
\"if-unmodified-since\",\n PREFIX_FOR_STORAGE: \"x-ms-\",\n RANGE: \"Range\",\n USER_AGENT: \"User-Agent\",\n X_MS_CLIENT_REQUEST_ID: \"x-ms-client-request-id\",\n X_MS_COPY_SOURCE: \"x-ms-copy-source\",\n X_MS_DATE: \"x-ms-date\",\n X_MS_ERROR_CODE: \"x-ms-error-code\",\n X_MS_VERSION: \"x-ms-version\",\n};\n\nexport const ETagNone = \"\";\nexport const ETagAny = \"*\";\n\nexport const SIZE_1_MB = 1 * 1024 * 1024;\nexport const BATCH_MAX_REQUEST = 256;\nexport const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;\nexport const HTTP_LINE_ENDING = \"\\r\\n\";\nexport const HTTP_VERSION_1_1 = \"HTTP/1.1\";\n\nexport const EncryptionAlgorithmAES25 = \"AES256\";\n\nexport const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`;\n\nexport const StorageBlobLoggingAllowedHeaderNames = [\n \"Access-Control-Allow-Origin\",\n \"Cache-Control\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"Request-Id\",\n \"traceparent\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"x-ms-client-request-id\",\n \"x-ms-date\",\n \"x-ms-error-code\",\n \"x-ms-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-version\",\n \"Accept-Ranges\",\n \"Content-Disposition\",\n \"Content-Encoding\",\n \"Content-Language\",\n \"Content-MD5\",\n \"Content-Range\",\n \"ETag\",\n \"Last-Modified\",\n \"Server\",\n \"Vary\",\n \"x-ms-content-crc64\",\n \"x-ms-copy-action\",\n \"x-ms-copy-completion-time\",\n \"x-ms-copy-id\",\n \"x-ms-copy-progress\",\n \"x-ms-copy-status\",\n \"x-ms-has-immutability-policy\",\n \"x-ms-has-legal-hold\",\n \"x-ms-lease-state\",\n \"x-ms-lease-status\",\n \"x-ms-range\",\n \"x-ms-request-server-encrypted\",\n \"x-ms-server-encrypted\",\n \"x-ms-snapshot\",\n \"x-ms-source-range\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"x-ms-access-tier\",\n \"x-ms-access-tier-change-time\",\n \"x-ms-access-tier-inferred\",\n \"x-ms-account-kind\",\n \"x-ms-archive-status\",\n \"x-ms-blob-append-offset\",\n \"x-ms-blob-cache-control\",\n \"x-ms-blob-committed-block-count\",\n \"x-ms-blob-condition-appendpos\",\n \"x-ms-blob-condition-maxsize\",\n \"x-ms-blob-content-disposition\",\n \"x-ms-blob-content-encoding\",\n \"x-ms-blob-content-language\",\n \"x-ms-blob-content-length\",\n \"x-ms-blob-content-md5\",\n \"x-ms-blob-content-type\",\n \"x-ms-blob-public-access\",\n \"x-ms-blob-sequence-number\",\n \"x-ms-blob-type\",\n \"x-ms-copy-destination-snapshot\",\n \"x-ms-creation-time\",\n \"x-ms-default-encryption-scope\",\n \"x-ms-delete-snapshots\",\n \"x-ms-delete-type-permanent\",\n \"x-ms-deny-encryption-scope-override\",\n \"x-ms-encryption-algorithm\",\n \"x-ms-if-sequence-number-eq\",\n \"x-ms-if-sequence-number-le\",\n \"x-ms-if-sequence-number-lt\",\n \"x-ms-incremental-copy\",\n \"x-ms-lease-action\",\n \"x-ms-lease-break-period\",\n \"x-ms-lease-duration\",\n \"x-ms-lease-id\",\n \"x-ms-lease-time\",\n \"x-ms-page-write\",\n \"x-ms-proposed-lease-id\",\n \"x-ms-range-get-content-md5\",\n \"x-ms-rehydrate-priority\",\n \"x-ms-sequence-number-action\",\n \"x-ms-sku-name\",\n \"x-ms-source-content-md5\",\n \"x-ms-source-if-match\",\n \"x-ms-source-if-modified-since\",\n \"x-ms-source-if-none-match\",\n \"x-ms-source-if-unmodified-since\",\n \"x-ms-tag-count\",\n \"x-ms-encryption-key-sha256\",\n \"x-ms-if-tags\",\n 
\"x-ms-source-if-tags\",\n];\n\nexport const StorageBlobLoggingAllowedQueryParameters = [\n \"comp\",\n \"maxresults\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"se\",\n \"si\",\n \"sip\",\n \"sp\",\n \"spr\",\n \"sr\",\n \"srt\",\n \"ss\",\n \"st\",\n \"sv\",\n \"include\",\n \"marker\",\n \"prefix\",\n \"copyid\",\n \"restype\",\n \"blockid\",\n \"blocklisttype\",\n \"delimiter\",\n \"prevsnapshot\",\n \"ske\",\n \"skoid\",\n \"sks\",\n \"skt\",\n \"sktid\",\n \"skv\",\n \"snapshot\",\n];\n\nexport const BlobUsesCustomerSpecifiedEncryptionMsg = \"BlobUsesCustomerSpecifiedEncryption\";\nexport const BlobDoesNotUseCustomerSpecifiedEncryption =\n \"BlobDoesNotUseCustomerSpecifiedEncryption\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js new file mode 100644 index 00000000..66a4527e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createSpanFunction } from "@azure/core-tracing"; +/** + * Creates a span using the global tracer. + * @internal + */ +export const createSpan = createSpanFunction({ + packagePrefix: "Azure.Storage.Blob", + namespace: "Microsoft.Storage", +}); +/** + * @internal + * + * Adapt the tracing options from OperationOptions to what they need to be for + * RequestOptionsBase (when we update to later OpenTelemetry versions this is now + * two separate fields, not just one). + */ +export function convertTracingToRequestOptionsBase(options) { + var _a, _b; + return { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? 
void 0 : _b.tracingContext, + }; +} +//# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js.map new file mode 100644 index 00000000..82a88ca9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../../../src/utils/tracing.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AAEzD;;;GAGG;AACH,MAAM,CAAC,MAAM,UAAU,GAAG,kBAAkB,CAAC;IAC3C,aAAa,EAAE,oBAAoB;IACnC,SAAS,EAAE,mBAAmB;CAC/B,CAAC,CAAC;AAEH;;;;;;GAMG;AACH,MAAM,UAAU,kCAAkC,CAChD,OAA0B;;IAE1B,OAAO;QACL,+HAA+H;QAC/H,WAAW,EAAE,MAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,cAAsB,0CAAE,WAAW;QAC1D,cAAc,EAAE,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,cAAc,0CAAE,cAAc;KACxD,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { OperationOptions, RequestOptionsBase } from \"@azure/core-http\";\nimport { createSpanFunction } from \"@azure/core-tracing\";\n\n/**\n * Creates a span using the global tracer.\n * @internal\n */\nexport const createSpan = createSpanFunction({\n packagePrefix: \"Azure.Storage.Blob\",\n namespace: \"Microsoft.Storage\",\n});\n\n/**\n * @internal\n *\n * Adapt the tracing options from OperationOptions to what they need to be for\n * RequestOptionsBase (when we update to later OpenTelemetry versions this is now\n * two separate fields, not just one).\n */\nexport function convertTracingToRequestOptionsBase(\n options?: OperationOptions\n): Pick {\n return {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n spanOptions: (options?.tracingOptions as any)?.spanOptions,\n tracingContext: options?.tracingOptions?.tracingContext,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js new file mode 100644 index 00000000..8b76ae8e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Convert a Browser Blob object into ArrayBuffer. + * + * @param blob - + */ +export async function blobToArrayBuffer(blob) { + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + fileReader.onloadend = (ev) => { + resolve(ev.target.result); + }; + fileReader.onerror = reject; + fileReader.readAsArrayBuffer(blob); + }); +} +/** + * Convert a Browser Blob object into string. 
+ * + * @param blob - + */ +export async function blobToString(blob) { + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + fileReader.onloadend = (ev) => { + resolve(ev.target.result); + }; + fileReader.onerror = reject; + fileReader.readAsText(blob); + }); +} +export function streamToBuffer() { + /* empty */ +} +export function streamToBuffer2() { + /* empty */ +} +export function readStreamToLocalFile() { + /* empty */ +} +export const fsStat = function stat() { + /* empty */ +}; +export const fsCreateReadStream = function createReadStream() { + /* empty */ +}; +//# sourceMappingURL=utils.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js.map new file mode 100644 index 00000000..be894324 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.browser.js","sourceRoot":"","sources":["../../../../src/utils/utils.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAC,IAAU;IAChD,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;IACpC,OAAO,IAAI,OAAO,CAAc,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAClD,UAAU,CAAC,SAAS,GAAG,CAAC,EAAO,EAAE,EAAE;YACjC,OAAO,CAAC,EAAE,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC;QACF,UAAU,CAAC,OAAO,GAAG,MAAM,CAAC;QAC5B,UAAU,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAAC,IAAU;IAC3C,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;IACpC,OAAO,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC7C,UAAU,CAAC,SAAS,GAAG,CAAC,EAAO,EAAE,EAAE;YACjC,OAAO,CAAC,EAAE,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC;QACF,UAAU,CAAC,OAAO,GAAG,MAAM,CAAC;QAC5B,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAU,cAAc;IAC5B,WAAW;AACb,CAAC;AAED,MAAM,UAAU,eAAe;IAC7B,WAAW;AACb,CAAC;AAED,MAAM,UAAU,qBAAqB;IACnC,WAAW;AACb,CAAC;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,SAAS,IAAI;IACjC,WAAW;AACb,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAG,SAAS,gBAAgB;IACzD,WAAW;AACb,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Convert a Browser Blob object into ArrayBuffer.\n *\n * @param blob -\n */\nexport async function blobToArrayBuffer(blob: Blob): Promise {\n const fileReader = new FileReader();\n return new Promise((resolve, reject) => {\n fileReader.onloadend = (ev: any) => {\n resolve(ev.target!.result);\n };\n fileReader.onerror = reject;\n fileReader.readAsArrayBuffer(blob);\n });\n}\n\n/**\n * Convert a Browser Blob object into string.\n *\n * @param blob -\n */\nexport async function blobToString(blob: Blob): Promise {\n const fileReader = new FileReader();\n return new Promise((resolve, reject) => {\n fileReader.onloadend = (ev: any) => {\n resolve(ev.target!.result);\n };\n fileReader.onerror = reject;\n fileReader.readAsText(blob);\n });\n}\n\nexport function streamToBuffer(): void {\n /* empty */\n}\n\nexport function streamToBuffer2(): void {\n /* empty */\n}\n\nexport function readStreamToLocalFile(): void {\n /* empty */\n}\n\nexport const fsStat = function stat(): void {\n /* empty */\n};\n\nexport const fsCreateReadStream = function createReadStream(): void {\n /* empty */\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js new file mode 100644 index 00000000..24116df4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js @@ -0,0 +1,992 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders, isNode, URLBuilder } from "@azure/core-http"; +import { DevelopmentConnectionString, HeaderConstants, URLConstants } from "./constants"; +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. 
+ * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +export function escapeURLPath(url) { + const urlParsed = URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path || "/"; + path = escape(path); + urlParsed.setPath(path); + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; +} +export function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; +} +/** + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. + */ +export function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? 
blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + const accountName = getAccountNameFromUrl(blobEndpoint); + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } +} +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" +} +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". + * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +export function appendToURLPath(url, name) { + const urlParsed = URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + urlParsed.setPath(path); + return urlParsed.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +export function setURLParameter(url, name, value) { + const urlParsed = URLBuilder.parse(url); + urlParsed.setQueryParameter(name, value); + return urlParsed.toString(); +} +/** + * Get URL parameter by name. 
+ * + * @param url - + * @param name - + */ +export function getURLParameter(url, name) { + const urlParsed = URLBuilder.parse(url); + return urlParsed.getQueryParameterValue(name); +} +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +export function setURLHost(url, host) { + const urlParsed = URLBuilder.parse(url); + urlParsed.setHost(host); + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +export function getURLPath(url) { + const urlParsed = URLBuilder.parse(url); + return urlParsed.getPath(); +} +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +export function getURLScheme(url) { + const urlParsed = URLBuilder.parse(url); + return urlParsed.getScheme(); +} +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +export function getURLPathAndQuery(url) { + const urlParsed = URLBuilder.parse(url); + const pathString = urlParsed.getPath(); + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.getQuery() || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; +} +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +export function getURLQueries(url) { + let queryString = URLBuilder.parse(url).getQuery(); + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; +} +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. + */ +export function appendToURLQuery(url, queryParts) { + const urlParsed = URLBuilder.parse(url); + let query = urlParsed.getQuery(); + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.setQuery(query); + return urlParsed.toString(); +} +/** + * Rounds a date off to seconds. + * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +export function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. + * + * @param content - + */ +export function base64encode(content) { + return !isNode ? 
btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Base64 decode. + * + * @param encodedString - + */ +export function base64decode(encodedString) { + return !isNode ? atob(encodedString) : Buffer.from(encodedString, "base64").toString(); +} +/** + * Generate a 64 bytes base64 block ID string. + * + * @param blockIndex - + */ +export function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); +} +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +export async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +export function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } +} +export function sanitizeURL(url) { + let safeURL = url; + if (getURLParameter(safeURL, URLConstants.Parameters.SIGNATURE)) { + safeURL = setURLParameter(safeURL, URLConstants.Parameters.SIGNATURE, "*****"); + } + return safeURL; +} +export function sanitizeHeaders(originalHeader) { + const headers = new HttpHeaders(); + for (const header of originalHeader.headersArray()) { + if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(header.name, "*****"); + } + else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(header.name, sanitizeURL(header.value)); + } + else { + headers.set(header.name, header.value); + } + } + return headers; +} +/** + * If two strings are equal when compared case insensitive. 
+ * + * @param str1 - + * @param str2 - + */ +export function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +export function getAccountNameFromUrl(url) { + const parsedUrl = URLBuilder.parse(url); + let accountName; + try { + if (parsedUrl.getHost().split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.getHost().split(".")[0]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.getPath().split("/")[1]; + } + else { + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; + } + return accountName; + } + catch (error) { + throw new Error("Unable to extract accountName with provided information."); + } +} +export function isIpEndpointStyle(parsedUrl) { + if (parsedUrl.getHost() === undefined) { + return false; + } + const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port), use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +export function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); +} +/** + * Convert Tags type to BlobTags. + * + * @param tags - + */ +export function toBlobTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +export function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. 
+ * + * @param textConfiguration - + */ +export function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } +} +export function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. + return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } + } + return orProperties; +} +/** + * Attach a TokenCredential to an object. + * + * @param thing - + * @param credential - + */ +export function attachCredential(thing, credential) { + thing.credential = credential; + return thing; +} +export function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +export function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +export function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +export function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function decodeBase64String(value) { + if (isNode) { + return Buffer.from(value, "base64"); + } + else { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } +} +function ParseBoolean(content) { + if (content === undefined) + return undefined; + if (content === "true") + return true; + if (content === "false") + return false; + return undefined; +} +function ParseBlobName(blobNameInXML) { + if (blobNameInXML["$"] !== undefined && blobNameInXML["#"] !== undefined) { + return { + encoded: ParseBoolean(blobNameInXML["$"]["Encoded"]), + content: blobNameInXML["#"], + }; + } + else { + return { + encoded: false, + content: blobNameInXML, + }; + } +} +function ParseBlobProperties(blobPropertiesInXML) { + const blobProperties = blobPropertiesInXML; + if (blobPropertiesInXML["Creation-Time"]) { + blobProperties.createdOn = new Date(blobPropertiesInXML["Creation-Time"]); + delete blobProperties["Creation-Time"]; + } + if (blobPropertiesInXML["Last-Modified"]) { + blobProperties.lastModified = new Date(blobPropertiesInXML["Last-Modified"]); + delete blobProperties["Last-Modified"]; + } + if (blobPropertiesInXML["Etag"]) { + blobProperties.etag = blobPropertiesInXML["Etag"]; + delete blobProperties["Etag"]; + } + if (blobPropertiesInXML["Content-Length"]) { + blobProperties.contentLength = parseFloat(blobPropertiesInXML["Content-Length"]); + delete blobProperties["Content-Length"]; + } + if (blobPropertiesInXML["Content-Type"]) { + blobProperties.contentType = blobPropertiesInXML["Content-Type"]; + delete blobProperties["Content-Type"]; + } + if (blobPropertiesInXML["Content-Encoding"]) { + blobProperties.contentEncoding = blobPropertiesInXML["Content-Encoding"]; + delete blobProperties["Content-Encoding"]; + } + if (blobPropertiesInXML["Content-Language"]) { + blobProperties.contentLanguage = blobPropertiesInXML["Content-Language"]; + delete blobProperties["Content-Language"]; + } + if (blobPropertiesInXML["Content-MD5"]) { + blobProperties.contentMD5 = decodeBase64String(blobPropertiesInXML["Content-MD5"]); + delete blobProperties["Content-MD5"]; + } + if (blobPropertiesInXML["Content-Disposition"]) { + blobProperties.contentDisposition = blobPropertiesInXML["Content-Disposition"]; + delete blobProperties["Content-Disposition"]; + } + if (blobPropertiesInXML["Cache-Control"]) { + blobProperties.cacheControl = blobPropertiesInXML["Cache-Control"]; + delete blobProperties["Cache-Control"]; + } + if (blobPropertiesInXML["x-ms-blob-sequence-number"]) { + blobProperties.blobSequenceNumber = parseFloat(blobPropertiesInXML["x-ms-blob-sequence-number"]); + delete blobProperties["x-ms-blob-sequence-number"]; + } + if (blobPropertiesInXML["BlobType"]) { + blobProperties.blobType = blobPropertiesInXML["BlobType"]; + delete blobProperties["BlobType"]; + } + if (blobPropertiesInXML["LeaseStatus"]) { + blobProperties.leaseStatus = blobPropertiesInXML["LeaseStatus"]; + delete blobProperties["LeaseStatus"]; + } + if (blobPropertiesInXML["LeaseState"]) { + blobProperties.leaseState = 
blobPropertiesInXML["LeaseState"]; + delete blobProperties["LeaseState"]; + } + if (blobPropertiesInXML["LeaseDuration"]) { + blobProperties.leaseDuration = blobPropertiesInXML["LeaseDuration"]; + delete blobProperties["LeaseDuration"]; + } + if (blobPropertiesInXML["CopyId"]) { + blobProperties.copyId = blobPropertiesInXML["CopyId"]; + delete blobProperties["CopyId"]; + } + if (blobPropertiesInXML["CopyStatus"]) { + blobProperties.copyStatus = blobPropertiesInXML["CopyStatus"]; + delete blobProperties["CopyStatus"]; + } + if (blobPropertiesInXML["CopySource"]) { + blobProperties.copySource = blobPropertiesInXML["CopySource"]; + delete blobProperties["CopySource"]; + } + if (blobPropertiesInXML["CopyProgress"]) { + blobProperties.copyProgress = blobPropertiesInXML["CopyProgress"]; + delete blobProperties["CopyProgress"]; + } + if (blobPropertiesInXML["CopyCompletionTime"]) { + blobProperties.copyCompletedOn = new Date(blobPropertiesInXML["CopyCompletionTime"]); + delete blobProperties["CopyCompletionTime"]; + } + if (blobPropertiesInXML["CopyStatusDescription"]) { + blobProperties.copyStatusDescription = blobPropertiesInXML["CopyStatusDescription"]; + delete blobProperties["CopyStatusDescription"]; + } + if (blobPropertiesInXML["ServerEncrypted"]) { + blobProperties.serverEncrypted = ParseBoolean(blobPropertiesInXML["ServerEncrypted"]); + delete blobProperties["ServerEncrypted"]; + } + if (blobPropertiesInXML["IncrementalCopy"]) { + blobProperties.incrementalCopy = ParseBoolean(blobPropertiesInXML["IncrementalCopy"]); + delete blobProperties["IncrementalCopy"]; + } + if (blobPropertiesInXML["DestinationSnapshot"]) { + blobProperties.destinationSnapshot = blobPropertiesInXML["DestinationSnapshot"]; + delete blobProperties["DestinationSnapshot"]; + } + if (blobPropertiesInXML["DeletedTime"]) { + blobProperties.deletedOn = new Date(blobPropertiesInXML["DeletedTime"]); + delete blobProperties["DeletedTime"]; + } + if (blobPropertiesInXML["RemainingRetentionDays"]) { + blobProperties.remainingRetentionDays = parseFloat(blobPropertiesInXML["RemainingRetentionDays"]); + delete blobProperties["RemainingRetentionDays"]; + } + if (blobPropertiesInXML["AccessTier"]) { + blobProperties.accessTier = blobPropertiesInXML["AccessTier"]; + delete blobProperties["AccessTier"]; + } + if (blobPropertiesInXML["AccessTierInferred"]) { + blobProperties.accessTierInferred = ParseBoolean(blobPropertiesInXML["AccessTierInferred"]); + delete blobProperties["AccessTierInferred"]; + } + if (blobPropertiesInXML["ArchiveStatus"]) { + blobProperties.archiveStatus = blobPropertiesInXML["ArchiveStatus"]; + delete blobProperties["ArchiveStatus"]; + } + if (blobPropertiesInXML["CustomerProvidedKeySha256"]) { + blobProperties.customerProvidedKeySha256 = blobPropertiesInXML["CustomerProvidedKeySha256"]; + delete blobProperties["CustomerProvidedKeySha256"]; + } + if (blobPropertiesInXML["EncryptionScope"]) { + blobProperties.encryptionScope = blobPropertiesInXML["EncryptionScope"]; + delete blobProperties["EncryptionScope"]; + } + if (blobPropertiesInXML["AccessTierChangeTime"]) { + blobProperties.accessTierChangedOn = new Date(blobPropertiesInXML["AccessTierChangeTime"]); + delete blobProperties["AccessTierChangeTime"]; + } + if (blobPropertiesInXML["TagCount"]) { + blobProperties.tagCount = parseFloat(blobPropertiesInXML["TagCount"]); + delete blobProperties["TagCount"]; + } + if (blobPropertiesInXML["Expiry-Time"]) { + blobProperties.expiresOn = new Date(blobPropertiesInXML["Expiry-Time"]); + delete 
blobProperties["Expiry-Time"]; + } + if (blobPropertiesInXML["Sealed"]) { + blobProperties.isSealed = ParseBoolean(blobPropertiesInXML["Sealed"]); + delete blobProperties["Sealed"]; + } + if (blobPropertiesInXML["RehydratePriority"]) { + blobProperties.rehydratePriority = blobPropertiesInXML["RehydratePriority"]; + delete blobProperties["RehydratePriority"]; + } + if (blobPropertiesInXML["LastAccessTime"]) { + blobProperties.lastAccessedOn = new Date(blobPropertiesInXML["LastAccessTime"]); + delete blobProperties["LastAccessTime"]; + } + if (blobPropertiesInXML["ImmutabilityPolicyUntilDate"]) { + blobProperties.immutabilityPolicyExpiresOn = new Date(blobPropertiesInXML["ImmutabilityPolicyUntilDate"]); + delete blobProperties["ImmutabilityPolicyUntilDate"]; + } + if (blobPropertiesInXML["ImmutabilityPolicyMode"]) { + blobProperties.immutabilityPolicyMode = blobPropertiesInXML["ImmutabilityPolicyMode"]; + delete blobProperties["ImmutabilityPolicyMode"]; + } + if (blobPropertiesInXML["LegalHold"]) { + blobProperties.legalHold = ParseBoolean(blobPropertiesInXML["LegalHold"]); + delete blobProperties["LegalHold"]; + } + return blobProperties; +} +function ParseBlobItem(blobInXML) { + const blobItem = blobInXML; + blobItem.properties = ParseBlobProperties(blobInXML["Properties"]); + delete blobItem["Properties"]; + blobItem.name = ParseBlobName(blobInXML["Name"]); + delete blobItem["Name"]; + blobItem.deleted = ParseBoolean(blobInXML["Deleted"]); + delete blobItem["Deleted"]; + if (blobInXML["Snapshot"]) { + blobItem.snapshot = blobInXML["Snapshot"]; + delete blobItem["Snapshot"]; + } + if (blobInXML["VersionId"]) { + blobItem.versionId = blobInXML["VersionId"]; + delete blobItem["VersionId"]; + } + if (blobInXML["IsCurrentVersion"]) { + blobItem.isCurrentVersion = ParseBoolean(blobInXML["IsCurrentVersion"]); + delete blobItem["IsCurrentVersion"]; + } + if (blobInXML["Metadata"]) { + blobItem.metadata = blobInXML["Metadata"]; + delete blobItem["Metadata"]; + } + if (blobInXML["Tags"]) { + blobItem.blobTags = ParseBlobTags(blobInXML["Tags"]); + delete blobItem["Tags"]; + } + if (blobInXML["OrMetadata"]) { + blobItem.objectReplicationMetadata = blobInXML["OrMetadata"]; + delete blobItem["OrMetadata"]; + } + if (blobInXML["HasVersionsOnly"]) { + blobItem.hasVersionsOnly = ParseBoolean(blobInXML["HasVersionsOnly"]); + delete blobItem["HasVersionsOnly"]; + } + return blobItem; +} +function ParseBlobPrefix(blobPrefixInXML) { + return { + name: ParseBlobName(blobPrefixInXML["Name"]), + }; +} +function ParseBlobTag(blobTagInXML) { + return { + key: blobTagInXML["Key"], + value: blobTagInXML["Value"], + }; +} +function ParseBlobTags(blobTagsInXML) { + if (blobTagsInXML === undefined || + blobTagsInXML["TagSet"] === undefined || + blobTagsInXML["TagSet"]["Tag"] === undefined) { + return undefined; + } + const blobTagSet = []; + if (blobTagsInXML["TagSet"]["Tag"] instanceof Array) { + blobTagsInXML["TagSet"]["Tag"].forEach((blobTagInXML) => { + blobTagSet.push(ParseBlobTag(blobTagInXML)); + }); + } + else { + blobTagSet.push(ParseBlobTag(blobTagsInXML["TagSet"]["Tag"])); + } + return { blobTagSet: blobTagSet }; +} +export function ProcessBlobItems(blobArrayInXML) { + const blobItems = []; + if (blobArrayInXML instanceof Array) { + blobArrayInXML.forEach((blobInXML) => { + blobItems.push(ParseBlobItem(blobInXML)); + }); + } + else { + blobItems.push(ParseBlobItem(blobArrayInXML)); + } + return blobItems; +} +export function ProcessBlobPrefixes(blobPrefixesInXML) { + const blobPrefixes = []; + if 
(blobPrefixesInXML instanceof Array) { + blobPrefixesInXML.forEach((blobPrefixInXML) => { + blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML)); + }); + } + else { + blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML)); + } + return blobPrefixes; +} +export function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; + } + else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; + } + } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } +} +//# sourceMappingURL=utils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js.map new file mode 100644 index 00000000..7bad8065 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"utils.common.js","sourceRoot":"","sources":["../../../../src/utils/utils.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,UAAU,EAAmB,MAAM,kBAAkB,CAAC;AA8BpF,OAAO,EAAE,2BAA2B,EAAE,eAAe,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAiBzF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmDG;AACH,MAAM,UAAU,aAAa,CAAC,GAAW;IACvC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IAC/B,IAAI,GAAG,IAAI,IAAI,GAAG,CAAC;IAEnB,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC;IACpB,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAExB,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAWD,SAAS,4BAA4B,CAAC,gBAAwB;IAC5D,gCAAgC;IAChC,sKAAsK;IACtK,IAAI,QAAQ,GAAG,EAAE,CAAC;IAClB,IAAI,gBAAgB,CAAC,MAAM,CAAC,6BAA6B,CAAC,KAAK,CAAC,CAAC,EAAE;QACjE,4FAA4F;QAC5F,MAAM,gBAAgB,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACrD,KAAK,MAAM,OAAO,IAAI,gBAAgB,EAAE;YACtC,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,iCAAiC,CAAE,CAAC,CAAC,CAAC,CAAC;aACxE;SACF;KACF;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,MAAM,UAAU,oBAAoB,CAClC,gBAAwB,EACxB,QAM2B;IAE3B,MAAM,QAAQ,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC7C,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;YACvC,OAAO,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAE,CAAC,CAAC,CAAC,CAAC;SACrD;KACF;IACD,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,4BAA4B,CAAC,gBAAwB;IACnE,IAAI,QAAQ,GAAG,EAAE,CAAC;IAElB,IAAI,gBAAgB,CAAC,UAAU,CAAC,4BAA4B,CAAC,EAAE;QAC7D,gCAAgC;QAChC,QAAQ,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;QAC1D,gBAAgB,GAAG,2BAA2B,CAAC;KAChD;IAED,yDAAyD;IACzD,IAAI,YAAY,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,cAAc,CAAC,CAAC;IAC1E,uCAAuC;IACvC,kGAAkG;IAClG,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;IAErF,IACE,gBAAgB,CAAC,MAAM,CAAC,2BAA2B,CAAC,KAAK,CAAC,CAAC;QAC3D,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,EAC7C;QACA,4BAA4B;QAE5B,IAAI,wBAAwB,GAAG,EAAE,CAAC;QAClC,IAAI,WAAW,GAAG,EAAE,CAAC;QACrB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QACrD,IAAI,cAAc,GAAG,EAAE,CAAC;QAExB,2BAA2B;QAC3B,WAAW,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,aAAa,CAAC,CAAC;QACpE,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,gBAAgB,EAAE,YAAY,CAAC,EAAE,QAAQ,CAAC,CAAC;QAEzF,IAAI,CAAC,YAAY,EAAE;YACjB,+DAA+D;YAC/D,6FAA6F;YAE7F,wBAAwB,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,0BAA0B,CAAC,CAAC;YAC9F,MAAM,QAAQ,GAAG,wBAAyB,CAAC,WAAW,EAAE,CAAC;YACzD,IAAI,QAAQ,KAAK,OAAO,IAAI,QAAQ,KAAK,MAAM,EAAE;gBAC/C,MAAM,IAAI,KAAK,CACb,iGAAiG,CAClG,CAAC;aACH;YAED,cAAc,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;YAC1E,IAAI,CAAC,cAAc,EAAE;gBACnB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;aAC7E;YACD,YAAY,GAAG,GAAG,wBAAwB,MAAM,WAAW,SAAS,cAAc,EAAE,CAAC;SACtF;QAED,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;SAC1E;aAAM,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;SACzE;QAED,OAAO;YACL,IAAI,EAAE,mBAAmB;YACzB,GAAG,EAAE,YAAY;YACjB,WAAW;YACX,UAAU;YACV,QAAQ;SACT,CAAC;KACH;SAAM;QACL,wBAAwB;QAExB,MAAM,UAAU,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,uBAAuB,CAAC,CAAC;QACnF,MAAM,WAAW,GAAG,qBAAqB,CAAC,YAAY,CAAC,CAAC;QACxD,IAAI,CAAC,YAAY,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;aAAM,IAAI,CAAC,UAAU,EAAE;YACtB,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;SACxF;QAED,OAAO,EAAE,IAAI,EAAE,eAAe,EAAE,GAAG,EAAE,YAAY,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC;KAC9E;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,MAAM,CAAC,IAAY;IAC1B,OAAO,kBAAkB,CAAC,IAAI,CAAC;SAC5B,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,uBAAuB;SAC5C,OAAO,CAAC,I
AAI,EAAE,KAAK,CAAC,CAAC,iBAAiB;SACtC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,qBAAqB;AAChD,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAE,IAAY;IACvD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IAC/B,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACjF,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAExB,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAE,IAAY,EAAE,KAAc;IACvE,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,SAAS,CAAC,iBAAiB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;IACzC,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAE,IAAY;IACvD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,OAAO,SAAS,CAAC,sBAAsB,CAAC,IAAI,CAAC,CAAC;AAChD,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW,EAAE,IAAY;IAClD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACxB,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,OAAO,SAAS,CAAC,OAAO,EAAE,CAAC;AAC7B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,GAAW;IACtC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC;AAC/B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,GAAW;IAC5C,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,MAAM,UAAU,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IACvC,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,CAAC;KACzD;IAED,IAAI,WAAW,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC;IAC7C,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,IAAI,WAAW,KAAK,EAAE,EAAE;QACtB,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,WAAW,EAAE,CAAC,CAAC,qCAAqC;KACnH;IAED,OAAO,GAAG,UAAU,GAAG,WAAW,EAAE,CAAC;AACvC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,GAAW;IACvC,IAAI,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;IACnD,IAAI,CAAC,WAAW,EAAE;QAChB,OAAO,EAAE,CAAC;KACX;IAED,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC;IAEhF,IAAI,eAAe,GAAa,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACvD,eAAe,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC,KAAa,EAAE,EAAE;QACzD,MAAM,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACxC,MAAM,gBAAgB,GAAG,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;QAChD,OAAO,CACL,YAAY,GAAG,CAAC,IAAI,YAAY,KAAK,gBAAgB,IAAI,gBAAgB,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAC7F,CAAC;IACJ,CAAC,CAAC,CAAC;IAEH,MAAM,OAAO,GAA8B,EAAE,CAAC;IAC9C,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;QAC5C,MAAM,YAAY,GAAG,cAAc,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC/C,MAAM,GAAG,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;QACpC,MAAM,KAAK,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;QACtC,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KACtB;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,gBAAgB,CAAC,GAAW,EAAE,UAAkB;IAC9D,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,KAAK,GAAG,SAAS,CAAC,QAAQ,EAAE,CAAC;IACjC,IAAI,KAAK,EAAE;QACT,KAAK,IAAI,GAAG,GAAG,UAAU,CAAC;KAC3B;SAAM;QACL,KAAK,GAAG,UAAU,CAAC;KACpB;IAED,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC1B,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,oBAAoB,CAAC,IAAU,EAAE,mBAA4B,IAAI;IAC/E,iEAAiE;IACjE,MAAM,UAAU,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;IAEtC,OAAO,gBAAgB;QACrB,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,MAAM,GAAG,GAAG;QAC/D,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,
UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;AAC3D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,OAAe;IAC1C,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC3E,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,aAAqB;IAChD,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;AACzF,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,aAAqB,EAAE,UAAkB;IACvE,mEAAmE;IACnE,MAAM,qBAAqB,GAAG,EAAE,CAAC;IAEjC,4EAA4E;IAC5E,MAAM,mBAAmB,GAAG,CAAC,CAAC;IAE9B,MAAM,6BAA6B,GAAG,qBAAqB,GAAG,mBAAmB,CAAC;IAElF,IAAI,aAAa,CAAC,MAAM,GAAG,6BAA6B,EAAE;QACxD,aAAa,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC,EAAE,6BAA6B,CAAC,CAAC;KACvE;IACD,MAAM,GAAG,GACP,aAAa;QACb,QAAQ,CAAC,UAAU,CAAC,QAAQ,EAAE,EAAE,qBAAqB,GAAG,aAAa,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACrF,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,KAAK,CACzB,QAAgB,EAChB,OAAyB,EACzB,UAAkB;IAElB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3C,2CAA2C;QAC3C,IAAI,OAAY,CAAC;QAEjB,MAAM,YAAY,GAAG,GAAG,EAAE;YACxB,IAAI,OAAO,KAAK,SAAS,EAAE;gBACzB,YAAY,CAAC,OAAO,CAAC,CAAC;aACvB;YACD,MAAM,CAAC,UAAU,CAAC,CAAC;QACrB,CAAC,CAAC;QAEF,MAAM,cAAc,GAAG,GAAG,EAAE;YAC1B,IAAI,OAAO,KAAK,SAAS,EAAE;gBACzB,OAAO,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;aACpD;YACD,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC;QAEF,OAAO,GAAG,UAAU,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;QAE/C,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;SACjD;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,QAAQ,CACtB,aAAqB,EACrB,YAAoB,EACpB,YAAoB,GAAG;IAEvB,+EAA+E;IAC/E,IAAI,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE;QAC7B,OAAO,aAAa,CAAC,QAAQ,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;KACxD;IAED,SAAS,GAAG,SAAS,IAAI,GAAG,CAAC;IAC7B,IAAI,aAAa,CAAC,MAAM,GAAG,YAAY,EAAE;QACvC,OAAO,aAAa,CAAC;KACtB;SAAM;QACL,YAAY,GAAG,YAAY,GAAG,aAAa,CAAC,MAAM,CAAC;QACnD,IAAI,YAAY,GAAG,SAAS,CAAC,MAAM,EAAE;YACnC,SAAS,IAAI,SAAS,CAAC,MAAM,CAAC,YAAY,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;SAChE;QACD,OAAO,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,GAAG,aAAa,CAAC;KACzD;AACH,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,GAAW;IACrC,IAAI,OAAO,GAAW,GAAG,CAAC;IAC1B,IAAI,eAAe,CAAC,OAAO,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;QAC/D,OAAO,GAAG,eAAe,CAAC,OAAO,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;KAChF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,MAAM,UAAU,eAAe,CAAC,cAA2B;IACzD,MAAM,OAAO,GAAgB,IAAI,WAAW,EAAE,CAAC;IAC/C,KAAK,MAAM,MAAM,IAAI,cAAc,CAAC,YAAY,EAAE,EAAE;QAClD,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,eAAe,CAAC,aAAa,CAAC,WAAW,EAAE,EAAE;YAC7E,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;SACnC;aAAM,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,eAAe,CAAC,gBAAgB,EAAE;YACzE,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;SACrD;aAAM;YACL,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;SACxC;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AACD;;;;;GAKG;AACH,MAAM,UAAU,MAAM,CAAC,IAAY,EAAE,IAAY;IAC/C,OAAO,IAAI,CAAC,iBAAiB,EAAE,KAAK,IAAI,CAAC,iBAAiB,EAAE,CAAC;AAC/D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,qBAAqB,CAAC,GAAW;IAC/C,MAAM,SAAS,GAAe,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACpD,IAAI,WAAW,CAAC;IAChB,IAAI;QACF,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;YACjD,yEAAyE;YACzE,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;SAClD;aAAM,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;YACvC,iFAAiF;YACjF,2GAA2G;YAC3G,mCAAmC;YACnC,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;SAClD;aAAM;YACL,qEAAqE;YACrE,WAAW,GAAG,EAAE,CAAC;SAClB;QACD,OAAO,WAAW,CAAC;KAC
pB;IAAC,OAAO,KAAU,EAAE;QACnB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;KAC7E;AACH,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,SAAqB;IACrD,IAAI,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,EAAE;QACrC,OAAO,KAAK,CAAC;KACd;IAED,MAAM,IAAI,GACR,SAAS,CAAC,OAAO,EAAG,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC;IAE9F,sFAAsF;IACtF,gEAAgE;IAChE,wEAAwE;IACxE,wFAAwF;IACxF,OAAO,4HAA4H,CAAC,IAAI,CACtI,IAAI,CACL,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,gBAAgB,CAAC,IAAW;IAC1C,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,QAAQ,GAAG,EAAE,CAAC;IACpB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;QACtB,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;YACxB,QAAQ,CAAC,IAAI,CAAC,GAAG,kBAAkB,CAAC,GAAG,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;SAC1E;KACF;IAED,OAAO,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,IAAW;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,GAAG,GAAa;QACpB,UAAU,EAAE,EAAE;KACf,CAAC;IAEF,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;QACtB,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;YACxB,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC;gBAClB,GAAG;gBACH,KAAK;aACN,CAAC,CAAC;SACJ;KACF;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,MAAM,CAAC,IAAe;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,GAAG,GAAS,EAAE,CAAC;IACrB,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,UAAU,EAAE;QACrC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC;KAClC;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAClC,iBAIiC;IAEjC,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,OAAO,SAAS,CAAC;KAClB;IAED,QAAQ,iBAAiB,CAAC,IAAI,EAAE;QAC9B,KAAK,KAAK;YACR,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,WAAW;oBACjB,0BAA0B,EAAE;wBAC1B,eAAe,EAAE,iBAAiB,CAAC,eAAe,IAAI,GAAG;wBACzD,UAAU,EAAE,iBAAiB,CAAC,UAAU,IAAI,EAAE;wBAC9C,eAAe,EAAE,iBAAiB,CAAC,eAAe;wBAClD,UAAU,EAAE,iBAAiB,CAAC,eAAe,IAAI,EAAE;wBACnD,cAAc,EAAE,iBAAiB,CAAC,UAAU,IAAI,KAAK;qBACtD;iBACF;aACF,CAAC;QACJ,KAAK,MAAM;YACT,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,MAAM;oBACZ,qBAAqB,EAAE;wBACrB,eAAe,EAAE,iBAAiB,CAAC,eAAe;qBACnD;iBACF;aACF,CAAC;QACJ,KAAK,OAAO;YACV,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,OAAO;oBACb,kBAAkB,EAAE;wBAClB,MAAM,EAAE,iBAAiB,CAAC,MAAM;qBACjC;iBACF;aACF,CAAC;QACJ,KAAK,SAAS;YACZ,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,SAAS;iBAChB;aACF,CAAC;QAEJ;YACE,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;KACtD;AACH,CAAC;AAED,MAAM,UAAU,4BAA4B,CAC1C,uBAAgD;IAEhD,IAAI,CAAC,uBAAuB,EAAE;QAC5B,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,WAAW,IAAI,uBAAuB,EAAE;QAC1C,+FAA+F;QAC/F,sFAAsF;QACtF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,YAAY,GAA8B,EAAE,CAAC;IACnD,KAAK,MAAM,GAAG,IAAI,uBAAuB,EAAE;QACzC,MAAM,GAAG,GAAG,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC3B,MAAM,YAAY,GAAG,KAAK,CAAC;QAC3B,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;YACnC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;SAChD;QACD,MAAM,IAAI,GAA0B;YAClC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC;YACd,iBAAiB,EAAE,uBAAuB,CAAC,GAAG,CAA4B;SAC3E,CAAC;QACF,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,CAAC,QAAQ,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACnF,IAAI,WAAW,GAAG,CAAC,CAAC,EAAE;YACpB,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SAC5C;aAAM;YACL,YAAY,CAAC,IAAI,CAAC;gBAChB,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;gBAChB,KAAK,EAAE,CAAC,IAAI,CAAC;aACd,CAAC,CAAC;SACJ;KACF;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,gBAAgB,CAAI,KAAQ,EAAE,UAA2B;IACtE,KAAa,CAAC,UAAU,GAAG,UAAU,CAAC;IACvC,OAAO,KAAK,CA
AC;AACf,CAAC;AAED,MAAM,UAAU,yBAAyB,CACvC,iBAAqC;IAErC,OAAO,iBAAiB,CAAC,CAAC,CAAC,iBAAiB,CAAC,MAAM,GAAG,GAAG,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;AAClG,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,IAAc;IAC7C,IAAI,IAAI,CAAC,OAAO,EAAE;QAChB,OAAO,kBAAkB,CAAC,IAAI,CAAC,OAAQ,CAAC,CAAC;KAC1C;SAAM;QACL,OAAO,IAAI,CAAC,OAAQ,CAAC;KACtB;AACH,CAAC;AAED,MAAM,UAAU,qCAAqC,CACnD,gBAA8C;IAE9C,uCACK,gBAAgB,KACnB,OAAO,EAAE;YACP,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;gBACpE,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;gBACF,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC;SACH,IACD;AACJ,CAAC;AAED,MAAM,UAAU,0CAA0C,CACxD,gBAAmD;;IAEnD,uCACK,gBAAgB,KACnB,OAAO,EAAE;YACP,YAAY,EAAE,MAAA,gBAAgB,CAAC,OAAO,CAAC,YAAY,0CAAE,GAAG,CAAC,CAAC,kBAAkB,EAAE,EAAE;gBAC9E,MAAM,UAAU,GAAoB;oBAClC,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC;iBAChD,CAAC;gBACF,OAAO,UAAU,CAAC;YACpB,CAAC,CAAC;YACF,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;gBACpE,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;gBACF,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC;SACH,IACD;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,KAAa;IACvC,IAAI,MAAM,EAAE;QACV,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;KACrC;SAAM;QACL,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;QAC/B,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;SACnC;QACD,OAAO,GAAG,CAAC;KACZ;AACH,CAAC;AAED,SAAS,YAAY,CAAC,OAAY;IAChC,IAAI,OAAO,KAAK,SAAS;QAAE,OAAO,SAAS,CAAC;IAC5C,IAAI,OAAO,KAAK,MAAM;QAAE,OAAO,IAAI,CAAC;IACpC,IAAI,OAAO,KAAK,OAAO;QAAE,OAAO,KAAK,CAAC;IACtC,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,aAAa,CAAC,aAAkB;IACvC,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;QACxE,OAAO;YACL,OAAO,EAAE,YAAY,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC;YACpD,OAAO,EAAE,aAAa,CAAC,GAAG,CAAW;SACtC,CAAC;KACH;SAAM;QACL,OAAO;YACL,OAAO,EAAE,KAAK;YACd,OAAO,EAAE,aAAuB;SACjC,CAAC;KACH;AACH,CAAC;AAED,SAAS,mBAAmB,CAAC,mBAAwB;IACnD,MAAM,cAAc,GAAG,mBAAmB,CAAC;IAC3C,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,eAAe,CAAW,CAAC,CAAC;QACpF,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,YAAY,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,eAAe,CAAW,CAAC,CAAC;QACvF,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,MAAM,CAAC,EAAE;QAC/B,cAAc,CAAC,IAAI,GAAG,mBAAmB,CAAC,MAAM,CAAW,CAAC;QAC5D,OAAO,cAAc,CAAC,MAAM,CAAC,CAAC;KAC/B;IAED,IAAI,mBAAmB,CAAC,gBAAgB,CAAC,EAAE;QACzC,cAAc,CAAC,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,gBAAgB,CAAW,CAAC,CAAC;QAC3F,OAAO,cAAc,CAAC,gBAAgB,CAAC,CAAC;KACzC;IAED,IAAI,mBAAmB,CAAC,cAAc,CAAC,EAAE;QACvC,cAAc,CAAC,WAAW,GAAG,mBAAmB,CAAC,cAAc,CAAW,CAAC;QAC3E,OAAO,cAAc,CAAC,cAAc,CAAC,CAAC;KACvC;IAED,IAAI,mBAAmB,CAAC,kBAAkB,CAAC,EAAE;QAC3C,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,kBAAkB,CAAW,CAAC;QACnF,OAAO,cAAc,CAAC,kBAAkB,CAAC,CAAC;KAC3C;IAED,IAAI,mBAAmB,CAAC,kBAAkB,CAAC,EAAE;QAC3C,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,kBAAkB,CAAW,CAAC;QACnF,OAAO,cAAc,CAAC,kBAAkB,CAAC,CAAC;KAC3C;IAED,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,UAAU,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;QAC7F,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;KACtC;IAED,IAAI,mBAAmB,CAAC,qBAAqB,CAAC,EAAE;QAC9C,cAAc,CAAC,kBAAkB,GAAG,mBAAmB,CAAC,qBAAqB,CAAW,CAAC;QACzF,OAAO,cAAc,CAAC,qBAAqB,CAAC,CAAC;KAC9C;IAED,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,YAAY,GAAG,mBAAmB,CAAC,eAAe,CAAW,CAAC;QAC7E,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,2BAA2B,
CAAC,EAAE;QACpD,cAAc,CAAC,kBAAkB,GAAG,UAAU,CAC5C,mBAAmB,CAAC,2BAA2B,CAAW,CAC3D,CAAC;QACF,OAAO,cAAc,CAAC,2BAA2B,CAAC,CAAC;KACpD;IAED,IAAI,mBAAmB,CAAC,UAAU,CAAC,EAAE;QACnC,cAAc,CAAC,QAAQ,GAAG,mBAAmB,CAAC,UAAU,CAAa,CAAC;QACtE,OAAO,cAAc,CAAC,UAAU,CAAC,CAAC;KACnC;IAED,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,WAAW,GAAG,mBAAmB,CAAC,aAAa,CAAoB,CAAC;QACnF,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;KACtC;IAED,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;QACrC,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAmB,CAAC;QAChF,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;KACrC;IAED,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,eAAe,CAAsB,CAAC;QACzF,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,QAAQ,CAAC,EAAE;QACjC,cAAc,CAAC,MAAM,GAAG,mBAAmB,CAAC,QAAQ,CAAW,CAAC;QAChE,OAAO,cAAc,CAAC,QAAQ,CAAC,CAAC;KACjC;IAED,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;QACrC,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAmB,CAAC;QAChF,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;KACrC;IAED,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;QACrC,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAW,CAAC;QACxE,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;KACrC;IAED,IAAI,mBAAmB,CAAC,cAAc,CAAC,EAAE;QACvC,cAAc,CAAC,YAAY,GAAG,mBAAmB,CAAC,cAAc,CAAW,CAAC;QAC5E,OAAO,cAAc,CAAC,cAAc,CAAC,CAAC;KACvC;IAED,IAAI,mBAAmB,CAAC,oBAAoB,CAAC,EAAE;QAC7C,cAAc,CAAC,eAAe,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,oBAAoB,CAAW,CAAC,CAAC;QAC/F,OAAO,cAAc,CAAC,oBAAoB,CAAC,CAAC;KAC7C;IAED,IAAI,mBAAmB,CAAC,uBAAuB,CAAC,EAAE;QAChD,cAAc,CAAC,qBAAqB,GAAG,mBAAmB,CAAC,uBAAuB,CAAW,CAAC;QAC9F,OAAO,cAAc,CAAC,uBAAuB,CAAC,CAAC;KAChD;IAED,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,YAAY,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,CAAC,CAAC;QACtF,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;KAC1C;IAED,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,YAAY,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,CAAC,CAAC;QACtF,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;KAC1C;IAED,IAAI,mBAAmB,CAAC,qBAAqB,CAAC,EAAE;QAC9C,cAAc,CAAC,mBAAmB,GAAG,mBAAmB,CAAC,qBAAqB,CAAW,CAAC;QAC1F,OAAO,cAAc,CAAC,qBAAqB,CAAC,CAAC;KAC9C;IAED,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;QAClF,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;KACtC;IAED,IAAI,mBAAmB,CAAC,wBAAwB,CAAC,EAAE;QACjD,cAAc,CAAC,sBAAsB,GAAG,UAAU,CAChD,mBAAmB,CAAC,wBAAwB,CAAW,CACxD,CAAC;QACF,OAAO,cAAc,CAAC,wBAAwB,CAAC,CAAC;KACjD;IAED,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;QACrC,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAe,CAAC;QAC5E,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;KACrC;IAED,IAAI,mBAAmB,CAAC,oBAAoB,CAAC,EAAE;QAC7C,cAAc,CAAC,kBAAkB,GAAG,YAAY,CAAC,mBAAmB,CAAC,oBAAoB,CAAC,CAAC,CAAC;QAC5F,OAAO,cAAc,CAAC,oBAAoB,CAAC,CAAC;KAC7C;IAED,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,eAAe,CAAkB,CAAC;QACrF,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,2BAA2B,CAAC,EAAE;QACpD,cAAc,CAAC,yBAAyB,GAAG,mBAAmB,CAC5D,2BAA2B,CAClB,CAAC;QACZ,OAAO,cAAc,CAAC,2BAA2B,CAAC,CAAC;KACpD;IAED,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,iBAAiB,CAAW,CAAC;QAClF,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;KAC1C;IAED,IAAI,mBAAmB,CAAC,sBAAsB,CAAC,EAAE;QAC/C,cAAc,CAAC,mBAAmB,GAAG,IAAI,IAAI,CAC3C,mBAAmB,CAAC,sBAAsB,CAAW,CACtD,CAAC;QACF,OAAO,cAAc,CAAC,sBAAsB,CAAC,CAAC;KAC/C;IAED,IAAI,mBAAmB,CAAC,UAAU,CAAC,EAAE;QACnC,cAAc,CAAC,QAAQ,GAAG,UAAU,CAAC,mBAAmB,CAAC,UAAU,CAAW,CAAC,CAAC;QAChF,OAAO,cAAc,CAAC,UAAU,CAAC,CAAC;KACnC;IAED,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;QAClF,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;KACtC;IAED,IAAI,mBAAmB,CAAC,QAAQ,CAAC,EAAE;QACjC,cAAc,CAAC,QAAQ,GAAG,YAAY,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC,CAAC;QACtE,OAAO,cAAc,CAAC,QAAQ,CAAC,CAAC;KACjC;IAED,IAAI,
mBAAmB,CAAC,mBAAmB,CAAC,EAAE;QAC5C,cAAc,CAAC,iBAAiB,GAAG,mBAAmB,CACpD,mBAAmB,CACC,CAAC;QACvB,OAAO,cAAc,CAAC,mBAAmB,CAAC,CAAC;KAC5C;IAED,IAAI,mBAAmB,CAAC,gBAAgB,CAAC,EAAE;QACzC,cAAc,CAAC,cAAc,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAW,CAAC,CAAC;QAC1F,OAAO,cAAc,CAAC,gBAAgB,CAAC,CAAC;KACzC;IAED,IAAI,mBAAmB,CAAC,6BAA6B,CAAC,EAAE;QACtD,cAAc,CAAC,2BAA2B,GAAG,IAAI,IAAI,CACnD,mBAAmB,CAAC,6BAA6B,CAAW,CAC7D,CAAC;QACF,OAAO,cAAc,CAAC,6BAA6B,CAAC,CAAC;KACtD;IAED,IAAI,mBAAmB,CAAC,wBAAwB,CAAC,EAAE;QACjD,cAAc,CAAC,sBAAsB,GAAG,mBAAmB,CACzD,wBAAwB,CACK,CAAC;QAChC,OAAO,cAAc,CAAC,wBAAwB,CAAC,CAAC;KACjD;IAED,IAAI,mBAAmB,CAAC,WAAW,CAAC,EAAE;QACpC,cAAc,CAAC,SAAS,GAAG,YAAY,CAAC,mBAAmB,CAAC,WAAW,CAAC,CAAC,CAAC;QAC1E,OAAO,cAAc,CAAC,WAAW,CAAC,CAAC;KACpC;IAED,OAAO,cAAc,CAAC;AACxB,CAAC;AAED,SAAS,aAAa,CAAC,SAAc;IACnC,MAAM,QAAQ,GAAG,SAAS,CAAC;IAC3B,QAAQ,CAAC,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC,CAAC;IACnE,OAAO,QAAQ,CAAC,YAAY,CAAC,CAAC;IAE9B,QAAQ,CAAC,IAAI,GAAG,aAAa,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;IACjD,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;IACxB,QAAQ,CAAC,OAAO,GAAG,YAAY,CAAC,SAAS,CAAC,SAAS,CAAC,CAAE,CAAC;IACvD,OAAO,QAAQ,CAAC,SAAS,CAAC,CAAC;IAE3B,IAAI,SAAS,CAAC,UAAU,CAAC,EAAE;QACzB,QAAQ,CAAC,QAAQ,GAAG,SAAS,CAAC,UAAU,CAAW,CAAC;QACpD,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC;KAC7B;IAED,IAAI,SAAS,CAAC,WAAW,CAAC,EAAE;QAC1B,QAAQ,CAAC,SAAS,GAAG,SAAS,CAAC,WAAW,CAAW,CAAC;QACtD,OAAO,QAAQ,CAAC,WAAW,CAAC,CAAC;KAC9B;IAED,IAAI,SAAS,CAAC,kBAAkB,CAAC,EAAE;QACjC,QAAQ,CAAC,gBAAgB,GAAG,YAAY,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC,CAAC;QACxE,OAAO,QAAQ,CAAC,kBAAkB,CAAC,CAAC;KACrC;IAED,IAAI,SAAS,CAAC,UAAU,CAAC,EAAE;QACzB,QAAQ,CAAC,QAAQ,GAAG,SAAS,CAAC,UAAU,CAAC,CAAC;QAC1C,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC;KAC7B;IAED,IAAI,SAAS,CAAC,MAAM,CAAC,EAAE;QACrB,QAAQ,CAAC,QAAQ,GAAG,aAAa,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;QACrD,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;KACzB;IAED,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;QAC3B,QAAQ,CAAC,yBAAyB,GAAG,SAAS,CAAC,YAAY,CAAC,CAAC;QAC7D,OAAO,QAAQ,CAAC,YAAY,CAAC,CAAC;KAC/B;IAED,IAAI,SAAS,CAAC,iBAAiB,CAAC,EAAE;QAChC,QAAQ,CAAC,eAAe,GAAG,YAAY,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC,CAAC;QACtE,OAAO,QAAQ,CAAC,iBAAiB,CAAC,CAAC;KACpC;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,eAAe,CAAC,eAAoB;IAC3C,OAAO;QACL,IAAI,EAAE,aAAa,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;KAC7C,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,YAAiB;IACrC,OAAO;QACL,GAAG,EAAE,YAAY,CAAC,KAAK,CAAC;QACxB,KAAK,EAAE,YAAY,CAAC,OAAO,CAAC;KAC7B,CAAC;AACJ,CAAC;AAED,SAAS,aAAa,CAAC,aAAkB;IACvC,IACE,aAAa,KAAK,SAAS;QAC3B,aAAa,CAAC,QAAQ,CAAC,KAAK,SAAS;QACrC,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,KAAK,SAAS,EAC5C;QACA,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,UAAU,GAAG,EAAE,CAAC;IACtB,IAAI,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,YAAY,KAAK,EAAE;QACnD,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,YAAiB,EAAE,EAAE;YAC3D,UAAU,CAAC,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,CAAC,CAAC;QAC9C,CAAC,CAAC,CAAC;KACJ;SAAM;QACL,UAAU,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;KAC/D;IAED,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,CAAC;AACpC,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,cAAqB;IACpD,MAAM,SAAS,GAAG,EAAE,CAAC;IAErB,IAAI,cAAc,YAAY,KAAK,EAAE;QACnC,cAAc,CAAC,OAAO,CAAC,CAAC,SAAc,EAAE,EAAE;YACxC,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;KACJ;SAAM;QACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC,CAAC;KAC/C;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,mBAAmB,CAAC,iBAAwB;IAC1D,MAAM,YAAY,GAAG,EAAE,CAAC;IAExB,IAAI,iBAAiB,YAAY,KAAK,EAAE;QACtC,iBAAiB,CAAC,OAAO,CAAC,CAAC,eAAoB,EAAE,EAAE;YACjD,YAAY,CAAC,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,CAAC,CAAC;QACtD,CAAC,CAAC,CAAC;KACJ;SAAM;QACL,YAAY,CAAC,IAAI,CAAC,eAAe,CAAC,iBAAiB,CAAC,CAAC,CAAC;KACvD;IAED,OAAO,Y
AAY,CAAC;AACtB,CAAC;AAED,MAAM,SAAS,CAAC,CAAC,yBAAyB,CACxC,oBAA4D;IAE5D,IAAI,SAAS,GAAgB,EAAE,CAAC;IAChC,IAAI,UAAU,GAAiB,EAAE,CAAC;IAElC,IAAI,oBAAoB,CAAC,SAAS;QAAE,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;IAC/E,IAAI,oBAAoB,CAAC,UAAU;QAAE,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;IAElF,IAAI,cAAc,GAAG,CAAC,CAAC;IACvB,IAAI,eAAe,GAAG,CAAC,CAAC;IAExB,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,IAAI,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE;QAC/E,IAAI,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK,EAAE;YACvE,MAAM;gBACJ,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;gBACtC,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;gBAClC,OAAO,EAAE,KAAK;aACf,CAAC;YACF,EAAE,cAAc,CAAC;SAClB;aAAM;YACL,MAAM;gBACJ,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;gBACxC,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;gBACpC,OAAO,EAAE,IAAI;aACd,CAAC;YACF,EAAE,eAAe,CAAC;SACnB;KACF;IAED,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,cAAc,EAAE;QAC1D,MAAM;YACJ,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;YACtC,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;YAClC,OAAO,EAAE,KAAK;SACf,CAAC;KACH;IAED,OAAO,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,eAAe,EAAE;QAC7D,MAAM;YACJ,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;YACxC,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;YACpC,OAAO,EAAE,IAAI;SACd,CAAC;KACH;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpHeaders, isNode, URLBuilder, TokenCredential } from \"@azure/core-http\";\n\nimport {\n BlobQueryArrowConfiguration,\n BlobQueryCsvTextConfiguration,\n BlobQueryJsonTextConfiguration,\n BlobQueryParquetConfiguration,\n} from \"../Clients\";\nimport {\n QuerySerialization,\n BlobTags,\n BlobName,\n ListBlobsFlatSegmentResponse,\n ListBlobsHierarchySegmentResponse,\n BlobItemInternal,\n BlobPrefix,\n BlobType,\n LeaseStatusType,\n LeaseStateType,\n LeaseDurationType,\n CopyStatusType,\n AccessTier,\n ArchiveStatus,\n RehydratePriority,\n BlobImmutabilityPolicyMode,\n BlobTag,\n PageRange,\n ClearRange,\n BlobPropertiesInternal,\n} from \"../generated/src/models\";\nimport { DevelopmentConnectionString, HeaderConstants, URLConstants } from \"./constants\";\nimport {\n Tags,\n ObjectReplicationPolicy,\n ObjectReplicationRule,\n ObjectReplicationStatus,\n HttpAuthorization,\n} from \"../models\";\nimport {\n ListBlobsFlatSegmentResponseModel,\n BlobItemInternal as BlobItemInternalModel,\n ListBlobsHierarchySegmentResponseModel,\n BlobPrefix as BlobPrefixModel,\n PageBlobGetPageRangesDiffResponseModel,\n PageRangeInfo,\n} from \"../generatedModels\";\n\n/**\n * Reserved URL characters must be properly escaped for Storage services like Blob or File.\n *\n * ## URL encode and escape strategy for JS SDKs\n *\n * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not.\n * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL\n * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors.\n *\n * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.\n *\n * This is what legacy V2 SDK does, simple and works for most of the cases.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%3A\" and send to server. 
A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%253A\" and send to server. A blob named \"b%3A\" will be created.\n *\n * But this strategy will make it not possible to create a blob with \"?\" in it's name. Because when customer URL string is\n * \"http://account.blob.core.windows.net/con/blob?name\", the \"?name\" will be treated as URL paramter instead of blob name.\n * If customer URL string is \"http://account.blob.core.windows.net/con/blob%3Fname\", a blob named \"blob%3Fname\" will be created.\n * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.\n * We cannot accept a SDK cannot create a blob name with \"?\". So we implement strategy two:\n *\n * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.\n *\n * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will escape \":\" like \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%3A\" to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%253A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%253A\" to server. A blob named \"b%3A\" will be created.\n *\n * This strategy gives us flexibility to create with any special characters. But \"%\" will be treated as a special characters, if the URL string\n * is not encoded, there shouldn't a \"%\" in the URL string, otherwise the URL is not a valid URL.\n * If customer needs to create a blob with \"%\" in it's blob name, use \"%25\" instead of \"%\". Just like above 3rd sample.\n * And following URL strings are invalid:\n * - \"http://account.blob.core.windows.net/con/b%\"\n * - \"http://account.blob.core.windows.net/con/b%2\"\n * - \"http://account.blob.core.windows.net/con/b%G\"\n *\n * Another special character is \"?\", use \"%2F\" to represent a blob name with \"?\" in a URL string.\n *\n * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`\n *\n * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. 
Because what customers passes in is a plain name instead of a URL.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata\n *\n * @param url -\n */\nexport function escapeURLPath(url: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path || \"/\";\n\n path = escape(path);\n urlParsed.setPath(path);\n\n return urlParsed.toString();\n}\n\nexport interface ConnectionString {\n kind: \"AccountConnString\" | \"SASConnString\";\n url: string;\n accountName: string;\n accountKey?: any;\n accountSas?: string;\n proxyUri?: string; // Development Connection String may contain proxyUri\n}\n\nfunction getProxyUriFromDevConnString(connectionString: string): string {\n // Development Connection String\n // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key\n let proxyUri = \"\";\n if (connectionString.search(\"DevelopmentStorageProxyUri=\") !== -1) {\n // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri\n const matchCredentials = connectionString.split(\";\");\n for (const element of matchCredentials) {\n if (element.trim().startsWith(\"DevelopmentStorageProxyUri=\")) {\n proxyUri = element.trim().match(\"DevelopmentStorageProxyUri=(.*)\")![1];\n }\n }\n }\n return proxyUri;\n}\n\nexport function getValueInConnString(\n connectionString: string,\n argument:\n | \"BlobEndpoint\"\n | \"AccountName\"\n | \"AccountKey\"\n | \"DefaultEndpointsProtocol\"\n | \"EndpointSuffix\"\n | \"SharedAccessSignature\"\n): string {\n const elements = connectionString.split(\";\");\n for (const element of elements) {\n if (element.trim().startsWith(argument)) {\n return element.trim().match(argument + \"=(.*)\")![1];\n }\n }\n return \"\";\n}\n\n/**\n * Extracts the parts of an Azure Storage account connection string.\n *\n * @param connectionString - Connection string.\n * @returns String key value pairs of the storage account's url and credentials.\n */\nexport function extractConnectionStringParts(connectionString: string): ConnectionString {\n let proxyUri = \"\";\n\n if (connectionString.startsWith(\"UseDevelopmentStorage=true\")) {\n // Development connection string\n proxyUri = getProxyUriFromDevConnString(connectionString);\n connectionString = DevelopmentConnectionString;\n }\n\n // Matching BlobEndpoint in the Account connection string\n let blobEndpoint = getValueInConnString(connectionString, \"BlobEndpoint\");\n // Slicing off '/' at the end if exists\n // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)\n blobEndpoint = blobEndpoint.endsWith(\"/\") ? 
blobEndpoint.slice(0, -1) : blobEndpoint;\n\n if (\n connectionString.search(\"DefaultEndpointsProtocol=\") !== -1 &&\n connectionString.search(\"AccountKey=\") !== -1\n ) {\n // Account connection string\n\n let defaultEndpointsProtocol = \"\";\n let accountName = \"\";\n let accountKey = Buffer.from(\"accountKey\", \"base64\");\n let endpointSuffix = \"\";\n\n // Get account name and key\n accountName = getValueInConnString(connectionString, \"AccountName\");\n accountKey = Buffer.from(getValueInConnString(connectionString, \"AccountKey\"), \"base64\");\n\n if (!blobEndpoint) {\n // BlobEndpoint is not present in the Account connection string\n // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`\n\n defaultEndpointsProtocol = getValueInConnString(connectionString, \"DefaultEndpointsProtocol\");\n const protocol = defaultEndpointsProtocol!.toLowerCase();\n if (protocol !== \"https\" && protocol !== \"http\") {\n throw new Error(\n \"Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'\"\n );\n }\n\n endpointSuffix = getValueInConnString(connectionString, \"EndpointSuffix\");\n if (!endpointSuffix) {\n throw new Error(\"Invalid EndpointSuffix in the provided Connection String\");\n }\n blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n }\n\n if (!accountName) {\n throw new Error(\"Invalid AccountName in the provided Connection String\");\n } else if (accountKey.length === 0) {\n throw new Error(\"Invalid AccountKey in the provided Connection String\");\n }\n\n return {\n kind: \"AccountConnString\",\n url: blobEndpoint,\n accountName,\n accountKey,\n proxyUri,\n };\n } else {\n // SAS connection string\n\n const accountSas = getValueInConnString(connectionString, \"SharedAccessSignature\");\n const accountName = getAccountNameFromUrl(blobEndpoint);\n if (!blobEndpoint) {\n throw new Error(\"Invalid BlobEndpoint in the provided SAS Connection String\");\n } else if (!accountSas) {\n throw new Error(\"Invalid SharedAccessSignature in the provided SAS Connection String\");\n }\n\n return { kind: \"SASConnString\", url: blobEndpoint, accountName, accountSas };\n }\n}\n\n/**\n * Internal escape method implemented Strategy Two mentioned in escapeURL() description.\n *\n * @param text -\n */\nfunction escape(text: string): string {\n return encodeURIComponent(text)\n .replace(/%2F/g, \"/\") // Don't escape for \"/\"\n .replace(/'/g, \"%27\") // Escape for \"'\"\n .replace(/\\+/g, \"%20\")\n .replace(/%25/g, \"%\"); // Revert encoded \"%\"\n}\n\n/**\n * Append a string to URL path. Will remove duplicated \"/\" in front of the string\n * when URL path ends with a \"/\".\n *\n * @param url - Source URL string\n * @param name - String to be appended to URL\n * @returns An updated URL string\n */\nexport function appendToURLPath(url: string, name: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path ? (path.endsWith(\"/\") ? `${path}${name}` : `${path}/${name}`) : name;\n urlParsed.setPath(path);\n\n return urlParsed.toString();\n}\n\n/**\n * Set URL parameter name and value. If name exists in URL parameters, old value\n * will be replaced by name key. 
If not provide value, the parameter will be deleted.\n *\n * @param url - Source URL string\n * @param name - Parameter name\n * @param value - Parameter value\n * @returns An updated URL string\n */\nexport function setURLParameter(url: string, name: string, value?: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setQueryParameter(name, value);\n return urlParsed.toString();\n}\n\n/**\n * Get URL parameter by name.\n *\n * @param url -\n * @param name -\n */\nexport function getURLParameter(url: string, name: string): string | string[] | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getQueryParameterValue(name);\n}\n\n/**\n * Set URL host.\n *\n * @param url - Source URL string\n * @param host - New host string\n * @returns An updated URL string\n */\nexport function setURLHost(url: string, host: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setHost(host);\n return urlParsed.toString();\n}\n\n/**\n * Get URL path from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPath(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getPath();\n}\n\n/**\n * Get URL scheme from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLScheme(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getScheme();\n}\n\n/**\n * Get URL path and query from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPathAndQuery(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n const pathString = urlParsed.getPath();\n if (!pathString) {\n throw new RangeError(\"Invalid url without valid path.\");\n }\n\n let queryString = urlParsed.getQuery() || \"\";\n queryString = queryString.trim();\n if (queryString !== \"\") {\n queryString = queryString.startsWith(\"?\") ? queryString : `?${queryString}`; // Ensure query string start with '?'\n }\n\n return `${pathString}${queryString}`;\n}\n\n/**\n * Get URL query key value pairs from an URL string.\n *\n * @param url -\n */\nexport function getURLQueries(url: string): { [key: string]: string } {\n let queryString = URLBuilder.parse(url).getQuery();\n if (!queryString) {\n return {};\n }\n\n queryString = queryString.trim();\n queryString = queryString.startsWith(\"?\") ? 
queryString.substr(1) : queryString;\n\n let querySubStrings: string[] = queryString.split(\"&\");\n querySubStrings = querySubStrings.filter((value: string) => {\n const indexOfEqual = value.indexOf(\"=\");\n const lastIndexOfEqual = value.lastIndexOf(\"=\");\n return (\n indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1\n );\n });\n\n const queries: { [key: string]: string } = {};\n for (const querySubString of querySubStrings) {\n const splitResults = querySubString.split(\"=\");\n const key: string = splitResults[0];\n const value: string = splitResults[1];\n queries[key] = value;\n }\n\n return queries;\n}\n\n/**\n * Append a string to URL query.\n *\n * @param url - Source URL string.\n * @param queryParts - String to be appended to the URL query.\n * @returns An updated URL string.\n */\nexport function appendToURLQuery(url: string, queryParts: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let query = urlParsed.getQuery();\n if (query) {\n query += \"&\" + queryParts;\n } else {\n query = queryParts;\n }\n\n urlParsed.setQuery(query);\n return urlParsed.toString();\n}\n\n/**\n * Rounds a date off to seconds.\n *\n * @param date -\n * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;\n * If false, YYYY-MM-DDThh:mm:ssZ will be returned.\n * @returns Date string in ISO8061 format, with or without 7 milliseconds component\n */\nexport function truncatedISO8061Date(date: Date, withMilliseconds: boolean = true): string {\n // Date.toISOString() will return like \"2018-10-29T06:34:36.139Z\"\n const dateString = date.toISOString();\n\n return withMilliseconds\n ? dateString.substring(0, dateString.length - 1) + \"0000\" + \"Z\"\n : dateString.substring(0, dateString.length - 5) + \"Z\";\n}\n\n/**\n * Base64 encode.\n *\n * @param content -\n */\nexport function base64encode(content: string): string {\n return !isNode ? btoa(content) : Buffer.from(content).toString(\"base64\");\n}\n\n/**\n * Base64 decode.\n *\n * @param encodedString -\n */\nexport function base64decode(encodedString: string): string {\n return !isNode ? 
atob(encodedString) : Buffer.from(encodedString, \"base64\").toString();\n}\n\n/**\n * Generate a 64 bytes base64 block ID string.\n *\n * @param blockIndex -\n */\nexport function generateBlockID(blockIDPrefix: string, blockIndex: number): string {\n // To generate a 64 bytes base64 string, source string should be 48\n const maxSourceStringLength = 48;\n\n // A blob can have a maximum of 100,000 uncommitted blocks at any given time\n const maxBlockIndexLength = 6;\n\n const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;\n\n if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {\n blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);\n }\n const res =\n blockIDPrefix +\n padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, \"0\");\n return base64encode(res);\n}\n\n/**\n * Delay specified time interval.\n *\n * @param timeInMs -\n * @param aborter -\n * @param abortError -\n */\nexport async function delay(\n timeInMs: number,\n aborter?: AbortSignalLike,\n abortError?: Error\n): Promise {\n return new Promise((resolve, reject) => {\n /* eslint-disable-next-line prefer-const */\n let timeout: any;\n\n const abortHandler = () => {\n if (timeout !== undefined) {\n clearTimeout(timeout);\n }\n reject(abortError);\n };\n\n const resolveHandler = () => {\n if (aborter !== undefined) {\n aborter.removeEventListener(\"abort\", abortHandler);\n }\n resolve();\n };\n\n timeout = setTimeout(resolveHandler, timeInMs);\n\n if (aborter !== undefined) {\n aborter.addEventListener(\"abort\", abortHandler);\n }\n });\n}\n\n/**\n * String.prototype.padStart()\n *\n * @param currentString -\n * @param targetLength -\n * @param padString -\n */\nexport function padStart(\n currentString: string,\n targetLength: number,\n padString: string = \" \"\n): string {\n // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes\n if (String.prototype.padStart) {\n return currentString.padStart(targetLength, padString);\n }\n\n padString = padString || \" \";\n if (currentString.length > targetLength) {\n return currentString;\n } else {\n targetLength = targetLength - currentString.length;\n if (targetLength > padString.length) {\n padString += padString.repeat(targetLength / padString.length);\n }\n return padString.slice(0, targetLength) + currentString;\n }\n}\n\nexport function sanitizeURL(url: string): string {\n let safeURL: string = url;\n if (getURLParameter(safeURL, URLConstants.Parameters.SIGNATURE)) {\n safeURL = setURLParameter(safeURL, URLConstants.Parameters.SIGNATURE, \"*****\");\n }\n\n return safeURL;\n}\n\nexport function sanitizeHeaders(originalHeader: HttpHeaders): HttpHeaders {\n const headers: HttpHeaders = new HttpHeaders();\n for (const header of originalHeader.headersArray()) {\n if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) {\n headers.set(header.name, \"*****\");\n } else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) {\n headers.set(header.name, sanitizeURL(header.value));\n } else {\n headers.set(header.name, header.value);\n }\n }\n\n return headers;\n}\n/**\n * If two strings are equal when compared case insensitive.\n *\n * @param str1 -\n * @param str2 -\n */\nexport function iEqual(str1: string, str2: string): boolean {\n return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();\n}\n\n/**\n * Extracts account name from the url\n * @param url - url to extract the account name from\n * @returns with the account name\n */\nexport 
function getAccountNameFromUrl(url: string): string {\n const parsedUrl: URLBuilder = URLBuilder.parse(url);\n let accountName;\n try {\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n accountName = parsedUrl.getHost()!.split(\".\")[0];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/\n // .getPath() -> /devstoreaccount1/\n accountName = parsedUrl.getPath()!.split(\"/\")[1];\n } else {\n // Custom domain case: \"https://customdomain.com/containername/blob\".\n accountName = \"\";\n }\n return accountName;\n } catch (error: any) {\n throw new Error(\"Unable to extract accountName with provided information.\");\n }\n}\n\nexport function isIpEndpointStyle(parsedUrl: URLBuilder): boolean {\n if (parsedUrl.getHost() === undefined) {\n return false;\n }\n\n const host =\n parsedUrl.getHost()! + (parsedUrl.getPort() === undefined ? \"\" : \":\" + parsedUrl.getPort());\n\n // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.\n // Case 2: localhost(:port), use broad regex to match port part.\n // Case 3: Ipv4, use broad regex which just check if host contains Ipv4.\n // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.\n return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])(\\.(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])){3}(:[0-9]+)?$/.test(\n host\n );\n}\n\n/**\n * Convert Tags to encoded string.\n *\n * @param tags -\n */\nexport function toBlobTagsString(tags?: Tags): string | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const tagPairs = [];\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`);\n }\n }\n\n return tagPairs.join(\"&\");\n}\n\n/**\n * Convert Tags type to BlobTags.\n *\n * @param tags -\n */\nexport function toBlobTags(tags?: Tags): BlobTags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: BlobTags = {\n blobTagSet: [],\n };\n\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n res.blobTagSet.push({\n key,\n value,\n });\n }\n }\n return res;\n}\n\n/**\n * Covert BlobTags to Tags type.\n *\n * @param tags -\n */\nexport function toTags(tags?: BlobTags): Tags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: Tags = {};\n for (const blobTag of tags.blobTagSet) {\n res[blobTag.key] = blobTag.value;\n }\n return res;\n}\n\n/**\n * Convert BlobQueryTextConfiguration to QuerySerialization type.\n *\n * @param textConfiguration -\n */\nexport function toQuerySerialization(\n textConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration\n | BlobQueryParquetConfiguration\n): QuerySerialization | undefined {\n if (textConfiguration === undefined) {\n return undefined;\n }\n\n switch (textConfiguration.kind) {\n case \"csv\":\n return {\n format: {\n type: \"delimited\",\n delimitedTextConfiguration: {\n columnSeparator: textConfiguration.columnSeparator || \",\",\n fieldQuote: textConfiguration.fieldQuote || \"\",\n recordSeparator: textConfiguration.recordSeparator,\n escapeChar: 
textConfiguration.escapeCharacter || \"\",\n headersPresent: textConfiguration.hasHeaders || false,\n },\n },\n };\n case \"json\":\n return {\n format: {\n type: \"json\",\n jsonTextConfiguration: {\n recordSeparator: textConfiguration.recordSeparator,\n },\n },\n };\n case \"arrow\":\n return {\n format: {\n type: \"arrow\",\n arrowConfiguration: {\n schema: textConfiguration.schema,\n },\n },\n };\n case \"parquet\":\n return {\n format: {\n type: \"parquet\",\n },\n };\n\n default:\n throw Error(\"Invalid BlobQueryTextConfiguration.\");\n }\n}\n\nexport function parseObjectReplicationRecord(\n objectReplicationRecord?: Record\n): ObjectReplicationPolicy[] | undefined {\n if (!objectReplicationRecord) {\n return undefined;\n }\n\n if (\"policy-id\" in objectReplicationRecord) {\n // If the dictionary contains a key with policy id, we are not required to do any parsing since\n // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.\n return undefined;\n }\n\n const orProperties: ObjectReplicationPolicy[] = [];\n for (const key in objectReplicationRecord) {\n const ids = key.split(\"_\");\n const policyPrefix = \"or-\";\n if (ids[0].startsWith(policyPrefix)) {\n ids[0] = ids[0].substring(policyPrefix.length);\n }\n const rule: ObjectReplicationRule = {\n ruleId: ids[1],\n replicationStatus: objectReplicationRecord[key] as ObjectReplicationStatus,\n };\n const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]);\n if (policyIndex > -1) {\n orProperties[policyIndex].rules.push(rule);\n } else {\n orProperties.push({\n policyId: ids[0],\n rules: [rule],\n });\n }\n }\n return orProperties;\n}\n\n/**\n * Attach a TokenCredential to an object.\n *\n * @param thing -\n * @param credential -\n */\nexport function attachCredential(thing: T, credential: TokenCredential): T {\n (thing as any).credential = credential;\n return thing;\n}\n\nexport function httpAuthorizationToString(\n httpAuthorization?: HttpAuthorization\n): string | undefined {\n return httpAuthorization ? 
httpAuthorization.scheme + \" \" + httpAuthorization.value : undefined;\n}\n\nexport function BlobNameToString(name: BlobName): string {\n if (name.encoded) {\n return decodeURIComponent(name.content!);\n } else {\n return name.content!;\n }\n}\n\nexport function ConvertInternalResponseOfListBlobFlat(\n internalResponse: ListBlobsFlatSegmentResponse\n): ListBlobsFlatSegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nexport function ConvertInternalResponseOfListBlobHierarchy(\n internalResponse: ListBlobsHierarchySegmentResponse\n): ListBlobsHierarchySegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobPrefixes: internalResponse.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefixModel = {\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nfunction decodeBase64String(value: string): Uint8Array {\n if (isNode) {\n return Buffer.from(value, \"base64\");\n } else {\n const byteString = atob(value);\n const arr = new Uint8Array(byteString.length);\n for (let i = 0; i < byteString.length; i++) {\n arr[i] = byteString.charCodeAt(i);\n }\n return arr;\n }\n}\n\nfunction ParseBoolean(content: any) {\n if (content === undefined) return undefined;\n if (content === \"true\") return true;\n if (content === \"false\") return false;\n return undefined;\n}\n\nfunction ParseBlobName(blobNameInXML: any): BlobName {\n if (blobNameInXML[\"$\"] !== undefined && blobNameInXML[\"#\"] !== undefined) {\n return {\n encoded: ParseBoolean(blobNameInXML[\"$\"][\"Encoded\"]),\n content: blobNameInXML[\"#\"] as string,\n };\n } else {\n return {\n encoded: false,\n content: blobNameInXML as string,\n };\n }\n}\n\nfunction ParseBlobProperties(blobPropertiesInXML: any): BlobPropertiesInternal {\n const blobProperties = blobPropertiesInXML;\n if (blobPropertiesInXML[\"Creation-Time\"]) {\n blobProperties.createdOn = new Date(blobPropertiesInXML[\"Creation-Time\"] as string);\n delete blobProperties[\"Creation-Time\"];\n }\n\n if (blobPropertiesInXML[\"Last-Modified\"]) {\n blobProperties.lastModified = new Date(blobPropertiesInXML[\"Last-Modified\"] as string);\n delete blobProperties[\"Last-Modified\"];\n }\n\n if (blobPropertiesInXML[\"Etag\"]) {\n blobProperties.etag = blobPropertiesInXML[\"Etag\"] as string;\n delete blobProperties[\"Etag\"];\n }\n\n if (blobPropertiesInXML[\"Content-Length\"]) {\n blobProperties.contentLength = parseFloat(blobPropertiesInXML[\"Content-Length\"] as string);\n delete blobProperties[\"Content-Length\"];\n }\n\n if (blobPropertiesInXML[\"Content-Type\"]) {\n blobProperties.contentType = blobPropertiesInXML[\"Content-Type\"] as string;\n delete blobProperties[\"Content-Type\"];\n }\n\n if (blobPropertiesInXML[\"Content-Encoding\"]) {\n blobProperties.contentEncoding = blobPropertiesInXML[\"Content-Encoding\"] as string;\n delete blobProperties[\"Content-Encoding\"];\n }\n\n if (blobPropertiesInXML[\"Content-Language\"]) {\n blobProperties.contentLanguage = blobPropertiesInXML[\"Content-Language\"] as string;\n delete 
blobProperties[\"Content-Language\"];\n }\n\n if (blobPropertiesInXML[\"Content-MD5\"]) {\n blobProperties.contentMD5 = decodeBase64String(blobPropertiesInXML[\"Content-MD5\"] as string);\n delete blobProperties[\"Content-MD5\"];\n }\n\n if (blobPropertiesInXML[\"Content-Disposition\"]) {\n blobProperties.contentDisposition = blobPropertiesInXML[\"Content-Disposition\"] as string;\n delete blobProperties[\"Content-Disposition\"];\n }\n\n if (blobPropertiesInXML[\"Cache-Control\"]) {\n blobProperties.cacheControl = blobPropertiesInXML[\"Cache-Control\"] as string;\n delete blobProperties[\"Cache-Control\"];\n }\n\n if (blobPropertiesInXML[\"x-ms-blob-sequence-number\"]) {\n blobProperties.blobSequenceNumber = parseFloat(\n blobPropertiesInXML[\"x-ms-blob-sequence-number\"] as string\n );\n delete blobProperties[\"x-ms-blob-sequence-number\"];\n }\n\n if (blobPropertiesInXML[\"BlobType\"]) {\n blobProperties.blobType = blobPropertiesInXML[\"BlobType\"] as BlobType;\n delete blobProperties[\"BlobType\"];\n }\n\n if (blobPropertiesInXML[\"LeaseStatus\"]) {\n blobProperties.leaseStatus = blobPropertiesInXML[\"LeaseStatus\"] as LeaseStatusType;\n delete blobProperties[\"LeaseStatus\"];\n }\n\n if (blobPropertiesInXML[\"LeaseState\"]) {\n blobProperties.leaseState = blobPropertiesInXML[\"LeaseState\"] as LeaseStateType;\n delete blobProperties[\"LeaseState\"];\n }\n\n if (blobPropertiesInXML[\"LeaseDuration\"]) {\n blobProperties.leaseDuration = blobPropertiesInXML[\"LeaseDuration\"] as LeaseDurationType;\n delete blobProperties[\"LeaseDuration\"];\n }\n\n if (blobPropertiesInXML[\"CopyId\"]) {\n blobProperties.copyId = blobPropertiesInXML[\"CopyId\"] as string;\n delete blobProperties[\"CopyId\"];\n }\n\n if (blobPropertiesInXML[\"CopyStatus\"]) {\n blobProperties.copyStatus = blobPropertiesInXML[\"CopyStatus\"] as CopyStatusType;\n delete blobProperties[\"CopyStatus\"];\n }\n\n if (blobPropertiesInXML[\"CopySource\"]) {\n blobProperties.copySource = blobPropertiesInXML[\"CopySource\"] as string;\n delete blobProperties[\"CopySource\"];\n }\n\n if (blobPropertiesInXML[\"CopyProgress\"]) {\n blobProperties.copyProgress = blobPropertiesInXML[\"CopyProgress\"] as string;\n delete blobProperties[\"CopyProgress\"];\n }\n\n if (blobPropertiesInXML[\"CopyCompletionTime\"]) {\n blobProperties.copyCompletedOn = new Date(blobPropertiesInXML[\"CopyCompletionTime\"] as string);\n delete blobProperties[\"CopyCompletionTime\"];\n }\n\n if (blobPropertiesInXML[\"CopyStatusDescription\"]) {\n blobProperties.copyStatusDescription = blobPropertiesInXML[\"CopyStatusDescription\"] as string;\n delete blobProperties[\"CopyStatusDescription\"];\n }\n\n if (blobPropertiesInXML[\"ServerEncrypted\"]) {\n blobProperties.serverEncrypted = ParseBoolean(blobPropertiesInXML[\"ServerEncrypted\"]);\n delete blobProperties[\"ServerEncrypted\"];\n }\n\n if (blobPropertiesInXML[\"IncrementalCopy\"]) {\n blobProperties.incrementalCopy = ParseBoolean(blobPropertiesInXML[\"IncrementalCopy\"]);\n delete blobProperties[\"IncrementalCopy\"];\n }\n\n if (blobPropertiesInXML[\"DestinationSnapshot\"]) {\n blobProperties.destinationSnapshot = blobPropertiesInXML[\"DestinationSnapshot\"] as string;\n delete blobProperties[\"DestinationSnapshot\"];\n }\n\n if (blobPropertiesInXML[\"DeletedTime\"]) {\n blobProperties.deletedOn = new Date(blobPropertiesInXML[\"DeletedTime\"] as string);\n delete blobProperties[\"DeletedTime\"];\n }\n\n if (blobPropertiesInXML[\"RemainingRetentionDays\"]) {\n blobProperties.remainingRetentionDays = 
parseFloat(\n blobPropertiesInXML[\"RemainingRetentionDays\"] as string\n );\n delete blobProperties[\"RemainingRetentionDays\"];\n }\n\n if (blobPropertiesInXML[\"AccessTier\"]) {\n blobProperties.accessTier = blobPropertiesInXML[\"AccessTier\"] as AccessTier;\n delete blobProperties[\"AccessTier\"];\n }\n\n if (blobPropertiesInXML[\"AccessTierInferred\"]) {\n blobProperties.accessTierInferred = ParseBoolean(blobPropertiesInXML[\"AccessTierInferred\"]);\n delete blobProperties[\"AccessTierInferred\"];\n }\n\n if (blobPropertiesInXML[\"ArchiveStatus\"]) {\n blobProperties.archiveStatus = blobPropertiesInXML[\"ArchiveStatus\"] as ArchiveStatus;\n delete blobProperties[\"ArchiveStatus\"];\n }\n\n if (blobPropertiesInXML[\"CustomerProvidedKeySha256\"]) {\n blobProperties.customerProvidedKeySha256 = blobPropertiesInXML[\n \"CustomerProvidedKeySha256\"\n ] as string;\n delete blobProperties[\"CustomerProvidedKeySha256\"];\n }\n\n if (blobPropertiesInXML[\"EncryptionScope\"]) {\n blobProperties.encryptionScope = blobPropertiesInXML[\"EncryptionScope\"] as string;\n delete blobProperties[\"EncryptionScope\"];\n }\n\n if (blobPropertiesInXML[\"AccessTierChangeTime\"]) {\n blobProperties.accessTierChangedOn = new Date(\n blobPropertiesInXML[\"AccessTierChangeTime\"] as string\n );\n delete blobProperties[\"AccessTierChangeTime\"];\n }\n\n if (blobPropertiesInXML[\"TagCount\"]) {\n blobProperties.tagCount = parseFloat(blobPropertiesInXML[\"TagCount\"] as string);\n delete blobProperties[\"TagCount\"];\n }\n\n if (blobPropertiesInXML[\"Expiry-Time\"]) {\n blobProperties.expiresOn = new Date(blobPropertiesInXML[\"Expiry-Time\"] as string);\n delete blobProperties[\"Expiry-Time\"];\n }\n\n if (blobPropertiesInXML[\"Sealed\"]) {\n blobProperties.isSealed = ParseBoolean(blobPropertiesInXML[\"Sealed\"]);\n delete blobProperties[\"Sealed\"];\n }\n\n if (blobPropertiesInXML[\"RehydratePriority\"]) {\n blobProperties.rehydratePriority = blobPropertiesInXML[\n \"RehydratePriority\"\n ] as RehydratePriority;\n delete blobProperties[\"RehydratePriority\"];\n }\n\n if (blobPropertiesInXML[\"LastAccessTime\"]) {\n blobProperties.lastAccessedOn = new Date(blobPropertiesInXML[\"LastAccessTime\"] as string);\n delete blobProperties[\"LastAccessTime\"];\n }\n\n if (blobPropertiesInXML[\"ImmutabilityPolicyUntilDate\"]) {\n blobProperties.immutabilityPolicyExpiresOn = new Date(\n blobPropertiesInXML[\"ImmutabilityPolicyUntilDate\"] as string\n );\n delete blobProperties[\"ImmutabilityPolicyUntilDate\"];\n }\n\n if (blobPropertiesInXML[\"ImmutabilityPolicyMode\"]) {\n blobProperties.immutabilityPolicyMode = blobPropertiesInXML[\n \"ImmutabilityPolicyMode\"\n ] as BlobImmutabilityPolicyMode;\n delete blobProperties[\"ImmutabilityPolicyMode\"];\n }\n\n if (blobPropertiesInXML[\"LegalHold\"]) {\n blobProperties.legalHold = ParseBoolean(blobPropertiesInXML[\"LegalHold\"]);\n delete blobProperties[\"LegalHold\"];\n }\n\n return blobProperties;\n}\n\nfunction ParseBlobItem(blobInXML: any): BlobItemInternal {\n const blobItem = blobInXML;\n blobItem.properties = ParseBlobProperties(blobInXML[\"Properties\"]);\n delete blobItem[\"Properties\"];\n\n blobItem.name = ParseBlobName(blobInXML[\"Name\"]);\n delete blobItem[\"Name\"];\n blobItem.deleted = ParseBoolean(blobInXML[\"Deleted\"])!;\n delete blobItem[\"Deleted\"];\n\n if (blobInXML[\"Snapshot\"]) {\n blobItem.snapshot = blobInXML[\"Snapshot\"] as string;\n delete blobItem[\"Snapshot\"];\n }\n\n if (blobInXML[\"VersionId\"]) {\n blobItem.versionId = blobInXML[\"VersionId\"] 
as string;\n delete blobItem[\"VersionId\"];\n }\n\n if (blobInXML[\"IsCurrentVersion\"]) {\n blobItem.isCurrentVersion = ParseBoolean(blobInXML[\"IsCurrentVersion\"]);\n delete blobItem[\"IsCurrentVersion\"];\n }\n\n if (blobInXML[\"Metadata\"]) {\n blobItem.metadata = blobInXML[\"Metadata\"];\n delete blobItem[\"Metadata\"];\n }\n\n if (blobInXML[\"Tags\"]) {\n blobItem.blobTags = ParseBlobTags(blobInXML[\"Tags\"]);\n delete blobItem[\"Tags\"];\n }\n\n if (blobInXML[\"OrMetadata\"]) {\n blobItem.objectReplicationMetadata = blobInXML[\"OrMetadata\"];\n delete blobItem[\"OrMetadata\"];\n }\n\n if (blobInXML[\"HasVersionsOnly\"]) {\n blobItem.hasVersionsOnly = ParseBoolean(blobInXML[\"HasVersionsOnly\"]);\n delete blobItem[\"HasVersionsOnly\"];\n }\n return blobItem;\n}\n\nfunction ParseBlobPrefix(blobPrefixInXML: any): BlobPrefix {\n return {\n name: ParseBlobName(blobPrefixInXML[\"Name\"]),\n };\n}\n\nfunction ParseBlobTag(blobTagInXML: any): BlobTag {\n return {\n key: blobTagInXML[\"Key\"],\n value: blobTagInXML[\"Value\"],\n };\n}\n\nfunction ParseBlobTags(blobTagsInXML: any): BlobTags | undefined {\n if (\n blobTagsInXML === undefined ||\n blobTagsInXML[\"TagSet\"] === undefined ||\n blobTagsInXML[\"TagSet\"][\"Tag\"] === undefined\n ) {\n return undefined;\n }\n\n const blobTagSet = [];\n if (blobTagsInXML[\"TagSet\"][\"Tag\"] instanceof Array) {\n blobTagsInXML[\"TagSet\"][\"Tag\"].forEach((blobTagInXML: any) => {\n blobTagSet.push(ParseBlobTag(blobTagInXML));\n });\n } else {\n blobTagSet.push(ParseBlobTag(blobTagsInXML[\"TagSet\"][\"Tag\"]));\n }\n\n return { blobTagSet: blobTagSet };\n}\n\nexport function ProcessBlobItems(blobArrayInXML: any[]): BlobItemInternal[] {\n const blobItems = [];\n\n if (blobArrayInXML instanceof Array) {\n blobArrayInXML.forEach((blobInXML: any) => {\n blobItems.push(ParseBlobItem(blobInXML));\n });\n } else {\n blobItems.push(ParseBlobItem(blobArrayInXML));\n }\n\n return blobItems;\n}\n\nexport function ProcessBlobPrefixes(blobPrefixesInXML: any[]): BlobPrefix[] {\n const blobPrefixes = [];\n\n if (blobPrefixesInXML instanceof Array) {\n blobPrefixesInXML.forEach((blobPrefixInXML: any) => {\n blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML));\n });\n } else {\n blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML));\n }\n\n return blobPrefixes;\n}\n\nexport function* ExtractPageRangeInfoItems(\n getPageRangesSegment: PageBlobGetPageRangesDiffResponseModel\n): IterableIterator {\n let pageRange: PageRange[] = [];\n let clearRange: ClearRange[] = [];\n\n if (getPageRangesSegment.pageRange) pageRange = getPageRangesSegment.pageRange;\n if (getPageRangesSegment.clearRange) clearRange = getPageRangesSegment.clearRange;\n\n let pageRangeIndex = 0;\n let clearRangeIndex = 0;\n\n while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) {\n if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n ++pageRangeIndex;\n } else {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: clearRange[clearRangeIndex].end,\n isClear: true,\n };\n ++clearRangeIndex;\n }\n }\n\n for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n }\n\n for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: 
clearRange[clearRangeIndex].end,\n isClear: true,\n };\n }\n}\n"]}
\ No newline at end of file
diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js
new file mode 100644
index 00000000..788256cb
--- /dev/null
+++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js
@@ -0,0 +1,125 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import * as fs from "fs";
+import * as util from "util";
+/**
+ * Reads a readable stream into buffer. Fill the buffer from offset to end.
+ *
+ * @param stream - A Node.js Readable stream
+ * @param buffer - Buffer to be filled, length must greater than or equal to offset
+ * @param offset - From which position in the buffer to be filled, inclusive
+ * @param end - To which position in the buffer to be filled, exclusive
+ * @param encoding - Encoding of the Readable stream
+ */
+export async function streamToBuffer(stream, buffer, offset, end, encoding) {
+    let pos = 0; // Position in stream
+    const count = end - offset; // Total amount of data needed in stream
+    return new Promise((resolve, reject) => {
+        stream.on("readable", () => {
+            if (pos >= count) {
+                resolve();
+                return;
+            }
+            let chunk = stream.read();
+            if (!chunk) {
+                return;
+            }
+            if (typeof chunk === "string") {
+                chunk = Buffer.from(chunk, encoding);
+            }
+            // How much data needed in this chunk
+            const chunkLength = pos + chunk.length > count ? count - pos : chunk.length;
+            buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);
+            pos += chunkLength;
+        });
+        stream.on("end", () => {
+            if (pos < count) {
+                reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`));
+            }
+            resolve();
+        });
+        stream.on("error", reject);
+    });
+}
+/**
+ * Reads a readable stream into buffer entirely.
+ *
+ * @param stream - A Node.js Readable stream
+ * @param buffer - Buffer to be filled, length must greater than or equal to offset
+ * @param encoding - Encoding of the Readable stream
+ * @returns with the count of bytes read.
+ * @throws `RangeError` If buffer size is not big enough.
+ */
+export async function streamToBuffer2(stream, buffer, encoding) {
+    let pos = 0; // Position in stream
+    const bufferSize = buffer.length;
+    return new Promise((resolve, reject) => {
+        stream.on("readable", () => {
+            let chunk = stream.read();
+            if (!chunk) {
+                return;
+            }
+            if (typeof chunk === "string") {
+                chunk = Buffer.from(chunk, encoding);
+            }
+            if (pos + chunk.length > bufferSize) {
+                reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`));
+                return;
+            }
+            buffer.fill(chunk, pos, pos + chunk.length);
+            pos += chunk.length;
+        });
+        stream.on("end", () => {
+            resolve(pos);
+        });
+        stream.on("error", reject);
+    });
+}
+/**
+ * Reads a readable stream into a buffer.
+ *
+ * @param stream - A Node.js Readable stream
+ * @param encoding - Encoding of the Readable stream
+ * @returns with the count of bytes read.
+ */
+export async function streamToBuffer3(readableStream, encoding) {
+    return new Promise((resolve, reject) => {
+        const chunks = [];
+        readableStream.on("data", (data) => {
+            chunks.push(data instanceof Buffer ? data : Buffer.from(data, encoding));
+        });
+        readableStream.on("end", () => {
+            resolve(Buffer.concat(chunks));
+        });
+        readableStream.on("error", reject);
+    });
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. + */ +export async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +export const fsStat = util.promisify(fs.stat); +export const fsCreateReadStream = fs.createReadStream; +//# sourceMappingURL=utils.node.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js.map new file mode 100644 index 00000000..7326ab5d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.node.js","sourceRoot":"","sources":["../../../../src/utils/utils.node.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAE7B;;;;;;;;GAQG;AACH,MAAM,CAAC,KAAK,UAAU,cAAc,CAClC,MAA6B,EAC7B,MAAc,EACd,MAAc,EACd,GAAW,EACX,QAAyB;IAEzB,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,qBAAqB;IAClC,MAAM,KAAK,GAAG,GAAG,GAAG,MAAM,CAAC,CAAC,wCAAwC;IAEpE,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3C,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,GAAG,EAAE;YACzB,IAAI,GAAG,IAAI,KAAK,EAAE;gBAChB,OAAO,EAAE,CAAC;gBACV,OAAO;aACR;YAED,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;aACR;YACD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;aACtC;YAED,qCAAqC;YACrC,MAAM,WAAW,GAAG,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC;YAE5E,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,WAAW,CAAC,CAAC;YACnF,GAAG,IAAI,WAAW,CAAC;QACrB,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YACpB,IAAI,GAAG,GAAG,KAAK,EAAE;gBACf,MAAM,CACJ,IAAI,KAAK,CACP,+DAA+D,GAAG,gBAAgB,KAAK,EAAE,CAC1F,CACF,CAAC;aACH;YACD,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC7B,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,MAA6B,EAC7B,MAAc,EACd,QAAyB;IAEzB,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,qBAAqB;IAClC,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC;IAEjC,OAAO,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC7C,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,GAAG,EAAE;YACzB,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;aACR;YACD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;aACtC;YAED,IAAI,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,UAAU,EAAE;gBACnC,MAAM,CAAC,IAAI,KAAK,CAAC,4CAA4C,UAAU,EAAE,CAAC,CAAC,CAAC;gBAC5E,OAAO;aACR;YAED,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,EAAE,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;YAC5C,GAAG,IAAI,KAAK,CAAC,MAAM,CAAC;QACtB,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YACpB,OAAO,CAAC,GAAG,CAAC,CAAC;QACf,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC7B,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,cAAqC,EACrC,QAAyB;IAEzB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,MAAM,GAAa,EAAE,CAAC;QAC5B,cAAc,CAAC,EAA
E,CAAC,MAAM,EAAE,CAAC,IAAqB,EAAE,EAAE;YAClD,MAAM,CAAC,IAAI,CAAC,IAAI,YAAY,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;QAC3E,CAAC,CAAC,CAAC;QACH,cAAc,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YAC5B,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;QACjC,CAAC,CAAC,CAAC;QACH,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,CAAC,KAAK,UAAU,qBAAqB,CACzC,EAAyB,EACzB,IAAY;IAEZ,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3C,MAAM,EAAE,GAAG,EAAE,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAEtC,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAExB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IACd,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC;AAE9C,MAAM,CAAC,MAAM,kBAAkB,GAAG,EAAE,CAAC,gBAAgB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as fs from \"fs\";\nimport * as util from \"util\";\n\n/**\n * Reads a readable stream into buffer. Fill the buffer from offset to end.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param offset - From which position in the buffer to be filled, inclusive\n * @param end - To which position in the buffer to be filled, exclusive\n * @param encoding - Encoding of the Readable stream\n */\nexport async function streamToBuffer(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n offset: number,\n end: number,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const count = end - offset; // Total amount of data needed in stream\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n if (pos >= count) {\n resolve();\n return;\n }\n\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n // How much data needed in this chunk\n const chunkLength = pos + chunk.length > count ? count - pos : chunk.length;\n\n buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);\n pos += chunkLength;\n });\n\n stream.on(\"end\", () => {\n if (pos < count) {\n reject(\n new Error(\n `Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`\n )\n );\n }\n resolve();\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into buffer entirely.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n * @throws `RangeError` If buffer size is not big enough.\n */\nexport async function streamToBuffer2(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const bufferSize = buffer.length;\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n if (pos + chunk.length > bufferSize) {\n reject(new Error(`Stream exceeds buffer size. 
Buffer size: ${bufferSize}`));\n return;\n }\n\n buffer.fill(chunk, pos, pos + chunk.length);\n pos += chunk.length;\n });\n\n stream.on(\"end\", () => {\n resolve(pos);\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into a buffer.\n *\n * @param stream - A Node.js Readable stream\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n */\nexport async function streamToBuffer3(\n readableStream: NodeJS.ReadableStream,\n encoding?: BufferEncoding\n): Promise {\n return new Promise((resolve, reject) => {\n const chunks: Buffer[] = [];\n readableStream.on(\"data\", (data: Buffer | string) => {\n chunks.push(data instanceof Buffer ? data : Buffer.from(data, encoding));\n });\n readableStream.on(\"end\", () => {\n resolve(Buffer.concat(chunks));\n });\n readableStream.on(\"error\", reject);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed.\n *\n * @param rs - The read stream.\n * @param file - Destination file path.\n */\nexport async function readStreamToLocalFile(\n rs: NodeJS.ReadableStream,\n file: string\n): Promise {\n return new Promise((resolve, reject) => {\n const ws = fs.createWriteStream(file);\n\n rs.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"close\", resolve);\n\n rs.pipe(ws);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Promisified version of fs.stat().\n */\nexport const fsStat = util.promisify(fs.stat);\n\nexport const fsCreateReadStream = fs.createReadStream;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js new file mode 100644 index 00000000..c7ae8635 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// This file is used as a shim of "BufferScheduler" for some browser bundlers +// when trying to bundle "BufferScheduler" +// "BufferScheduler" class is only available in Node.js runtime +export class BufferScheduler { +} +//# sourceMappingURL=BufferScheduler.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map new file mode 100644 index 00000000..7bf77330 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BufferScheduler.browser.js","sourceRoot":"","sources":["../../../../storage-common/src/BufferScheduler.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,6EAA6E;AAC7E,0CAA0C;AAC1C,+DAA+D;AAC/D,MAAM,OAAO,eAAe;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// This file is used as a shim of \"BufferScheduler\" for some browser bundlers\n// when trying to bundle \"BufferScheduler\"\n// \"BufferScheduler\" class is only available in Node.js runtime\nexport class BufferScheduler {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js new file mode 100644 index 00000000..0d92285c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js @@ -0,0 +1,252 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { EventEmitter } from "events"; +import { PooledBuffer } from "./PooledBuffer"; +/** + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. + * + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. + * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. + */ +export class BufferScheduler { + /** + * Creates an instance of BufferScheduler. 
+ * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. + */ + this.emitter = new EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? 
Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. + * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. 
+ * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } +} +//# sourceMappingURL=BufferScheduler.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map new file mode 100644 index 00000000..a6b21615 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BufferScheduler.js","sourceRoot":"","sources":["../../../../storage-common/src/BufferScheduler.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAEtC,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAW9C;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MAAM,OAAO,eAAe;IAuF1B;;;;;;;;;;;OAWG;IACH,YACE,QAAkB,EAClB,UAAkB,EAClB,UAAkB,EAClB,eAAgC,EAChC,WAAmB,EACnB,QAAyB;QAlF3B;;WAEG;QACc,YAAO,GAAiB,IAAI,YAAY,EAAE,CAAC;QAO5D;;WAEG;QACK,WAAM,GAAW,CAAC,CAAC;QAE3B;;WAEG;QACK,gBAAW,GAAY,KAAK,CAAC;QAErC;;WAEG;QACK,YAAO,GAAY,KAAK,CAAC;QAEjC;;WAEG;QACK,8BAAyB,GAAW,CAAC,CAAC;QAO9C;;WAEG;QACK,eAAU,GAAW,CAAC,CAAC;QAE/B;;;;;;WAMG;QACK,wBAAmB,GAAa,EAAE,CAAC;QAE3C;;WAEG;QACK,qBAAgB,GAAW,CAAC,CAAC;QAErC;;WAEG;QACK,aAAQ,GAAmB,EAAE,CAAC;QAEtC;;WAEG;QACK,aAAQ,GAAmB,EAAE,CAAC;QAsBpC,IAAI,UAAU,IAAI,CAAC,EAAE;YACnB,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,EAAE,CAAC,CAAC;SACpF;QAED,IAAI,UAAU,IAAI,CAAC,EAAE;YACnB,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,EAAE,CAAC,CAAC;SACpF;QAED,IAAI,WAAW,IAAI,CAAC,EAAE;YACpB,MAAM,IAAI,UAAU,CAAC,iDAAiD,WAAW,EAAE,CAAC,CAAC;SACtF;QAED,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;QACvC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAC3B,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,EAAE;QACb,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC3C,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;gBAChC,IAAI,GAAG,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;gBAC1E,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC;gBAEhC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;oBACvB,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;iBACvB;YACH,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;gBAChC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAClC,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;gBAC3B,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;gBACxB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAChC,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;gBAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;gBACpB,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;gBACtB,MAAM,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,EAAE,GAAG,EAAE;gBAC/B,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,IAAI,CAAC,uBAAuB,EAAE,CAAC;oBAC/B,OAAO;iBACR;gBAED,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,yBAAyB,KAAK,CAAC,EAAE;oBAC5D,IAAI,IAAI,CAAC,gBAAgB,GAAG,CAAC,IAAI,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,UAAU,EAAE;wBACxE,MAAM,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;wBACzD,IAAI,CAAC,eAAe,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,iBAAiB,EAAE,EAAE,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC;6BAC7E,IAAI,CAAC,OAAO,CAAC;6BACb,KAAK,CAAC,MAAM,CAAC,CAAC;qBAClB;yBAAM,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;wBACnD,OAAO;qBACR;yBAAM;wBACL,OAAO,EAAE,CAAC;qBACX;iBACF;YACH,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACK,o
BAAoB,CAAC,IAAY;QACvC,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACpC,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,MAAM,CAAC;IACvC,CAAC;IAED;;;;OAIG;IACK,kCAAkC,CAAC,MAAqB;QAC9D,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;SAC7F;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;SAC9D;QAED,IAAI,CAAC,gBAAgB,IAAI,MAAM,CAAC,IAAI,CAAC;QACrC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;;;OAQG;IACK,WAAW;QACjB,OAAO,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;YAC/C,IAAI,MAAoB,CAAC;YAEzB,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC5B,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAG,CAAC;gBAChC,IAAI,CAAC,kCAAkC,CAAC,MAAM,CAAC,CAAC;aACjD;iBAAM;gBACL,IAAI,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE;oBACrC,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;oBACnD,IAAI,CAAC,UAAU,EAAE,CAAC;iBACnB;qBAAM;oBACL,gDAAgD;oBAChD,OAAO,KAAK,CAAC;iBACd;aACF;YAED,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC3B,IAAI,CAAC,uBAAuB,EAAE,CAAC;SAChC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACK,KAAK,CAAC,uBAAuB;QACnC,IAAI,MAAgC,CAAC;QACrC,GAAG;YACD,IAAI,IAAI,CAAC,yBAAyB,IAAI,IAAI,CAAC,WAAW,EAAE;gBACtD,OAAO;aACR;YAED,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;YAC/B,IAAI,MAAM,EAAE;gBACV,IAAI,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC;aACrC;SACF,QAAQ,MAAM,EAAE;IACnB,CAAC;IAED;;;;OAIG;IACK,KAAK,CAAC,sBAAsB,CAAC,MAAoB;QACvD,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC;QAEjC,IAAI,CAAC,yBAAyB,EAAE,CAAC;QACjC,IAAI,CAAC,MAAM,IAAI,YAAY,CAAC;QAE5B,IAAI;YACF,MAAM,IAAI,CAAC,eAAe,CACxB,GAAG,EAAE,CAAC,MAAM,CAAC,iBAAiB,EAAE,EAChC,YAAY,EACZ,IAAI,CAAC,MAAM,GAAG,YAAY,CAC3B,CAAC;SACH;QAAC,OAAO,GAAQ,EAAE;YACjB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAChC,OAAO;SACR;QAED,IAAI,CAAC,yBAAyB,EAAE,CAAC;QACjC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QACzB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;IAChC,CAAC;IAED;;;;OAIG;IACK,WAAW,CAAC,MAAoB;QACtC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;YAC5D,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC;SACxB;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { EventEmitter } from \"events\";\nimport { Readable } from \"stream\";\nimport { PooledBuffer } from \"./PooledBuffer\";\n\n/**\n * OutgoingHandler is an async function triggered by BufferScheduler.\n */\nexport declare type OutgoingHandler = (\n body: () => NodeJS.ReadableStream,\n length: number,\n offset?: number\n) => Promise;\n\n/**\n * This class accepts a Node.js Readable stream as input, and keeps reading data\n * from the stream into the internal buffer structure, until it reaches maxBuffers.\n * Every available buffer will try to trigger outgoingHandler.\n *\n * The internal buffer structure includes an incoming buffer array, and a outgoing\n * buffer array. The incoming buffer array includes the \"empty\" buffers can be filled\n * with new incoming data. The outgoing array includes the filled buffers to be\n * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize.\n *\n * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING\n *\n * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * 1. Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n * 2. 
concurrency should set a smaller value than maxBuffers, which is helpful to\n * reduce the possibility when a outgoing handler waits for the stream data.\n * in this situation, outgoing handlers are blocked.\n * Outgoing queue shouldn't be empty.\n */\nexport class BufferScheduler {\n /**\n * Size of buffers in incoming and outgoing queues. This class will try to align\n * data read from Readable stream into buffer chunks with bufferSize defined.\n */\n private readonly bufferSize: number;\n\n /**\n * How many buffers can be created or maintained.\n */\n private readonly maxBuffers: number;\n\n /**\n * A Node.js Readable stream.\n */\n private readonly readable: Readable;\n\n /**\n * OutgoingHandler is an async function triggered by BufferScheduler when there\n * are available buffers in outgoing array.\n */\n private readonly outgoingHandler: OutgoingHandler;\n\n /**\n * An internal event emitter.\n */\n private readonly emitter: EventEmitter = new EventEmitter();\n\n /**\n * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers)\n */\n private readonly concurrency: number;\n\n /**\n * An internal offset marker to track data offset in bytes of next outgoingHandler.\n */\n private offset: number = 0;\n\n /**\n * An internal marker to track whether stream is end.\n */\n private isStreamEnd: boolean = false;\n\n /**\n * An internal marker to track whether stream or outgoingHandler returns error.\n */\n private isError: boolean = false;\n\n /**\n * How many handlers are executing.\n */\n private executingOutgoingHandlers: number = 0;\n\n /**\n * Encoding of the input Readable stream which has string data type instead of Buffer.\n */\n private encoding?: BufferEncoding;\n\n /**\n * How many buffers have been allocated.\n */\n private numBuffers: number = 0;\n\n /**\n * Because this class doesn't know how much data every time stream pops, which\n * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache\n * data received from the stream, when data in unresolvedDataArray exceeds the\n * blockSize defined, it will try to concat a blockSize of buffer, fill into available\n * buffers from incoming and push to outgoing array.\n */\n private unresolvedDataArray: Buffer[] = [];\n\n /**\n * How much data consisted in unresolvedDataArray.\n */\n private unresolvedLength: number = 0;\n\n /**\n * The array includes all the available buffers can be used to fill data from stream.\n */\n private incoming: PooledBuffer[] = [];\n\n /**\n * The array (queue) includes all the buffers filled from stream data.\n */\n private outgoing: PooledBuffer[] = [];\n\n /**\n * Creates an instance of BufferScheduler.\n *\n * @param readable - A Node.js Readable stream\n * @param bufferSize - Buffer size of every maintained buffer\n * @param maxBuffers - How many buffers can be allocated\n * @param outgoingHandler - An async function scheduled to be\n * triggered when a buffer fully filled\n * with stream data\n * @param concurrency - Concurrency of executing outgoingHandlers (>0)\n * @param encoding - [Optional] Encoding of Readable stream when it's a string stream\n */\n constructor(\n readable: Readable,\n bufferSize: number,\n maxBuffers: number,\n outgoingHandler: OutgoingHandler,\n concurrency: number,\n encoding?: BufferEncoding\n ) {\n if (bufferSize <= 0) {\n throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`);\n }\n\n if (maxBuffers <= 0) {\n throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`);\n }\n\n if (concurrency <= 0) {\n throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`);\n }\n\n this.bufferSize = bufferSize;\n this.maxBuffers = maxBuffers;\n this.readable = readable;\n this.outgoingHandler = outgoingHandler;\n this.concurrency = concurrency;\n this.encoding = encoding;\n }\n\n /**\n * Start the scheduler, will return error when stream of any of the outgoingHandlers\n * returns error.\n *\n */\n public async do(): Promise {\n return new Promise((resolve, reject) => {\n this.readable.on(\"data\", (data) => {\n data = typeof data === \"string\" ? Buffer.from(data, this.encoding) : data;\n this.appendUnresolvedData(data);\n\n if (!this.resolveData()) {\n this.readable.pause();\n }\n });\n\n this.readable.on(\"error\", (err) => {\n this.emitter.emit(\"error\", err);\n });\n\n this.readable.on(\"end\", () => {\n this.isStreamEnd = true;\n this.emitter.emit(\"checkEnd\");\n });\n\n this.emitter.on(\"error\", (err) => {\n this.isError = true;\n this.readable.pause();\n reject(err);\n });\n\n this.emitter.on(\"checkEnd\", () => {\n if (this.outgoing.length > 0) {\n this.triggerOutgoingHandlers();\n return;\n }\n\n if (this.isStreamEnd && this.executingOutgoingHandlers === 0) {\n if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) {\n const buffer = this.shiftBufferFromUnresolvedDataArray();\n this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset)\n .then(resolve)\n .catch(reject);\n } else if (this.unresolvedLength >= this.bufferSize) {\n return;\n } else {\n resolve();\n }\n }\n });\n });\n }\n\n /**\n * Insert a new data into unresolved array.\n *\n * @param data -\n */\n private appendUnresolvedData(data: Buffer) {\n this.unresolvedDataArray.push(data);\n this.unresolvedLength += data.length;\n }\n\n /**\n * Try to shift a buffer with size in blockSize. 
The buffer returned may be less\n * than blockSize when data in unresolvedDataArray is less than bufferSize.\n *\n */\n private shiftBufferFromUnresolvedDataArray(buffer?: PooledBuffer): PooledBuffer {\n if (!buffer) {\n buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);\n } else {\n buffer.fill(this.unresolvedDataArray, this.unresolvedLength);\n }\n\n this.unresolvedLength -= buffer.size;\n return buffer;\n }\n\n /**\n * Resolve data in unresolvedDataArray. For every buffer with size in blockSize\n * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,\n * then push it into outgoing to be handled by outgoing handler.\n *\n * Return false when available buffers in incoming are not enough, else true.\n *\n * @returns Return false when buffers in incoming are not enough, else true.\n */\n private resolveData(): boolean {\n while (this.unresolvedLength >= this.bufferSize) {\n let buffer: PooledBuffer;\n\n if (this.incoming.length > 0) {\n buffer = this.incoming.shift()!;\n this.shiftBufferFromUnresolvedDataArray(buffer);\n } else {\n if (this.numBuffers < this.maxBuffers) {\n buffer = this.shiftBufferFromUnresolvedDataArray();\n this.numBuffers++;\n } else {\n // No available buffer, wait for buffer returned\n return false;\n }\n }\n\n this.outgoing.push(buffer);\n this.triggerOutgoingHandlers();\n }\n return true;\n }\n\n /**\n * Try to trigger a outgoing handler for every buffer in outgoing. Stop when\n * concurrency reaches.\n */\n private async triggerOutgoingHandlers() {\n let buffer: PooledBuffer | undefined;\n do {\n if (this.executingOutgoingHandlers >= this.concurrency) {\n return;\n }\n\n buffer = this.outgoing.shift();\n if (buffer) {\n this.triggerOutgoingHandler(buffer);\n }\n } while (buffer);\n }\n\n /**\n * Trigger a outgoing handler for a buffer shifted from outgoing.\n *\n * @param buffer -\n */\n private async triggerOutgoingHandler(buffer: PooledBuffer): Promise {\n const bufferLength = buffer.size;\n\n this.executingOutgoingHandlers++;\n this.offset += bufferLength;\n\n try {\n await this.outgoingHandler(\n () => buffer.getReadableStream(),\n bufferLength,\n this.offset - bufferLength\n );\n } catch (err: any) {\n this.emitter.emit(\"error\", err);\n return;\n }\n\n this.executingOutgoingHandlers--;\n this.reuseBuffer(buffer);\n this.emitter.emit(\"checkEnd\");\n }\n\n /**\n * Return buffer used by outgoing handler into incoming.\n *\n * @param buffer -\n */\n private reuseBuffer(buffer: PooledBuffer) {\n this.incoming.push(buffer);\n if (!this.isError && this.resolveData() && !this.isStreamEnd) {\n this.readable.resume();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js new file mode 100644 index 00000000..f035c027 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Readable } from "stream"; +/** + * This class generates a readable stream from the data in an array of buffers. + */ +export class BuffersStream extends Readable { + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. 
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } + else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } +} +//# sourceMappingURL=BuffersStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map new file mode 100644 index 00000000..b5b81872 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BuffersStream.js","sourceRoot":"","sources":["../../../../storage-common/src/BuffersStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,QAAQ,EAAmB,MAAM,QAAQ,CAAC;AAOnD;;GAEG;AACH,MAAM,OAAO,aAAc,SAAQ,QAAQ;IAgBzC;;;;;;OAMG;IACH,YACU,OAAiB,EACjB,UAAkB,EAC1B,OAA8B;QAE9B,KAAK,CAAC,OAAO,CAAC,CAAC;QAJP,YAAO,GAAP,OAAO,CAAU;QACjB,eAAU,GAAV,UAAU,CAAQ;QAI1B,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;QACnC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;QACrB,IAAI,CAAC,iBAAiB,GAAG,CAAC,CAAC;QAE3B,4DAA4D;QAC5D,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;YAC9B,aAAa,IAAI,GAAG,CAAC,UAAU,CAAC;SACjC;QACD,IAAI,aAAa,GAAG,IAAI,CAAC,UAAU,EAAE;YACnC,MAAM,IAAI,KAAK,CAAC,iEAAiE,CAAC,CAAC;SACpF;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,IAAa;QACxB,IAAI,IAAI,CAAC,iBAAiB,IAAI,IAAI,CAAC,UAAU,EAAE;YAC7C,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACjB;QAED,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,IAAI,CAAC,qBAAqB,CAAC;SACnC;QAED,MAAM,UAAU,GAAa,EAAE,CAAC;QAChC,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC,UAAU,EAAE;YAC3D,2DAA2D;YAC3D,MAAM,yBAAyB,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC;YAC3E,MAAM,6BAA6B,GACjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,UAAU,GAAG,IAAI,CAAC,yBAAyB,CAAC;YAC7E,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,6BAA6B,EAAE,yBAAyB,CAAC,CAAC;YACrF,IAAI,SAAS,GAAG,IAAI,GAAG,CAAC,EAAE;gBACxB,uBAAuB;gBACvB,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,IAAI,GAAG,CAAC,CAAC;gBACtD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;gBAC3F,IAAI,CAAC,iBAAiB,IAAI,IAAI,GAAG,CAAC,CAAC;gBACnC,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;gBACrC,CAAC,GAAG,IAAI,CAAC;gBACT,MAAM;aACP;iBAAM;gBACL,wBAAwB;gBACxB,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,SAAS,CAAC;gBACvD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;gBAC3F,IAAI,SAAS,KAAK,6BAA6B,EAAE;oBAC/C,4DAA4D;oBAC5D,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;oBACnC,IAAI,CAAC,WAAW,EAAE,CAAC;iBACpB;qBAAM;oBACL,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;iBACtC;gBACD,IAAI,CAAC,iBAAiB,IAAI,SAAS,CAAC;gBACpC,CAAC,IAAI,SAAS,CAAC;aAChB;SACF;QAED,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;SACtC;aAAM,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable, ReadableOptions } from \"stream\";\n\n/**\n * Options to configure the BuffersStream.\n */\nexport interface BuffersStreamOptions extends ReadableOptions {}\n\n/**\n * This class generates a readable stream from the data in an array of buffers.\n */\nexport class BuffersStream extends Readable {\n /**\n * The offset of data to be read in the current buffer.\n */\n private byteOffsetInCurrentBuffer: number;\n\n /**\n * The index of buffer to be read in the array of buffers.\n */\n private bufferIndex: number;\n\n /**\n * The total length of data already read.\n */\n private pushedBytesLength: number;\n\n /**\n * Creates an instance of BuffersStream that will emit the data\n * contained in the array of buffers.\n *\n * @param buffers - Array of buffers containing the data\n * @param byteLength - The total length of data contained in the buffers\n */\n constructor(\n private buffers: Buffer[],\n private byteLength: number,\n options?: BuffersStreamOptions\n ) {\n super(options);\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex = 0;\n 
this.pushedBytesLength = 0;\n\n // check byteLength is no larger than buffers[] total length\n let buffersLength = 0;\n for (const buf of this.buffers) {\n buffersLength += buf.byteLength;\n }\n if (buffersLength < this.byteLength) {\n throw new Error(\"Data size shouldn't be larger than the total length of buffers.\");\n }\n }\n\n /**\n * Internal _read() that will be called when the stream wants to pull more data in.\n *\n * @param size - Optional. The size of data to be read\n */\n public _read(size?: number) {\n if (this.pushedBytesLength >= this.byteLength) {\n this.push(null);\n }\n\n if (!size) {\n size = this.readableHighWaterMark;\n }\n\n const outBuffers: Buffer[] = [];\n let i = 0;\n while (i < size && this.pushedBytesLength < this.byteLength) {\n // The last buffer may be longer than the data it contains.\n const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;\n const remainingCapacityInThisBuffer =\n this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;\n const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);\n if (remaining > size - i) {\n // chunkSize = size - i\n const end = this.byteOffsetInCurrentBuffer + size - i;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n this.pushedBytesLength += size - i;\n this.byteOffsetInCurrentBuffer = end;\n i = size;\n break;\n } else {\n // chunkSize = remaining\n const end = this.byteOffsetInCurrentBuffer + remaining;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n if (remaining === remainingCapacityInThisBuffer) {\n // this.buffers[this.bufferIndex] used up, shift to next one\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex++;\n } else {\n this.byteOffsetInCurrentBuffer = end;\n }\n this.pushedBytesLength += remaining;\n i += remaining;\n }\n }\n\n if (outBuffers.length > 1) {\n this.push(Buffer.concat(outBuffers));\n } else if (outBuffers.length === 1) {\n this.push(outBuffers[0]);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js new file mode 100644 index 00000000..b1539054 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BuffersStream } from "./BuffersStream"; +/** + * maxBufferLength is max size of each buffer in the pooled buffers. + */ +// Can't use import as Typescript doesn't recognize "buffer". +const maxBufferLength = require("buffer").constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +export class PooledBuffer { + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. 
+ */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); + } +} +//# sourceMappingURL=PooledBuffer.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map new file mode 100644 index 00000000..e2e64153 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"PooledBuffer.js","sourceRoot":"","sources":["../../../../storage-common/src/PooledBuffer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGhD;;GAEG;AACH,6DAA6D;AAC7D,MAAM,eAAe,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC;AAE/D;;;;;;;GAOG;AACH,MAAM,OAAO,YAAY;IA4CvB,YAAY,QAAgB,EAAE,OAAkB,EAAE,WAAoB;QA3CtE;;;WAGG;QACK,YAAO,GAAa,EAAE,CAAC;QAwC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;QAEf,WAAW;QACX,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC;QACxD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;YAClC,IAAI,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC;YAC7E,IAAI,GAAG,KAAK,CAAC,EAAE;gBACb,GAAG,GAAG,eAAe,CAAC;aACvB;YACD,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;SAC5C;QAED,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,WAAY,CAAC,CAAC;SAClC;IACH,CAAC;IA5CD;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAyCD;;;;;;;;OAQG;IACI,IAAI,CAAC,OAAiB,EAAE,WAAmB;QAChD,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAElD,IAAI,CAAC,GAAG,CAAC,EACP,CAAC,GAAG,CAAC,EACL,YAAY,GAAG,CAAC,EAChB,YAAY,GAAG,CAAC,EAChB,cAAc,GAAG,CAAC,CAAC;QACrB,OAAO,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE;YAClC,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YAC/B,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,YAAY,EAAE,YAAY,CAAC,CAAC;YAElE,cAAc,IAAI,SAAS,CAAC;YAC5B,YAAY,IAAI,SAAS,CAAC;YAC1B,YAAY,IAAI,SAAS,CAAC;YAC1B,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;gBAClC,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;aAClB;YACD,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;gBAClC,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;aAClB;SACF;QAED,mCAAmC;QACnC,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACrB,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;YACtB,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SAC7C;IACH,CAAC;IAED;;;OAGG;IACI,iBAAiB;QACtB,OAAO,IAAI,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;IACpD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BuffersStream } from \"./BuffersStream\";\nimport { Readable } from \"stream\";\n\n/**\n * maxBufferLength is max size of each buffer in the pooled buffers.\n */\n// Can't use import as Typescript doesn't recognize \"buffer\".\nconst maxBufferLength = require(\"buffer\").constants.MAX_LENGTH;\n\n/**\n * This class provides a buffer container which conceptually has no hard size limit.\n * It accepts a capacity, an array of input buffers and the total length of input data.\n * It will allocate an internal \"buffer\" of the capacity and fill the data in the input buffers\n * into the internal \"buffer\" serially with respect to the total length.\n * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream\n * assembled from all the data in the internal \"buffer\".\n */\nexport class PooledBuffer {\n /**\n * Internal buffers used to keep the data.\n * Each buffer has a length of the maxBufferLength except last one.\n */\n private buffers: Buffer[] = [];\n\n /**\n * The total size of internal buffers.\n */\n private readonly capacity: number;\n\n /**\n * The total size of data contained in internal buffers.\n */\n private _size: number;\n\n /**\n * The size of the data contained in the pooled buffers.\n */\n public get size(): number {\n return this._size;\n }\n\n /**\n * Creates an instance of PooledBuffer with given 
capacity.\n * Internal buffers are allocated but contains no data.\n * Users may call the {@link PooledBuffer.fill} method to fill this\n * pooled buffer with data.\n *\n * @param capacity - Total capacity of the internal buffers\n */\n constructor(capacity: number);\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated and filled with data in the input buffers serially\n * with respect to the total length.\n *\n * @param capacity - Total capacity of the internal buffers\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n */\n constructor(capacity: number, buffers: Buffer[], totalLength: number);\n constructor(capacity: number, buffers?: Buffer[], totalLength?: number) {\n this.capacity = capacity;\n this._size = 0;\n\n // allocate\n const bufferNum = Math.ceil(capacity / maxBufferLength);\n for (let i = 0; i < bufferNum; i++) {\n let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;\n if (len === 0) {\n len = maxBufferLength;\n }\n this.buffers.push(Buffer.allocUnsafe(len));\n }\n\n if (buffers) {\n this.fill(buffers, totalLength!);\n }\n }\n\n /**\n * Fill the internal buffers with data in the input buffers serially\n * with respect to the total length and the total capacity of the internal buffers.\n * Data copied will be shift out of the input buffers.\n *\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n *\n */\n public fill(buffers: Buffer[], totalLength: number) {\n this._size = Math.min(this.capacity, totalLength);\n\n let i = 0,\n j = 0,\n targetOffset = 0,\n sourceOffset = 0,\n totalCopiedNum = 0;\n while (totalCopiedNum < this._size) {\n const source = buffers[i];\n const target = this.buffers[j];\n const copiedNum = source.copy(target, targetOffset, sourceOffset);\n\n totalCopiedNum += copiedNum;\n sourceOffset += copiedNum;\n targetOffset += copiedNum;\n if (sourceOffset === source.length) {\n i++;\n sourceOffset = 0;\n }\n if (targetOffset === target.length) {\n j++;\n targetOffset = 0;\n }\n }\n\n // clear copied from source buffers\n buffers.splice(0, i);\n if (buffers.length > 0) {\n buffers[0] = buffers[0].slice(sourceOffset);\n }\n }\n\n /**\n * Get the readable stream assembled from all the data in the internal buffers.\n *\n */\n public getReadableStream(): Readable {\n return new BuffersStream(this.buffers, this.size);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js new file mode 100644 index 00000000..3eca4771 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export * from "./BufferScheduler.browser"; +//# sourceMappingURL=index.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map new file mode 100644 index 00000000..fb2fafa7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../../storage-common/src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./BufferScheduler.browser\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js new file mode 100644 index 00000000..993a2639 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export * from "./BufferScheduler"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map new file mode 100644 index 00000000..9538bda2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../storage-common/src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,mBAAmB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./BufferScheduler\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js new file mode 100644 index 00000000..e7e5f2f0 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export const AVRO_SYNC_MARKER_SIZE = 16; +export const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +export const AVRO_CODEC_KEY = "avro.codec"; +export const AVRO_SCHEMA_KEY = "avro.schema"; +//# sourceMappingURL=AvroConstants.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js.map new file mode 100644 index 00000000..663b8f22 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroConstants.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroConstants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,CAAC,MAAM,qBAAqB,GAAW,EAAE,CAAC;AAChD,MAAM,CAAC,MAAM,eAAe,GAAe,IAAI,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;AAC5E,MAAM,CAAC,MAAM,cAAc,GAAW,YAAY,CAAC;AACnD,MAAM,CAAC,MAAM,eAAe,GAAW,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const AVRO_SYNC_MARKER_SIZE: number = 16;\nexport const AVRO_INIT_BYTES: Uint8Array = new Uint8Array([79, 98, 106, 1]);\nexport const AVRO_CODEC_KEY: string = \"avro.codec\";\nexport const AVRO_SCHEMA_KEY: string = \"avro.schema\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js new file mode 100644 index 00000000..da66dc53 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js @@ -0,0 +1,311 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; + } + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; + } + else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. 
+ const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +export class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (err) { + // eslint-disable-line no-empty + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new 
Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } + } +} +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; + } + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } + } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; + } +} +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); + } +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; + } +} +//# sourceMappingURL=AvroParser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js.map new file mode 100644 index 00000000..d4a55bc2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"AvroParser.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroParser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAqBlC,MAAM,OAAO,UAAU;IACrB;;;;;;OAMG;IACI,MAAM,CAAC,KAAK,CAAC,cAAc,CAChC,MAAoB,EACpB,MAAc,EACd,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;QAC9E,IAAI,KAAK,CAAC,MAAM,KAAK,MAAM,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;SACpC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;OAKG;IACK,MAAM,CAAC,KAAK,CAAC,QAAQ,CAC3B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;QAChE,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC;IAChB,CAAC;IAED,6DAA6D;IAC7D,8EAA8E;IAC9E,oFAAoF;IAC5E,MAAM,CAAC,KAAK,CAAC,cAAc,CACjC,MAAoB,EACpB,UAAiC,EAAE;QAEnC,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,IAAI,iBAAiB,GAAG,CAAC,CAAC;QAC1B,IAAI,IAAI,EAAE,YAAY,EAAE,mBAAmB,CAAC;QAE5C,GAAG;YACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAClD,YAAY,GAAG,IAAI,GAAG,IAAI,CAAC;YAC3B,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,iBAAiB,CAAC;YACpD,iBAAiB,IAAI,CAAC,CAAC;SACxB,QAAQ,YAAY,IAAI,iBAAiB,GAAG,EAAE,EAAE,CAAC,mDAAmD;QAErG,IAAI,YAAY,EAAE;YAChB,6BAA6B;YAC7B,0CAA0C;YAC1C,aAAa,GAAG,aAAa,CAAC;YAC9B,mBAAmB,GAAG,SAAS,CAAC,CAAC,WAAW;YAC5C,GAAG;gBACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAClD,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,mBAAmB,CAAC;gBACrD,mBAAmB,IAAI,GAAG,CAAC,CAAC,SAAS;aACtC,QAAQ,IAAI,GAAG,IAAI,EAAE;YAEtB,MAAM,GAAG,GAAG,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC;YAC3E,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,EAAE;gBAClE,MAAM,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;aACtC;YACD,OAAO,GAAG,CAAC;SACZ;QAED,OAAO,CAAC,aAAa,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC;IACrD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,QAAQ,CAC1B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAO,CACzB,MAAoB,EACpB,UAAiC,EAAE;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,QAAQ;QAC1B,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,WAAW,CAC7B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,CAAC,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,CAAC,KAAK,CAAC,EAAE;YACX,OAAO,IAAI,CAAC;SACb;aAAM,IAAI,CAAC,KAAK,CAAC,EAAE;YAClB,OAAO,KAAK,CAAC;SACd;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;SAC5C;IACH,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;QAClE,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,sBAAsB;IACzD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;QAClE,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,sBAAsB;IACzD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACxD,IAAI,IAAI,GAAG,CAAC,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;SAC7C;QAED,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;IACjE,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC1D,MAAM,WAAW,GAAG,IAAI,WAA
W,EAAE,CAAC;QACtC,OAAO,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IACnC,CAAC;IAEO,MAAM,CAAC,KAAK,CAAC,WAAW,CAC9B,MAAoB,EACpB,cAAgF,EAChF,UAAiC,EAAE;QAEnC,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACzD,0GAA0G;QAC1G,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACpD,OAAO,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC;IACxB,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAO,CACzB,MAAoB,EACpB,cAAgF,EAChF,UAAiC,EAAE;QAEnC,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,OAA8B,EAAE,EACN,EAAE;YAC5B,OAAO,UAAU,CAAC,WAAW,CAAC,CAAC,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;QACzD,CAAC,CAAC;QAEF,MAAM,KAAK,GAAsB,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;QAE7F,MAAM,IAAI,GAAsB,EAAE,CAAC;QACnC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAEO,MAAM,CAAC,KAAK,CAAC,SAAS,CAC5B,MAAoB,EACpB,cAAgF,EAChF,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAQ,EAAE,CAAC;QACtB,KACE,IAAI,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EACtD,KAAK,KAAK,CAAC,EACX,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EAClD;YACA,IAAI,KAAK,GAAG,CAAC,EAAE;gBACb,qBAAqB;gBACrB,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAC3C,KAAK,GAAG,CAAC,KAAK,CAAC;aAChB;YAED,OAAO,KAAK,EAAE,EAAE;gBACd,MAAM,IAAI,GAAM,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBACtD,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAClB;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AAOD,IAAK,WAOJ;AAPD,WAAK,WAAW;IACd,gCAAiB,CAAA;IACjB,4BAAa,CAAA;IACb,8BAAe,CAAA;IACf,0BAAW,CAAA;IACX,8BAAe,CAAA;IACf,8BAAe,CAAA;AACjB,CAAC,EAPI,WAAW,KAAX,WAAW,QAOf;AAYD,IAAK,aASJ;AATD,WAAK,aAAa;IAChB,8BAAa,CAAA;IACb,oCAAmB,CAAA;IACnB,4BAAW,CAAA;IACX,8BAAa,CAAA;IACb,gCAAe,CAAA;IACf,kCAAiB,CAAA;IACjB,gCAAe,CAAA;IACf,kCAAiB,CAAA;AACnB,CAAC,EATI,aAAa,KAAb,aAAa,QASjB;AAED,MAAM,OAAgB,QAAQ;IAS5B;;OAEG;IACI,MAAM,CAAC,UAAU,CAAC,MAAuB;QAC9C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;SAC1C;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;YAChC,OAAO,QAAQ,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAsB,CAAC,CAAC;SAC1D;IACH,CAAC;IAEO,MAAM,CAAC,gBAAgB,CAAC,MAAc;QAC5C,QAAQ,MAAM,EAAE;YACd,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,OAAO,CAAC;YAC3B,KAAK,aAAa,CAAC,GAAG,CAAC;YACvB,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM,CAAC;YAC1B,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,IAAI,iBAAiB,CAAC,MAAuB,CAAC,CAAC;YACxD;gBACE,MAAM,IAAI,KAAK,CAAC,wBAAwB,MAAM,EAAE,CAAC,CAAC;SACrD;IACH,CAAC;IAEO,MAAM,CAAC,eAAe,CAAC,MAAa;QAC1C,OAAO,IAAI,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC5D,CAAC;IAEO,MAAM,CAAC,gBAAgB,CAAC,MAAoB;QAClD,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC;QACzB,kDAAkD;QAClD,IAAI;YACF,OAAO,QAAQ,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;SACxC;QAAC,OAAO,GAAQ,EAAE;YACjB,+BAA+B;SAChC;QAED,QAAQ,IAAI,EAAE;YACZ,KAAK,WAAW,CAAC,MAAM;gBACrB,IAAI,MAAM,CAAC,OAAO,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,EAAE,CAAC,CAAC;iBAC1E;gBACD,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;oBAChB,MAAM,IAAI,KAAK,CAAC,sDAAsD,MAAM,EAAE,CAAC,CAAC;iBACjF;gBAED,gDAAgD;gBAChD,MAAM,MAAM,GAA6B,EAAE,CAAC;gBAC5C,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,EAAE,CAAC,CAAC;iBACnF;gBACD,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,MAAM,EAAE;oBACjC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;iBACtD;gBACD,OAAO,IAAI,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;YACjD,KAAK,WAAW,CAAC,IAAI;gBACnB,IAAI,MAAM,CAAC,OAAO,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,EAAE,CAAC,CAAC;iBAC1E;gBACD,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;oBACnB,MAAM,IAAI,KAAK,CAAC,yDAAyD,MAAM,EAAE,CAA
C,CAAC;iBACpF;gBACD,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YAC1C,KAAK,WAAW,CAAC,GAAG;gBAClB,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,EAAE,CAAC,CAAC;iBACnF;gBACD,OAAO,IAAI,WAAW,CAAC,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;YAC7D,KAAK,WAAW,CAAC,KAAK,CAAC,CAAC,eAAe;YACvC,KAAK,WAAW,CAAC,KAAK,CAAC,CAAC,eAAe;YACvC;gBACE,MAAM,IAAI,KAAK,CAAC,wBAAwB,IAAI,OAAO,MAAM,EAAE,CAAC,CAAC;SAChE;IACH,CAAC;CACF;AAED,MAAM,iBAAkB,SAAQ,QAAQ;IAGtC,YAAY,SAAwB;QAClC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAEM,IAAI,CACT,MAAoB,EACpB,UAAiC,EAAE;QAEnC,QAAQ,IAAI,CAAC,UAAU,EAAE;YACvB,KAAK,aAAa,CAAC,IAAI;gBACrB,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;YAC/B,KAAK,aAAa,CAAC,OAAO;gBACxB,OAAO,UAAU,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YACjD,KAAK,aAAa,CAAC,GAAG;gBACpB,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC7C,KAAK,aAAa,CAAC,IAAI;gBACrB,OAAO,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC9C,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAChD,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAChD;gBACE,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAC;SAC7C;IACH,CAAC;CACF;AAED,MAAM,YAAa,SAAQ,QAAQ;IAGjC,YAAY,OAAiB;QAC3B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;IAC1B,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACzE,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACxD,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC9B,CAAC;CACF;AAED,MAAM,aAAc,SAAQ,QAAQ;IAGlC,YAAY,KAAiB;QAC3B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;IACtB,CAAC;IAEM,KAAK,CAAC,IAAI,CACf,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACtD,CAAC;CACF;AAED,MAAM,WAAY,SAAQ,QAAQ;IAGhC,YAAY,QAAkB;QAC5B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAEM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACnE,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,IAA4B,EACJ,EAAE;YAC1B,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;QACtC,CAAC,CAAC;QACF,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;CACF;AAED,MAAM,cAAe,SAAQ,QAAQ;IAInC,YAAY,MAAgC,EAAE,IAAY;QACxD,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;QACtB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;IACpB,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACzE,MAAM,MAAM,GAAkC,EAAE,CAAC;QACjD,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;QAC/B,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;YAC9B,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;gBAC3D,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;aAC7D;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of the Object usage and non-interfaces\n/* eslint-disable @typescript-eslint/ban-types, @azure/azure-sdk/ts-use-interface-parameters */\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { KeyValuePair } from \"./utils/utils.common\";\n\n/**\n * Options to configure the AvroParser read methods.\n * See {@link AvroParser.readFixedBytes}, {@link AvroParser.readMap} and etc.\n */\ninterface AvroParserReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to 
signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroParser {\n /**\n * Reads a fixed number of bytes from the stream.\n *\n * @param stream -\n * @param length -\n * @param options -\n */\n public static async readFixedBytes(\n stream: AvroReadable,\n length: number,\n options: AvroParserReadOptions = {}\n ): Promise {\n const bytes = await stream.read(length, { abortSignal: options.abortSignal });\n if (bytes.length !== length) {\n throw new Error(\"Hit stream end.\");\n }\n return bytes;\n }\n\n /**\n * Reads a single byte from the stream.\n *\n * @param stream -\n * @param options -\n */\n private static async readByte(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const buf = await AvroParser.readFixedBytes(stream, 1, options);\n return buf[0];\n }\n\n // int and long are stored in variable-length zig-zag coding.\n // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt\n // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types\n private static async readZigZagLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n let zigZagEncoded = 0;\n let significanceInBit = 0;\n let byte, haveMoreByte, significanceInFloat;\n\n do {\n byte = await AvroParser.readByte(stream, options);\n haveMoreByte = byte & 0x80;\n zigZagEncoded |= (byte & 0x7f) << significanceInBit;\n significanceInBit += 7;\n } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers\n\n if (haveMoreByte) {\n // Switch to float arithmetic\n // eslint-disable-next-line no-self-assign\n zigZagEncoded = zigZagEncoded;\n significanceInFloat = 268435456; // 2 ** 28.\n do {\n byte = await AvroParser.readByte(stream, options);\n zigZagEncoded += (byte & 0x7f) * significanceInFloat;\n significanceInFloat *= 128; // 2 ** 7\n } while (byte & 0x80);\n\n const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2;\n if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {\n throw new Error(\"Integer overflow.\");\n }\n return res;\n }\n\n return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1);\n }\n\n public static async readLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readInt(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readNull(): Promise {\n return null;\n }\n\n public static async readBoolean(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const b = await AvroParser.readByte(stream, options);\n if (b === 1) {\n return true;\n } else if (b === 0) {\n return false;\n } else {\n throw new Error(\"Byte was not a boolean.\");\n }\n }\n\n public static async readFloat(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 4, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat32(0, true); // littleEndian = true\n }\n\n public static async readDouble(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 8, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat64(0, true); // littleEndian = true\n }\n\n public static async readBytes(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const size = await AvroParser.readLong(stream, options);\n if (size < 0) {\n throw new Error(\"Bytes size was negative.\");\n }\n\n return stream.read(size, { abortSignal: options.abortSignal });\n }\n\n public static async readString(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readBytes(stream, options);\n const utf8decoder = new TextDecoder();\n return utf8decoder.decode(u8arr);\n }\n\n private static async readMapPair(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const key = await AvroParser.readString(stream, options);\n // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter.\n const value = await readItemMethod(stream, options);\n return { key, value };\n }\n\n public static async readMap(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const readPairMethod = (\n s: AvroReadable,\n opts: AvroParserReadOptions = {}\n ): Promise> => {\n return AvroParser.readMapPair(s, readItemMethod, opts);\n };\n\n const pairs: KeyValuePair[] = await AvroParser.readArray(stream, readPairMethod, options);\n\n const dict: Record = {};\n for (const pair of pairs) {\n dict[pair.key] = pair.value;\n }\n return dict;\n }\n\n private static async readArray(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise {\n const items: T[] = [];\n for (\n let count = await AvroParser.readLong(stream, options);\n count !== 0;\n count = await AvroParser.readLong(stream, options)\n ) {\n if (count < 0) {\n 
// Ignore block sizes\n await AvroParser.readLong(stream, options);\n count = -count;\n }\n\n while (count--) {\n const item: T = await readItemMethod(stream, options);\n items.push(item);\n }\n }\n return items;\n }\n}\n\ninterface RecordField {\n name: string;\n type: string | ObjectSchema | (string | ObjectSchema)[]; // Unions may not immediately contain other unions.\n}\n\nenum AvroComplex {\n RECORD = \"record\",\n ENUM = \"enum\",\n ARRAY = \"array\",\n MAP = \"map\",\n UNION = \"union\",\n FIXED = \"fixed\",\n}\n\ninterface ObjectSchema {\n type: Exclude;\n name?: string;\n aliases?: string;\n fields?: RecordField[];\n symbols?: string[];\n values?: string;\n size?: number;\n}\n\nenum AvroPrimitive {\n NULL = \"null\",\n BOOLEAN = \"boolean\",\n INT = \"int\",\n LONG = \"long\",\n FLOAT = \"float\",\n DOUBLE = \"double\",\n BYTES = \"bytes\",\n STRING = \"string\",\n}\n\nexport abstract class AvroType {\n /**\n * Reads an object from the stream.\n */\n public abstract read(\n stream: AvroReadable,\n options?: AvroParserReadOptions\n ): Promise; // eslint-disable-line @typescript-eslint/ban-types\n\n /**\n * Determines the AvroType from the Avro Schema.\n */\n public static fromSchema(schema: string | Object): AvroType {\n if (typeof schema === \"string\") {\n return AvroType.fromStringSchema(schema);\n } else if (Array.isArray(schema)) {\n return AvroType.fromArraySchema(schema);\n } else {\n return AvroType.fromObjectSchema(schema as ObjectSchema);\n }\n }\n\n private static fromStringSchema(schema: string): AvroType {\n switch (schema) {\n case AvroPrimitive.NULL:\n case AvroPrimitive.BOOLEAN:\n case AvroPrimitive.INT:\n case AvroPrimitive.LONG:\n case AvroPrimitive.FLOAT:\n case AvroPrimitive.DOUBLE:\n case AvroPrimitive.BYTES:\n case AvroPrimitive.STRING:\n return new AvroPrimitiveType(schema as AvroPrimitive);\n default:\n throw new Error(`Unexpected Avro type ${schema}`);\n }\n }\n\n private static fromArraySchema(schema: any[]): AvroType {\n return new AvroUnionType(schema.map(AvroType.fromSchema));\n }\n\n private static fromObjectSchema(schema: ObjectSchema): AvroType {\n const type = schema.type;\n // Primitives can be defined as strings or objects\n try {\n return AvroType.fromStringSchema(type);\n } catch (err: any) {\n // eslint-disable-line no-empty\n }\n\n switch (type) {\n case AvroComplex.RECORD:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.name) {\n throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`);\n }\n\n // eslint-disable-next-line no-case-declarations\n const fields: Record = {};\n if (!schema.fields) {\n throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`);\n }\n for (const field of schema.fields) {\n fields[field.name] = AvroType.fromSchema(field.type);\n }\n return new AvroRecordType(fields, schema.name);\n case AvroComplex.ENUM:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.symbols) {\n throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`);\n }\n return new AvroEnumType(schema.symbols);\n case AvroComplex.MAP:\n if (!schema.values) {\n throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`);\n }\n return new AvroMapType(AvroType.fromSchema(schema.values));\n case AvroComplex.ARRAY: // Unused today\n case AvroComplex.FIXED: // Unused today\n default:\n throw new Error(`Unexpected Avro type ${type} in 
${schema}`);\n }\n }\n}\n\nclass AvroPrimitiveType extends AvroType {\n private _primitive: AvroPrimitive;\n\n constructor(primitive: AvroPrimitive) {\n super();\n this._primitive = primitive;\n }\n\n public read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n switch (this._primitive) {\n case AvroPrimitive.NULL:\n return AvroParser.readNull();\n case AvroPrimitive.BOOLEAN:\n return AvroParser.readBoolean(stream, options);\n case AvroPrimitive.INT:\n return AvroParser.readInt(stream, options);\n case AvroPrimitive.LONG:\n return AvroParser.readLong(stream, options);\n case AvroPrimitive.FLOAT:\n return AvroParser.readFloat(stream, options);\n case AvroPrimitive.DOUBLE:\n return AvroParser.readDouble(stream, options);\n case AvroPrimitive.BYTES:\n return AvroParser.readBytes(stream, options);\n case AvroPrimitive.STRING:\n return AvroParser.readString(stream, options);\n default:\n throw new Error(\"Unknown Avro Primitive\");\n }\n }\n}\n\nclass AvroEnumType extends AvroType {\n private readonly _symbols: string[];\n\n constructor(symbols: string[]) {\n super();\n this._symbols = symbols;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const value = await AvroParser.readInt(stream, options);\n return this._symbols[value];\n }\n}\n\nclass AvroUnionType extends AvroType {\n private readonly _types: AvroType[];\n\n constructor(types: AvroType[]) {\n super();\n this._types = types;\n }\n\n public async read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise { // eslint-disable-line @typescript-eslint/ban-types\n const typeIndex = await AvroParser.readInt(stream, options);\n return this._types[typeIndex].read(stream, options);\n }\n}\n\nclass AvroMapType extends AvroType {\n private readonly _itemType: AvroType;\n\n constructor(itemType: AvroType) {\n super();\n this._itemType = itemType;\n }\n\n public read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const readItemMethod = (\n s: AvroReadable,\n opts?: AvroParserReadOptions\n ): Promise => { \n return this._itemType.read(s, opts);\n };\n return AvroParser.readMap(stream, readItemMethod, options);\n }\n}\n\nclass AvroRecordType extends AvroType {\n private readonly _name: string;\n private readonly _fields: Record;\n\n constructor(fields: Record, name: string) {\n super();\n this._fields = fields;\n this._name = name;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const record: Record = {};\n record[\"$schema\"] = this._name;\n for (const key in this._fields) {\n if (Object.prototype.hasOwnProperty.call(this._fields, key)) {\n record[key] = await this._fields[key].read(stream, options);\n }\n }\n return record;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js new file mode 100644 index 00000000..86ac741f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export class AvroReadable { +} +//# sourceMappingURL=AvroReadable.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js.map new file mode 100644 index 00000000..2a421a46 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReadable.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReadable.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAelC,MAAM,OAAgB,YAAY;CAGjC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options to configure the {@link AvroReadable.read} operation.\n */\nexport interface AvroReadableReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport abstract class AvroReadable {\n public abstract get position(): number;\n public abstract read(size: number, options?: AvroReadableReadOptions): Promise;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js new file mode 100644 index 00000000..5d58a863 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { AvroReadable } from "./AvroReadable"; +import { AbortError } from "@azure/abort-controller"; +const ABORT_ERROR = new AbortError("Reading from the avro blob was aborted."); +export class AvroReadableFromBlob extends AvroReadable { + constructor(blob) { + super(); + this._blob = blob; + this._position = 0; + } + get position() { + return this._position; + } + async read(size, options = {}) { + size = Math.min(size, this._blob.size - this._position); + if (size <= 0) { + return new Uint8Array(); + } + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + function cleanUp() { + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + } + function abortHandler() { + fileReader.abort(); + cleanUp(); + reject(ABORT_ERROR); + } + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + fileReader.onloadend = (ev) => { + cleanUp(); + resolve(new Uint8Array(ev.target.result)); + }; + fileReader.onerror = () => { + cleanUp(); + reject(); + }; + fileReader.readAsArrayBuffer(this._blob.slice(this._position, (this._position += size))); + }); + } +} +//# sourceMappingURL=AvroReadableFromBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js.map new file mode 100644 index 00000000..240fbb16 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReadableFromBlob.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReadableFromBlob.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAA2B,MAAM,gBAAgB,CAAC;AACvE,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAErD,MAAM,WAAW,GAAG,IAAI,UAAU,CAAC,yCAAyC,CAAC,CAAC;AAE9E,MAAM,OAAO,oBAAqB,SAAQ,YAAY;IAIpD,YAAY,IAAU;QACpB,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;IACrB,CAAC;IAED,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,IAAY,EAAE,UAAmC,EAAE;QACnE,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC;QACxD,IAAI,IAAI,IAAI,CAAC,EAAE;YACb,OAAO,IAAI,UAAU,EAAE,CAAC;SACzB;QAED,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;QACpC,OAAO,IAAI,OAAO,CAAa,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAEjD,SAAS,OAAO;gBACd,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,OAAO,CAAC,WAAY,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;iBACjE;YACH,CAAC;YAED,SAAS,YAAY;gBACnB,UAAU,CAAC,KAAK,EAAE,CAAC;gBACnB,OAAO,EAAE,CAAC;gBACV,MAAM,CAAC,WAAW,CAAC,CAAC;YACtB,CAAC;YAED,IAAI,OAAO,CAAC,WAAW,EAAE;gBACvB,OAAO,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;aAC7D;YAED,UAAU,CAAC,SAAS,GAAG,CAAC,EAAO,EAAE,EAAE;gBACjC,OAAO,EAAE,CAAC;gBACV,OAAO,CAAC,IAAI,UAAU,CAAC,EAAE,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC,CAAC;YAC7C,CAAC,CAAC;YAEF,UAAU,CAAC,OAAO,GAAG,GAAG,EAAE;gBACxB,OAAO,EAAE,CAAC;gBACV,MAAM,EAAE,CAAC;YACX,CAAC,CAAC;YAEF,UAAU,CAAC,iBAAiB,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC;QAC3F,CAAC,CAAC,CAAC;IACL,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable, AvroReadableReadOptions } from \"./AvroReadable\";\nimport { AbortError } from \"@azure/abort-controller\";\n\nconst ABORT_ERROR = new AbortError(\"Reading from the avro blob was aborted.\");\n\nexport 
class AvroReadableFromBlob extends AvroReadable {\n private _position: number;\n private _blob: Blob;\n\n constructor(blob: Blob) {\n super();\n this._blob = blob;\n this._position = 0;\n }\n\n public get position(): number {\n return this._position;\n }\n\n public async read(size: number, options: AvroReadableReadOptions = {}): Promise {\n size = Math.min(size, this._blob.size - this._position);\n if (size <= 0) {\n return new Uint8Array();\n }\n\n const fileReader = new FileReader();\n return new Promise((resolve, reject) => {\n\n function cleanUp(): void {\n if (options.abortSignal) {\n options.abortSignal!.removeEventListener(\"abort\", abortHandler);\n }\n }\n\n function abortHandler(): void {\n fileReader.abort();\n cleanUp();\n reject(ABORT_ERROR);\n }\n\n if (options.abortSignal) {\n options.abortSignal.addEventListener(\"abort\", abortHandler);\n }\n\n fileReader.onloadend = (ev: any) => {\n cleanUp();\n resolve(new Uint8Array(ev.target!.result));\n };\n\n fileReader.onerror = () => {\n cleanUp();\n reject();\n };\n\n fileReader.readAsArrayBuffer(this._blob.slice(this._position, (this._position += size)));\n });\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js new file mode 100644 index 00000000..197fbd3f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AvroReadable } from "./AvroReadable"; +import { AbortError } from "@azure/abort-controller"; +const ABORT_ERROR = new AbortError("Reading from the avro stream was aborted."); +export class AvroReadableFromStream extends AvroReadable { + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return this.toUint8Array(chunk); + } + else { + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. 
+ resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); + } + } +} +//# sourceMappingURL=AvroReadableFromStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js.map new file mode 100644 index 00000000..9cba2170 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReadableFromStream.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReadableFromStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAA2B,MAAM,gBAAgB,CAAC;AACvE,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAErD,MAAM,WAAW,GAAG,IAAI,UAAU,CAAC,2CAA2C,CAAC,CAAC;AAEhF,MAAM,OAAO,sBAAuB,SAAQ,YAAY;IAWtD,YAAY,QAA+B;QACzC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAC1B,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;IACrB,CAAC;IAXO,YAAY,CAAC,IAAqB;QACxC,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SAC1B;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAOD,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IACM,KAAK,CAAC,IAAI,CAAC,IAAY,EAAE,UAAmC,EAAE;;QACnE,IAAI,MAAA,OAAO,CAAC,WAAW,0CAAE,OAAO,EAAE;YAChC,MAAM,WAAW,CAAC;SACnB;QAED,IAAI,IAAI,GAAG,CAAC,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,sCAAsC,IAAI,EAAE,CAAC,CAAC;SAC/D;QAED,IAAI,IAAI,KAAK,CAAC,EAAE;YACd,OAAO,IAAI,UAAU,EAAE,CAAC;SACzB;QAED,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE;YAC5B,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QACD,uCAAuC;QACvC,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACxC,IAAI,KAAK,EAAE;YACT,IAAI,CAAC,SAAS,IAAI,KAAK,CAAC,MAAM,CAAC;YAC/B,gEAAgE;YAChE,OAAO,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;SACjC;aAAM;YACL,oDAAoD;YACpD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACrC,4DAA4D;gBAC5D,MAAM,OAAO,GAAe,GAAG,EAAE;oBAC/B,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;oBAC5D,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBACvD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;oBACrD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBAEvD,IAAI,OAAO,CAAC,WAAW,EAAE;wBACvB,OAAO,CAAC,WAAY,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;qBACjE;gBACH,CAAC,CAAC;gBAEF,MAAM,gBAAgB,GAAe,GAAG,EAAE;oBACxC,MAAM,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAChD,IAAI,aAAa,EAAE;wBACjB,IAAI,CAAC,SAAS,IAAI,aAAa,CAAC,MAAM,CAAC;wBACvC,OAAO,EAAE,CAAC;wBACV,wEAAwE;wBACxE,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC,CAAC;qBAC3C;gBACH,CAAC,CAAC;gBAEF,MAAM,cAAc,GAAe,GAAG,EAAE;oBACtC,OAAO,EAAE,CAAC;oBACV,MAAM,EAAE,CAAC;gBACX,CAAC,CAAC;gBAEF,MAAM,YAAY,GAAe,GAAG,EAAE;oBACpC,OAAO,EAAE,CAAC;oBACV,MAAM,CAAC,WAAW,CAAC,CAAC;gBACtB,CAAC,CAAC;gBAEF,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAChD,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;gBAC3C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,OAAO,CAAC,WAAY,CAAC,
gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;iBAC9D;gBACD,2DAA2D;YAC7D,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable, AvroReadableReadOptions } from \"./AvroReadable\";\nimport { AbortError } from \"@azure/abort-controller\";\n\nconst ABORT_ERROR = new AbortError(\"Reading from the avro stream was aborted.\");\n\nexport class AvroReadableFromStream extends AvroReadable {\n private _position: number;\n private _readable: NodeJS.ReadableStream;\n\n private toUint8Array(data: string | Buffer): Uint8Array {\n if (typeof data === \"string\") {\n return Buffer.from(data);\n }\n return data;\n }\n\n constructor(readable: NodeJS.ReadableStream) {\n super();\n this._readable = readable;\n this._position = 0;\n }\n public get position(): number {\n return this._position;\n }\n public async read(size: number, options: AvroReadableReadOptions = {}): Promise {\n if (options.abortSignal?.aborted) {\n throw ABORT_ERROR;\n }\n\n if (size < 0) {\n throw new Error(`size parameter should be positive: ${size}`);\n }\n\n if (size === 0) {\n return new Uint8Array();\n }\n\n if (!this._readable.readable) {\n throw new Error(\"Stream no longer readable.\");\n }\n // See if there is already enough data.\n const chunk = this._readable.read(size);\n if (chunk) {\n this._position += chunk.length;\n // chunk.length maybe less than desired size if the stream ends.\n return this.toUint8Array(chunk);\n } else {\n // register callback to wait for enough data to read\n return new Promise((resolve, reject) => {\n /* eslint-disable @typescript-eslint/no-use-before-define */\n const cleanUp: () => void = () => {\n this._readable.removeListener(\"readable\", readableCallback);\n this._readable.removeListener(\"error\", rejectCallback);\n this._readable.removeListener(\"end\", rejectCallback);\n this._readable.removeListener(\"close\", rejectCallback);\n\n if (options.abortSignal) {\n options.abortSignal!.removeEventListener(\"abort\", abortHandler);\n }\n };\n\n const readableCallback: () => void = () => {\n const callbackChunk = this._readable.read(size);\n if (callbackChunk) {\n this._position += callbackChunk.length;\n cleanUp();\n // callbackChunk.length maybe less than desired size if the stream ends.\n resolve(this.toUint8Array(callbackChunk));\n }\n };\n\n const rejectCallback: () => void = () => {\n cleanUp();\n reject();\n };\n\n const abortHandler: () => void = () => {\n cleanUp();\n reject(ABORT_ERROR);\n };\n\n this._readable.on(\"readable\", readableCallback);\n this._readable.once(\"error\", rejectCallback);\n this._readable.once(\"end\", rejectCallback);\n this._readable.once(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal!.addEventListener(\"abort\", abortHandler);\n }\n /* eslint-enable @typescript-eslint/no-use-before-define */\n });\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js new file mode 100644 index 00000000..84213754 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { __asyncGenerator, __await } from "tslib"; +// TODO: Do a review of non-interfaces +/* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */ +import "@azure/core-paging"; +import { AVRO_CODEC_KEY, AVRO_INIT_BYTES, AVRO_SCHEMA_KEY, AVRO_SYNC_MARKER_SIZE, } from "./AvroConstants"; +import { AvroParser, AvroType } from "./AvroParser"; +import { arraysEqual } from "./utils/utils.common"; +export class AvroReader { + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. + this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } + } + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects(options = {}) { + return __asyncGenerator(this, arguments, function* parseObjects_1() { + if (!this._initialized) { + yield __await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield __await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield __await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield __await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (err) { + // We 
hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield __await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield __await(result); + } + }); + } +} +//# sourceMappingURL=AvroReader.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js.map new file mode 100644 index 00000000..e35d5bd9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReader.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReader.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAElC,sCAAsC;AACtC,iEAAiE;AAEjE,OAAO,oBAAoB,CAAC;AAC5B,OAAO,EACL,cAAc,EACd,eAAe,EACf,eAAe,EACf,qBAAqB,GACtB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,cAAc,CAAC;AAGpD,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAanD,MAAM,OAAO,UAAU;IAuCrB,YACE,UAAwB,EACxB,YAA2B,EAC3B,kBAA2B,EAC3B,uBAAgC;QAEhC,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;QAC9B,IAAI,CAAC,aAAa,GAAG,YAAY,IAAI,UAAU,CAAC;QAChD,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;QAC1B,IAAI,CAAC,YAAY,GAAG,kBAAkB,IAAI,CAAC,CAAC;QAC5C,IAAI,CAAC,YAAY,GAAG,uBAAuB,IAAI,CAAC,CAAC;QACjD,IAAI,CAAC,mBAAmB,GAAG,kBAAkB,IAAI,CAAC,CAAC;IACrD,CAAC;IAhCD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAGD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IA2BO,KAAK,CAAC,UAAU,CAAC,UAA4B,EAAE;QACrD,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,eAAe,CAAC,MAAM,EAAE;YACzF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,eAAe,CAAC,EAAE;YACzC,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;SAChD;QAED,sEAAsE;QACtE,sCAAsC;QACtC,IAAI,CAAC,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,UAAU,CAAC,UAAU,EAAE;YACnF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,iBAAiB;QACjB,MAAM,KAAK,GAAG,IAAI,CAAC,SAAU,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAI,CAAC,CAAC,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,EAAE;YAChE,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;SAC7C;QAED,6DAA6D;QAC7D,IAAI,CAAC,WAAW,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,qBAAqB,EAAE;YAC5F,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,mBAAmB;QACnB,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,CAAC,CAAC;QAC5D,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAE7C,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,EAAE;YAC3B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;SAC1E;QAED,IAAI,CAAC,sBAAsB,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;YACxE,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,oBAAoB;QACpB,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;QAElF,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;YAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,EAAE;gBAC1C,MAAM,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;gBAClF,IAAI,CAAC,sBAAuB,EAAE,CAAC;aAChC;SACF;IACH,CAAC;IAEM,OAAO;QACZ,OAAO,CAAC,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,CAAC;IAChE,CAAC;IAEa,YAAY,CACxB,UAA4B,EAAE;;YAE9B,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;gBACtB,cAAM,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA,CAAC;aAChC;YAED,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE;gBACrB,MAAM,MAAM,GAAG,cAAM,IAAI,CAAC,SAAU,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;oBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW;iBACjC,CAAC,CAAA,CAAC;g
BAEH,IAAI,CAAC,sBAAuB,EAAE,CAAC;gBAC/B,IAAI,CAAC,YAAa,EAAE,CAAC;gBAErB,IAAI,IAAI,CAAC,sBAAsB,KAAK,CAAC,EAAE;oBACrC,MAAM,MAAM,GAAG,cAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,WAAW,EAAE,qBAAqB,EAAE;wBACtF,WAAW,EAAE,OAAO,CAAC,WAAW;qBACjC,CAAC,CAAA,CAAC;oBAEH,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;oBACzE,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;oBAEtB,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,WAAY,EAAE,MAAM,CAAC,EAAE;wBAC3C,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;qBACrD;oBAED,IAAI;wBACF,IAAI,CAAC,sBAAsB,GAAG,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;4BACxE,WAAW,EAAE,OAAO,CAAC,WAAW;yBACjC,CAAC,CAAA,CAAC;qBACJ;oBAAC,OAAO,GAAQ,EAAE;wBACjB,gCAAgC;wBAChC,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC;qBACjC;oBAED,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,EAAE;wBACpC,oBAAoB;wBACpB,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAA,CAAC;qBACnF;iBACF;gBACD,oBAAM,MAAM,CAAA,CAAC;aACd;QACH,CAAC;KAAA;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of non-interfaces\n/* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */\n\nimport \"@azure/core-paging\";\nimport {\n AVRO_CODEC_KEY,\n AVRO_INIT_BYTES,\n AVRO_SCHEMA_KEY,\n AVRO_SYNC_MARKER_SIZE,\n} from \"./AvroConstants\";\nimport { AvroParser, AvroType } from \"./AvroParser\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { arraysEqual } from \"./utils/utils.common\";\n\n/**\n * Options to configure the {@link AvroReader.parseObjects} operation.\n */\nexport interface AvroParseOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroReader {\n private readonly _dataStream: AvroReadable;\n\n private readonly _headerStream: AvroReadable;\n\n private _syncMarker?: Uint8Array;\n\n private _metadata?: Record;\n\n private _itemType?: AvroType;\n\n private _itemsRemainingInBlock?: number;\n\n // Remembers where we started if partial data stream was provided.\n private readonly _initialBlockOffset: number;\n\n /// The byte offset within the Avro file (both header and data)\n /// of the start of the current block.\n private _blockOffset: number;\n public get blockOffset(): number {\n return this._blockOffset;\n }\n\n private _objectIndex: number;\n public get objectIndex(): number {\n return this._objectIndex;\n }\n\n private _initialized: boolean;\n\n constructor(dataStream: AvroReadable);\n\n constructor(\n dataStream: AvroReadable,\n headerStream: AvroReadable,\n currentBlockOffset: number,\n indexWithinCurrentBlock: number\n );\n\n constructor(\n dataStream: AvroReadable,\n headerStream?: AvroReadable,\n currentBlockOffset?: number,\n indexWithinCurrentBlock?: number\n ) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n\n private async initialize(options: AvroParseOptions = {}): Promise {\n const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal,\n });\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n\n // File metadata is written as if defined 
by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal,\n });\n\n // Validate codec\n const codec = this._metadata![AVRO_CODEC_KEY];\n if (!(codec === undefined || codec === null || codec === \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n\n // The 16-byte, randomly-generated sync marker for this file.\n this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n // Parse the schema\n const schema = JSON.parse(this._metadata![AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n\n if (this._blockOffset === 0) {\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n }\n\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n // skip block length\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n\n this._initialized = true;\n if (this._objectIndex && this._objectIndex > 0) {\n for (let i = 0; i < this._objectIndex; i++) {\n await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal });\n this._itemsRemainingInBlock!--;\n }\n }\n }\n\n public hasNext(): boolean {\n return !this._initialized || this._itemsRemainingInBlock! > 0;\n }\n\n public async *parseObjects(\n options: AvroParseOptions = {}\n ): AsyncIterableIterator | null> {\n if (!this._initialized) {\n await this.initialize(options);\n }\n\n while (this.hasNext()) {\n const result = await this._itemType!.read(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n\n this._itemsRemainingInBlock!--;\n this._objectIndex!++;\n\n if (this._itemsRemainingInBlock === 0) {\n const marker = await AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n\n if (!arraysEqual(this._syncMarker!, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n\n try {\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n } catch (err: any) {\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n }\n\n if (this._itemsRemainingInBlock! > 0) {\n // Ignore block size\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n }\n }\n yield result;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js new file mode 100644 index 00000000..475b1d61 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { AvroReader } from "./AvroReader"; +export { AvroReadable } from "./AvroReadable"; +export { AvroReadableFromBlob } from "./AvroReadableFromBlob"; +//# sourceMappingURL=index.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js.map new file mode 100644 index 00000000..ef6000cc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAC9C,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AvroReader } from \"./AvroReader\";\nexport { AvroReadable } from \"./AvroReadable\";\nexport { AvroReadableFromBlob } from \"./AvroReadableFromBlob\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js new file mode 100644 index 00000000..15ab942a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { AvroReader } from "./AvroReader"; +export { AvroReadable } from "./AvroReadable"; +export { AvroReadableFromStream } from "./AvroReadableFromStream"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js.map new file mode 100644 index 00000000..c46e8668 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAC9C,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AvroReader } from \"./AvroReader\";\nexport { AvroReadable } from \"./AvroReadable\";\nexport { AvroReadableFromStream } from \"./AvroReadableFromStream\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js new file mode 100644 index 00000000..135cf5b2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export function arraysEqual(a, b) { + if (a === b) + return true; + // eslint-disable-next-line eqeqeq + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} +//# sourceMappingURL=utils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js.map new file mode 100644 index 00000000..26054593 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.common.js","sourceRoot":"","sources":["../../../../../storage-internal-avro/src/utils/utils.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC,MAAM,UAAU,WAAW,CAAC,CAAa,EAAE,CAAa;IACtD,IAAI,CAAC,KAAK,CAAC;QAAE,OAAO,IAAI,CAAC;IACzB,kCAAkC;IAClC,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI;QAAE,OAAO,KAAK,CAAC;IACzC,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,MAAM;QAAE,OAAO,KAAK,CAAC;IAExC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACjC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YAAE,OAAO,KAAK,CAAC;KACjC;IACD,OAAO,IAAI,CAAC;AACd,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport interface KeyValuePair {\n key: string;\n value: T;\n}\n\nexport function arraysEqual(a: Uint8Array, b: Uint8Array): boolean {\n if (a === b) return true;\n // eslint-disable-next-line eqeqeq\n if (a == null || b == null) return false;\n if (a.length !== b.length) return false;\n\n for (let i = 0; i < a.length; ++i) {\n if (a[i] !== b[i]) return false;\n }\n return true;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist/index.js b/node_modules/@azure/storage-blob/dist/index.js new file mode 100644 index 00000000..98c1a8b2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist/index.js @@ -0,0 +1,25339 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var coreHttp = require('@azure/core-http'); +var tslib = require('tslib'); +var coreTracing = require('@azure/core-tracing'); +var logger$1 = require('@azure/logger'); +var abortController = require('@azure/abort-controller'); +var os = require('os'); +var crypto = require('crypto'); +var stream = require('stream'); +require('@azure/core-paging'); +var coreLro = require('@azure/core-lro'); +var events = require('events'); +var fs = require('fs'); +var util = require('util'); + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var fs__namespace = /*#__PURE__*/_interopNamespace(fs); +var util__namespace = /*#__PURE__*/_interopNamespace(util); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging" + } + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule" + } + } + } + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String" + } + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite" + } + } + } + } +}; +const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String" + } + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean" + } + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean" + } + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + days: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number" + } + } + } + } +}; +const Metrics = { + serializedName: "Metrics", + type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +const CorsRule = { + serializedName: "CorsRule", + type: { + name: "Composite", + className: "CorsRule", + modelProperties: { + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", + type: { + name: "String" + } + }, + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: "AllowedMethods", + type: { + name: "String" + } + }, + 
allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", + type: { + name: "String" + } + }, + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String" + } + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", + type: { + name: "Number" + } + } + } + } +}; +const StaticWebsite = { + serializedName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", + type: { + name: "String" + } + }, + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", + type: { + name: "String" + } + }, + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", + type: { + name: "String" + } + } + } + } +}; +const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", + type: { + name: "String" + } + }, + code: { + serializedName: "Code", + xmlName: "Code", + type: { + name: "String" + } + } + } + } +}; +const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", + type: { + name: "Composite", + className: "BlobServiceStatistics", + modelProperties: { + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication" + } + } + } + } +}; +const GeoReplication = { + serializedName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + modelProperties: { + status: { + serializedName: "Status", + required: true, + xmlName: "Status", + type: { + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"] + } + }, + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListContainersSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", + type: { + name: "Composite", + className: 
"ContainerItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; +const ContainerProperties = { + serializedName: "ContainerProperties", + type: { + name: "Composite", + className: "ContainerProperties", + modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", + type: { + name: "String" + } + }, + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", + type: { + name: "Boolean" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", + type: { + name: "Boolean" + } + } + } + } +}; +const KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String" + } + } + } + } +}; +const UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: "UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", + type: { + name: "String" + } + }, + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", + type: { + name: "String" + } + }, + 
signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", + type: { + name: "String" + } + }, + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", + type: { + name: "String" + } + }, + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", + type: { + name: "String" + } + }, + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + where: { + serializedName: "Where", + required: true, + xmlName: "Where", + type: { + name: "String" + } + }, + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + type: { + name: "String" + } + }, + tags: { + serializedName: "Tags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + } + } + } +}; +const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag" + } + } + } + } + } + } +}; +const BlobTag = { + serializedName: "BlobTag", + xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", + type: { + name: "String" + } + }, + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy" + } + } + } + } +}; +const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + xmlName: 
"Expiry", + type: { + name: "String" + } + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", + type: { + name: "String" + } + } + } + } +}; +const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsFlatSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + modelProperties: { + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", + type: { + name: "Composite", + className: "BlobItemInternal", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + }, + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", + type: { + name: "String" + } + }, + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", + type: { + name: "Boolean" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + }, + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: "HasVersionsOnly", + type: { + name: "Boolean" + } + } + } + } +}; +const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + type: { + name: 
"String" + } + } + } + } +}; +const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + modelProperties: { + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", + type: { + name: "DateTimeRfc1123" + } + }, + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", + type: { + name: "String" + } + }, + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + }, + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", + type: { + name: "String" + } + }, + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", + type: { + name: "String" + } + }, + serverEncrypted: { + serializedName: "ServerEncrypted", + xmlName: "ServerEncrypted", + type: { + name: "Boolean" + } + }, + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", + type: { + name: "String" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + 
remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + }, + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", + type: { + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool" + ] + } + }, + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", + type: { + name: "DateTimeRfc1123" + } + }, + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", + type: { + name: "Boolean" + } + } + } + } +}; +const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", + type: { + name: "String" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const BlobHierarchyListSegment = { + 
serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix" + } + } + } + }, + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +const BlobPrefix = { + serializedName: "BlobPrefix", + type: { + name: "Composite", + className: "BlobPrefix", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + } + } + } +}; +const BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; +const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + }, + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + } + } + } +}; +const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: "Block", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + size: { + serializedName: "Size", + required: true, + xmlName: "Size", + type: { + name: "Number" + } + } + } + } +}; +const PageList = { + serializedName: "PageList", + type: { + name: "Composite", + className: "PageList", + modelProperties: { + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PageRange" + } + } + } + }, + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", + type: { + name: "Composite", + className: "PageRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", 
+ type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", + type: { + name: "String" + } + }, + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", + type: { + name: "String" + } + }, + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + }, + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + } + } + } +}; +const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", + type: { + name: "Composite", + className: "QueryFormat" + } + } + } + } +}; +const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: "Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "Enum", + allowedValues: ["delimited", "json", "arrow", "parquet"] + } + }, + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration" + } + }, + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration" + } + }, + arrowConfiguration: { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration" + } + }, + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", + type: { + name: "any" + } + } + } + } +}; +const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + modelProperties: { + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", + type: { + name: "String" + } + }, + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", + type: { + name: "String" + } + }, + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + }, + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", + type: { + name: "String" + } + }, + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", + type: { + name: "Boolean" + } + } + } + } +}; +const JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: 
"JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + } + } + } +}; +const ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + modelProperties: { + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField" + } + } + } + } + } + } +}; +const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", + type: { + name: "Composite", + className: "ArrowField", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "String" + } + }, + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "String" + } + }, + precision: { + serializedName: "Precision", + xmlName: "Precision", + type: { + name: "Number" + } + }, + scale: { + serializedName: "Scale", + xmlName: "Scale", + type: { + name: "Number" + } + } + } + } +}; +const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + 
xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + 
version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerCreateHeaders = { + 
serializedName: "Container_createHeaders", + type: { + name: "Composite", + className: "ContainerCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", + type: { + name: "Composite", + className: "ContainerCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesHeaders", + modelProperties: { + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + }, + denyEncryptionScopeOverride: { + serializedName: 
"x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", + type: { + name: "Composite", + className: "ContainerDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", + type: { + name: "Composite", + className: "ContainerDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyHeaders", + modelProperties: { + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { 
+ name: "Enum", + allowedValues: ["container", "blob"] + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", + type: { + name: "Composite", + className: "ContainerRestoreHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRestoreExceptionHeaders", + 
modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", + type: { + name: "Composite", + className: "ContainerRenameHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + } + } + } +}; +const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + 
serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerBreakLeaseHeaders = { + serializedName: 
"Container_breakLeaseHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const 
ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +const ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", + type: { + name: "Composite", + className: "BlobDownloadHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { 
name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + 
serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +const BlobDownloadExceptionHeaders = { + serializedName: "Blob_downloadExceptionHeaders", + type: { + name: "Composite", + className: "BlobDownloadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetPropertiesHeaders = { + serializedName: "Blob_getPropertiesHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: 
"String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", + type: { + name: "String" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + 
type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String" + } + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: 
"x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + 
requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + } + } +}; +const BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + 
name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + } + } +}; +const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: 
{ + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: 
"String" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + 
modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + 
xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + 
"BlockBlobStorage" + ] + } + } + } + } +}; +const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", 
"unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +const BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { 
+ name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + 
xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: 
"x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: 
"x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + 
className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobCreateExceptionHeaders = 
{ + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: 
"date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + } + } + } +}; +const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: 
"x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: 
"x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + 
type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; + +var Mappers = /*#__PURE__*/Object.freeze({ + __proto__: null, + BlobServiceProperties: BlobServiceProperties, + Logging: Logging, + RetentionPolicy: 
RetentionPolicy, + Metrics: Metrics, + CorsRule: CorsRule, + StaticWebsite: StaticWebsite, + StorageError: StorageError, + BlobServiceStatistics: BlobServiceStatistics, + GeoReplication: GeoReplication, + ListContainersSegmentResponse: ListContainersSegmentResponse, + ContainerItem: ContainerItem, + ContainerProperties: ContainerProperties, + KeyInfo: KeyInfo, + UserDelegationKey: UserDelegationKey, + FilterBlobSegment: FilterBlobSegment, + FilterBlobItem: FilterBlobItem, + BlobTags: BlobTags, + BlobTag: BlobTag, + SignedIdentifier: SignedIdentifier, + AccessPolicy: AccessPolicy, + ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, + BlobFlatListSegment: BlobFlatListSegment, + BlobItemInternal: BlobItemInternal, + BlobName: BlobName, + BlobPropertiesInternal: BlobPropertiesInternal, + ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, + BlobHierarchyListSegment: BlobHierarchyListSegment, + BlobPrefix: BlobPrefix, + BlockLookupList: BlockLookupList, + BlockList: BlockList, + Block: Block, + PageList: PageList, + PageRange: PageRange, + ClearRange: ClearRange, + QueryRequest: QueryRequest, + QuerySerialization: QuerySerialization, + QueryFormat: QueryFormat, + DelimitedTextConfiguration: DelimitedTextConfiguration, + JsonTextConfiguration: JsonTextConfiguration, + ArrowConfiguration: ArrowConfiguration, + ArrowField: ArrowField, + ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, + ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, + ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, + ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, + ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, + ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, + ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, + ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, + ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, + ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, + ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, + ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, + ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, + ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, + ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, + ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, + ContainerCreateHeaders: ContainerCreateHeaders, + ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, + ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, + ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, + ContainerDeleteHeaders: ContainerDeleteHeaders, + ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, + ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, + ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, + ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, + ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, + ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, + ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, + ContainerRestoreHeaders: ContainerRestoreHeaders, + ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, + ContainerRenameHeaders: ContainerRenameHeaders, + 
ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, + ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, + ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, + ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, + ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, + ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, + ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, + ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, + ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, + ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, + ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, + ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, + ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, + ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, + ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, + ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, + ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, + ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, + ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, + BlobDownloadHeaders: BlobDownloadHeaders, + BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, + BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, + BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, + BlobDeleteHeaders: BlobDeleteHeaders, + BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, + BlobUndeleteHeaders: BlobUndeleteHeaders, + BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, + BlobSetExpiryHeaders: BlobSetExpiryHeaders, + BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, + BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, + BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, + BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, + BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, + BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, + BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, + BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, + BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, + BlobSetMetadataHeaders: BlobSetMetadataHeaders, + BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, + BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, + BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, + BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, + BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, + BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, + BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, + BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, + BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, + BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, + BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, + BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, + BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, + BlobStartCopyFromURLHeaders: 
BlobStartCopyFromURLHeaders, + BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, + BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, + BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, + BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, + BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, + BlobSetTierHeaders: BlobSetTierHeaders, + BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, + BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, + BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, + BlobQueryHeaders: BlobQueryHeaders, + BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, + BlobGetTagsHeaders: BlobGetTagsHeaders, + BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, + BlobSetTagsHeaders: BlobSetTagsHeaders, + BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, + PageBlobCreateHeaders: PageBlobCreateHeaders, + PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, + PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, + PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, + PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, + PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, + PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, + PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, + PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, + PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, + PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, + PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, + PageBlobResizeHeaders: PageBlobResizeHeaders, + PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, + PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, + PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, + PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, + PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, + AppendBlobCreateHeaders: AppendBlobCreateHeaders, + AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, + AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, + AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, + AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, + AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, + AppendBlobSealHeaders: AppendBlobSealHeaders, + AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, + BlockBlobUploadHeaders: BlockBlobUploadHeaders, + BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, + BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, + BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, + BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, + BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, + BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, + BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, + BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, + BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, + BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, + BlockBlobGetBlockListExceptionHeaders: 
BlockBlobGetBlockListExceptionHeaders +}); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +const contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServiceProperties +}; +const accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true +}; +const restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } +}; +const version = { + parameterPath: "version", + mapper: { + defaultValue: "2021-06-08", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } +}; +const requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; +const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } +}; +const marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } +}; +const maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } + } +}; +const include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: coreHttp.QueryCollectionFormat.Csv +}; +const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfo +}; +const comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; 
+const restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +const comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" + } + } +}; +const multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" + } + } +}; +const comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" + } + } +}; +const restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + } +}; +const access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + } +}; +const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } +}; +const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } +}; +const leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: 
"containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } + } +}; +const comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } + } +}; +const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } + } +}; +const comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } +}; +const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } +}; +const comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" + } + } +}; +const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } + } +}; +const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +const include1 = { + 
parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } + } + }, + collectionFormat: coreHttp.QueryCollectionFormat.Csv +}; +const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String" + } + } +}; +const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" + } + } +}; +const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" + } + } +}; +const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" + } + } +}; +const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } +}; +const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" + } + } +}; +const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + } +}; +const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" + } + } +}; +const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" + } + } +}; +const ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } +}; +const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } +}; +const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] + } + } +}; +const blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" + } + } +}; +const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + 
serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" + } + } +}; +const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" + } + } +}; +const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" + } + } +}; +const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" + } + } +}; +const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + } +}; +const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" + } + } +}; +const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" + } + } +}; +const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } + } +}; +const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + } +}; +const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } +}; +const comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + } +}; +const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +const rehydratePriority = { + 
parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + } +}; +const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String" + } + } +}; +const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String" + } + } +}; +const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String" + } + } +}; +const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String" + } + } +}; +const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean" + } + } +}; +const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String" + } + } +}; +const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray" + } + } +}; +const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String" + } + } +}; +const copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"] + } + } +}; +const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String" + } + } +}; +const 
copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String" + } + } +}; +const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequest +}; +const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTags +}; +const transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + } +}; +const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } +}; +const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + } +}; +const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + } +}; +const contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number" + } + } +}; +const ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { 
+ serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number" + } + } +}; +const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number" + } + } +}; +const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray" + } + } +}; +const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String" + } + } +}; +const prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String" + } + } +}; +const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"] + } + } +}; +const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number" + } + } +}; +const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" + } + } +}; +const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +const comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: 
"comp", + type: { + name: "String" + } + } +}; +const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean" + } + } +}; +const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String" + } + } +}; +const blocks = { + parameterPath: "blocks", + mapper: BlockLookupList +}; +const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] + } + } +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Service. */ +class Service { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + const operationArguments = { + blobServiceProperties, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. 
+ */ + listContainersSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + const operationArguments = { + keyInfo, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. 
+ */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSetPropertiesExceptionHeaders + } + }, + requestBody: blobServiceProperties, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const getPropertiesOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceProperties, + headersMapper: ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceStatistics, + headersMapper: ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetStatisticsExceptionHeaders + } + }, + queryParameters: [ + restype, + timeoutInSeconds, + comp1 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListContainersSegmentResponse, + headersMapper: ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceListContainersSegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + include + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: UserDelegationKey, + headersMapper: ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders + } + }, + requestBody: keyInfo, + queryParameters: [ + restype, + timeoutInSeconds, + comp3 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const getAccountInfoOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$5 +}; +const submitBatchOperationSpec$1 = { + path: "/", + httpMethod: "POST", + 
responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSubmitBatchExceptionHeaders + } + }, + requestBody: body, + queryParameters: [timeoutInSeconds, comp4], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + contentLength, + multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const filterBlobsOperationSpec$1 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Container. */ +class Container { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. 
+ */ + getAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + const operationArguments = { + sourceContainerName, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter, options) { + const operationArguments = { + delimiter, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. 
+ */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + access, + defaultEncryptionScope, + preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getPropertiesOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetPropertiesExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const deleteOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: ContainerDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerDeleteExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const setMetadataOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetMetadataExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp6 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + headersMapper: ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccessPolicyExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetAccessPolicyExceptionHeaders + } + }, + requestBody: containerAcl, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + 
requestId, + access, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerRestoreHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRestoreExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp8 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + deletedContainerName, + deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenameHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenameExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp9 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + sourceContainerName, + sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSubmitBatchExceptionHeaders + } + }, + requestBody: body, + queryParameters: [ + timeoutInSeconds, + comp4, + restype2 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + contentLength, + multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const acquireLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerAcquireLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const releaseLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerReleaseLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const renewLeaseOperationSpec$1 = { + path: "/{containerName}", 
+ httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenewLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const breakLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerBreakLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const changeLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerChangeLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsFlatSegmentResponse, + headersMapper: ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsHierarchySegmentResponse, + headersMapper: ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + delimiter + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getAccountInfoOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$4 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Blob. */ +class Blob$1 { + /** + * Initialize a new instance of the class Blob class. 
+ * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. + */ + setExpiry(expiryOptions, options) { + const operationArguments = { + expiryOptions, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. 
+ */ + setHttpHeaders(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + const operationArguments = { + legalHold, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. 
+ * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + const operationArguments = { + copyId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. 
The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + const operationArguments = { + tier, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
+ */ + setTags(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDownloadExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + rangeGetContentMD5, + rangeGetContentCRC64, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: BlobGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: BlobDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + blobDeleteType + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobUndeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobUndeleteExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp8], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetExpiryHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetExpiryExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp11], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + expiryOptions, + expiresOn + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: 
BlobSetHttpHeadersExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifUnmodifiedSince, + immutabilityPolicyExpiry, + immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetLegalHoldExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp13], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + legalHold + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetMetadataExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp6], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAcquireLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobReleaseLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + 
leaseId1, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobRenewLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobChangeLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobChangeLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobBreakLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCreateSnapshotExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp14], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobStartCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + tier, + rehydratePriority, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sealBlob, + legalHold1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + 
leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + copySource, + blobTagsString, + legalHold1, + xMsRequiresSync, + sourceContentMD5, + copySourceAuthorization, + copySourceTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAbortCopyFromURLExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp15, + copyId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetTierHeaders + }, + 202: { + headersMapper: BlobSetTierHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTierExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp16 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + rehydratePriority, + tier1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$3 +}; +const queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobQueryHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobQueryExceptionHeaders + } + }, + requestBody: queryRequest, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp17 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3 +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobTags, + headersMapper: BlobGetTagsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetTagsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp18 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobSetTagsHeaders + }, + default: { + bodyMapper: 
StorageError, + headersMapper: BlobSetTagsExceptionHeaders + } + }, + requestBody: tags, + queryParameters: [ + timeoutInSeconds, + versionId, + comp18 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifTags, + transactionalContentMD5, + transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a PageBlob. */ +class PageBlob { + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + const operationArguments = { + contentLength, + blobContentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + uploadPages(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. 
+ */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + const operationArguments = { + sourceUrl, + sourceRange, + contentLength, + range, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. + */ + getPageRanges(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. + */ + getPageRangesDiff(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + } + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + const operationArguments = { + blobContentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. + */ + updateSequenceNumber(sequenceNumberAction, options) { + const operationArguments = { + sequenceNumberAction, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. 
+ */ + copyIncremental(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec$1 = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + blobType, + blobContentLength, + blobSequenceNumber + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo + ], + mediaType: "binary", + serializer: serializer$2 +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobClearPagesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobClearPagesExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + pageWrite1 + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, 
+ ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + sourceUrl, + sourceRange, + sourceContentCrc64, + range1 + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesDiffHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + prevsnapshot + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + prevSnapshotUrl + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobResizeHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobResizeExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + blobContentLength + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobSequenceNumber, + sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCopyIncrementalExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp21], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + copySource + ], + isXML: true, + serializer: xmlSerializer$2 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. 
+ * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a AppendBlob. */ +class AppendBlob { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + const operationArguments = { + sourceUrl, + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. 
+ */ + seal(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + blobTagsString, + legalHold1, + blobType1 + ], + isXML: true, + serializer: xmlSerializer$1 +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + maxSize, + appendPosition + ], + mediaType: "binary", + serializer: serializer$1 +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + transactionalContentMD5, + sourceUrl, + sourceContentCrc64, + maxSize, + appendPosition, + sourceRange1 + ], + isXML: true, + serializer: xmlSerializer$1 +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: AppendBlobSealHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobSealExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp23], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + appendPosition + ], + isXML: true, + serializer: xmlSerializer$1 +}; + +/* + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a BlockBlob. */ +class BlockBlob { + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + const operationArguments = { + contentLength, + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + const operationArguments = { + blockId, + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. + */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + const operationArguments = { + blockId, + contentLength, + sourceUrl, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + const operationArguments = { + blocks, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. 
+ */ + getBlockList(listType, options) { + const operationArguments = { + listType, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobUploadHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobUploadExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + contentType1, + accept2, + blobType2 + ], + mediaType: "binary", + serializer +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobPutBlobFromUrlHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sourceContentMD5, + copySourceAuthorization, + copySourceTags, + transactionalContentMD5, + blobType2, + copySourceBlobProperties + ], + isXML: true, + serializer: xmlSerializer +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2 + ], + mediaType: "binary", + serializer +}; +const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + 
contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + sourceUrl, + sourceContentCrc64, + sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobCommitBlockListHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobCommitBlockListExceptionHeaders + } + }, + requestBody: blocks, + queryParameters: [timeoutInSeconds, comp25], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlockList, + headersMapper: BlockBlobGetBlockListHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobGetBlockListExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp25, + listType + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags + ], + isXML: true, + serializer: xmlSerializer +}; + +// Copyright (c) Microsoft Corporation. +/** + * The `@azure/logger` configuration for this package. + */ +const logger = logger$1.createClientLogger("storage-blob"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const SDK_VERSION = "12.10.0"; +const SERVICE_VERSION = "2021-06-08"; +const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +const BLOCK_BLOB_MAX_BLOCKS = 50000; +const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +/** + * The OAuth scope to use with Azure Storage. 
+ */ +const StorageOAuthScopes = "https://storage.azure.com/.default"; +const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", +}; +const ETagNone = ""; +const ETagAny = "*"; +const SIZE_1_MB = 1 * 1024 * 1024; +const BATCH_MAX_REQUEST = 256; +const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +const HTTP_LINE_ENDING = "\r\n"; +const HTTP_VERSION_1_1 = "HTTP/1.1"; +const EncryptionAlgorithmAES25 = "AES256"; +const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + 
"x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; + +// Copyright (c) Microsoft Corporation. +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. 
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +function escapeURLPath(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path || "/"; + path = escape(path); + urlParsed.setPath(path); + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; +} +function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; +} +/** + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. 
+ */ +function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + const accountName = getAccountNameFromUrl(blobEndpoint); + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } +} +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" +} +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". + * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +function appendToURLPath(url, name) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path ? (path.endsWith("/") ? 
`${path}${name}` : `${path}/${name}`) : name; + urlParsed.setPath(path); + return urlParsed.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +function setURLParameter(url, name, value) { + const urlParsed = coreHttp.URLBuilder.parse(url); + urlParsed.setQueryParameter(name, value); + return urlParsed.toString(); +} +/** + * Get URL parameter by name. + * + * @param url - + * @param name - + */ +function getURLParameter(url, name) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getQueryParameterValue(name); +} +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +function setURLHost(url, host) { + const urlParsed = coreHttp.URLBuilder.parse(url); + urlParsed.setHost(host); + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +function getURLPath(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getPath(); +} +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +function getURLScheme(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getScheme(); +} +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +function getURLPathAndQuery(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + const pathString = urlParsed.getPath(); + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.getQuery() || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; +} +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +function getURLQueries(url) { + let queryString = coreHttp.URLBuilder.parse(url).getQuery(); + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; +} +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. + */ +function appendToURLQuery(url, queryParts) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let query = urlParsed.getQuery(); + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.setQuery(query); + return urlParsed.toString(); +} +/** + * Rounds a date off to seconds. 
+ * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. + * + * @param content - + */ +function base64encode(content) { + return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Generate a 64 bytes base64 block ID string. + * + * @param blockIndex - + */ +function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); +} +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } +} +/** + * If two strings are equal when compared case insensitive. 
+ * + * @param str1 - + * @param str2 - + */ +function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +function getAccountNameFromUrl(url) { + const parsedUrl = coreHttp.URLBuilder.parse(url); + let accountName; + try { + if (parsedUrl.getHost().split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.getHost().split(".")[0]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.getPath().split("/")[1]; + } + else { + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; + } + return accountName; + } + catch (error) { + throw new Error("Unable to extract accountName with provided information."); + } +} +function isIpEndpointStyle(parsedUrl) { + if (parsedUrl.getHost() === undefined) { + return false; + } + const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port), use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); +} +/** + * Convert Tags type to BlobTags. + * + * @param tags - + */ +function toBlobTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. 
+ * + * @param textConfiguration - + */ +function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } +} +function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. + return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } + } + return orProperties; +} +/** + * Attach a TokenCredential to an object. + * + * @param thing - + * @param credential - + */ +function attachCredential(thing, credential) { + thing.credential = credential; + return thing; +} +function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function decodeBase64String(value) { + if (coreHttp.isNode) { + return Buffer.from(value, "base64"); + } + else { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } +} +function ParseBoolean(content) { + if (content === undefined) + return undefined; + if (content === "true") + return true; + if (content === "false") + return false; + return undefined; +} +function ParseBlobName(blobNameInXML) { + if (blobNameInXML["$"] !== undefined && blobNameInXML["#"] !== undefined) { + return { + encoded: ParseBoolean(blobNameInXML["$"]["Encoded"]), + content: blobNameInXML["#"], + }; + } + else { + return { + encoded: false, + content: blobNameInXML, + }; + } +} +function ParseBlobProperties(blobPropertiesInXML) { + const blobProperties = blobPropertiesInXML; + if (blobPropertiesInXML["Creation-Time"]) { + blobProperties.createdOn = new Date(blobPropertiesInXML["Creation-Time"]); + delete blobProperties["Creation-Time"]; + } + if (blobPropertiesInXML["Last-Modified"]) { + blobProperties.lastModified = new Date(blobPropertiesInXML["Last-Modified"]); + delete blobProperties["Last-Modified"]; + } + if (blobPropertiesInXML["Etag"]) { + blobProperties.etag = blobPropertiesInXML["Etag"]; + delete blobProperties["Etag"]; + } + if (blobPropertiesInXML["Content-Length"]) { + blobProperties.contentLength = parseFloat(blobPropertiesInXML["Content-Length"]); + delete blobProperties["Content-Length"]; + } + if (blobPropertiesInXML["Content-Type"]) { + blobProperties.contentType = blobPropertiesInXML["Content-Type"]; + delete blobProperties["Content-Type"]; + } + if (blobPropertiesInXML["Content-Encoding"]) { + blobProperties.contentEncoding = blobPropertiesInXML["Content-Encoding"]; + delete blobProperties["Content-Encoding"]; + } + if (blobPropertiesInXML["Content-Language"]) { + blobProperties.contentLanguage = blobPropertiesInXML["Content-Language"]; + delete blobProperties["Content-Language"]; + } + if (blobPropertiesInXML["Content-MD5"]) { + blobProperties.contentMD5 = decodeBase64String(blobPropertiesInXML["Content-MD5"]); + delete blobProperties["Content-MD5"]; + } + if (blobPropertiesInXML["Content-Disposition"]) { + blobProperties.contentDisposition = blobPropertiesInXML["Content-Disposition"]; + delete blobProperties["Content-Disposition"]; + } + if (blobPropertiesInXML["Cache-Control"]) { + blobProperties.cacheControl = blobPropertiesInXML["Cache-Control"]; + delete blobProperties["Cache-Control"]; + } + if (blobPropertiesInXML["x-ms-blob-sequence-number"]) { + blobProperties.blobSequenceNumber = parseFloat(blobPropertiesInXML["x-ms-blob-sequence-number"]); + delete blobProperties["x-ms-blob-sequence-number"]; + } + if (blobPropertiesInXML["BlobType"]) { + blobProperties.blobType = blobPropertiesInXML["BlobType"]; + delete blobProperties["BlobType"]; + } + if (blobPropertiesInXML["LeaseStatus"]) { + blobProperties.leaseStatus = blobPropertiesInXML["LeaseStatus"]; + delete blobProperties["LeaseStatus"]; + } + if (blobPropertiesInXML["LeaseState"]) { + blobProperties.leaseState = 
blobPropertiesInXML["LeaseState"]; + delete blobProperties["LeaseState"]; + } + if (blobPropertiesInXML["LeaseDuration"]) { + blobProperties.leaseDuration = blobPropertiesInXML["LeaseDuration"]; + delete blobProperties["LeaseDuration"]; + } + if (blobPropertiesInXML["CopyId"]) { + blobProperties.copyId = blobPropertiesInXML["CopyId"]; + delete blobProperties["CopyId"]; + } + if (blobPropertiesInXML["CopyStatus"]) { + blobProperties.copyStatus = blobPropertiesInXML["CopyStatus"]; + delete blobProperties["CopyStatus"]; + } + if (blobPropertiesInXML["CopySource"]) { + blobProperties.copySource = blobPropertiesInXML["CopySource"]; + delete blobProperties["CopySource"]; + } + if (blobPropertiesInXML["CopyProgress"]) { + blobProperties.copyProgress = blobPropertiesInXML["CopyProgress"]; + delete blobProperties["CopyProgress"]; + } + if (blobPropertiesInXML["CopyCompletionTime"]) { + blobProperties.copyCompletedOn = new Date(blobPropertiesInXML["CopyCompletionTime"]); + delete blobProperties["CopyCompletionTime"]; + } + if (blobPropertiesInXML["CopyStatusDescription"]) { + blobProperties.copyStatusDescription = blobPropertiesInXML["CopyStatusDescription"]; + delete blobProperties["CopyStatusDescription"]; + } + if (blobPropertiesInXML["ServerEncrypted"]) { + blobProperties.serverEncrypted = ParseBoolean(blobPropertiesInXML["ServerEncrypted"]); + delete blobProperties["ServerEncrypted"]; + } + if (blobPropertiesInXML["IncrementalCopy"]) { + blobProperties.incrementalCopy = ParseBoolean(blobPropertiesInXML["IncrementalCopy"]); + delete blobProperties["IncrementalCopy"]; + } + if (blobPropertiesInXML["DestinationSnapshot"]) { + blobProperties.destinationSnapshot = blobPropertiesInXML["DestinationSnapshot"]; + delete blobProperties["DestinationSnapshot"]; + } + if (blobPropertiesInXML["DeletedTime"]) { + blobProperties.deletedOn = new Date(blobPropertiesInXML["DeletedTime"]); + delete blobProperties["DeletedTime"]; + } + if (blobPropertiesInXML["RemainingRetentionDays"]) { + blobProperties.remainingRetentionDays = parseFloat(blobPropertiesInXML["RemainingRetentionDays"]); + delete blobProperties["RemainingRetentionDays"]; + } + if (blobPropertiesInXML["AccessTier"]) { + blobProperties.accessTier = blobPropertiesInXML["AccessTier"]; + delete blobProperties["AccessTier"]; + } + if (blobPropertiesInXML["AccessTierInferred"]) { + blobProperties.accessTierInferred = ParseBoolean(blobPropertiesInXML["AccessTierInferred"]); + delete blobProperties["AccessTierInferred"]; + } + if (blobPropertiesInXML["ArchiveStatus"]) { + blobProperties.archiveStatus = blobPropertiesInXML["ArchiveStatus"]; + delete blobProperties["ArchiveStatus"]; + } + if (blobPropertiesInXML["CustomerProvidedKeySha256"]) { + blobProperties.customerProvidedKeySha256 = blobPropertiesInXML["CustomerProvidedKeySha256"]; + delete blobProperties["CustomerProvidedKeySha256"]; + } + if (blobPropertiesInXML["EncryptionScope"]) { + blobProperties.encryptionScope = blobPropertiesInXML["EncryptionScope"]; + delete blobProperties["EncryptionScope"]; + } + if (blobPropertiesInXML["AccessTierChangeTime"]) { + blobProperties.accessTierChangedOn = new Date(blobPropertiesInXML["AccessTierChangeTime"]); + delete blobProperties["AccessTierChangeTime"]; + } + if (blobPropertiesInXML["TagCount"]) { + blobProperties.tagCount = parseFloat(blobPropertiesInXML["TagCount"]); + delete blobProperties["TagCount"]; + } + if (blobPropertiesInXML["Expiry-Time"]) { + blobProperties.expiresOn = new Date(blobPropertiesInXML["Expiry-Time"]); + delete 
blobProperties["Expiry-Time"]; + } + if (blobPropertiesInXML["Sealed"]) { + blobProperties.isSealed = ParseBoolean(blobPropertiesInXML["Sealed"]); + delete blobProperties["Sealed"]; + } + if (blobPropertiesInXML["RehydratePriority"]) { + blobProperties.rehydratePriority = blobPropertiesInXML["RehydratePriority"]; + delete blobProperties["RehydratePriority"]; + } + if (blobPropertiesInXML["LastAccessTime"]) { + blobProperties.lastAccessedOn = new Date(blobPropertiesInXML["LastAccessTime"]); + delete blobProperties["LastAccessTime"]; + } + if (blobPropertiesInXML["ImmutabilityPolicyUntilDate"]) { + blobProperties.immutabilityPolicyExpiresOn = new Date(blobPropertiesInXML["ImmutabilityPolicyUntilDate"]); + delete blobProperties["ImmutabilityPolicyUntilDate"]; + } + if (blobPropertiesInXML["ImmutabilityPolicyMode"]) { + blobProperties.immutabilityPolicyMode = blobPropertiesInXML["ImmutabilityPolicyMode"]; + delete blobProperties["ImmutabilityPolicyMode"]; + } + if (blobPropertiesInXML["LegalHold"]) { + blobProperties.legalHold = ParseBoolean(blobPropertiesInXML["LegalHold"]); + delete blobProperties["LegalHold"]; + } + return blobProperties; +} +function ParseBlobItem(blobInXML) { + const blobItem = blobInXML; + blobItem.properties = ParseBlobProperties(blobInXML["Properties"]); + delete blobItem["Properties"]; + blobItem.name = ParseBlobName(blobInXML["Name"]); + delete blobItem["Name"]; + blobItem.deleted = ParseBoolean(blobInXML["Deleted"]); + delete blobItem["Deleted"]; + if (blobInXML["Snapshot"]) { + blobItem.snapshot = blobInXML["Snapshot"]; + delete blobItem["Snapshot"]; + } + if (blobInXML["VersionId"]) { + blobItem.versionId = blobInXML["VersionId"]; + delete blobItem["VersionId"]; + } + if (blobInXML["IsCurrentVersion"]) { + blobItem.isCurrentVersion = ParseBoolean(blobInXML["IsCurrentVersion"]); + delete blobItem["IsCurrentVersion"]; + } + if (blobInXML["Metadata"]) { + blobItem.metadata = blobInXML["Metadata"]; + delete blobItem["Metadata"]; + } + if (blobInXML["Tags"]) { + blobItem.blobTags = ParseBlobTags(blobInXML["Tags"]); + delete blobItem["Tags"]; + } + if (blobInXML["OrMetadata"]) { + blobItem.objectReplicationMetadata = blobInXML["OrMetadata"]; + delete blobItem["OrMetadata"]; + } + if (blobInXML["HasVersionsOnly"]) { + blobItem.hasVersionsOnly = ParseBoolean(blobInXML["HasVersionsOnly"]); + delete blobItem["HasVersionsOnly"]; + } + return blobItem; +} +function ParseBlobPrefix(blobPrefixInXML) { + return { + name: ParseBlobName(blobPrefixInXML["Name"]), + }; +} +function ParseBlobTag(blobTagInXML) { + return { + key: blobTagInXML["Key"], + value: blobTagInXML["Value"], + }; +} +function ParseBlobTags(blobTagsInXML) { + if (blobTagsInXML === undefined || + blobTagsInXML["TagSet"] === undefined || + blobTagsInXML["TagSet"]["Tag"] === undefined) { + return undefined; + } + const blobTagSet = []; + if (blobTagsInXML["TagSet"]["Tag"] instanceof Array) { + blobTagsInXML["TagSet"]["Tag"].forEach((blobTagInXML) => { + blobTagSet.push(ParseBlobTag(blobTagInXML)); + }); + } + else { + blobTagSet.push(ParseBlobTag(blobTagsInXML["TagSet"]["Tag"])); + } + return { blobTagSet: blobTagSet }; +} +function ProcessBlobItems(blobArrayInXML) { + const blobItems = []; + if (blobArrayInXML instanceof Array) { + blobArrayInXML.forEach((blobInXML) => { + blobItems.push(ParseBlobItem(blobInXML)); + }); + } + else { + blobItems.push(ParseBlobItem(blobArrayInXML)); + } + return blobItems; +} +function ProcessBlobPrefixes(blobPrefixesInXML) { + const blobPrefixes = []; + if (blobPrefixesInXML 
instanceof Array) { + blobPrefixesInXML.forEach((blobPrefixInXML) => { + blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML)); + }); + } + else { + blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML)); + } + return blobPrefixes; +} +function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; + } + else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; + } + } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreHttp.isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. 
+ */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + } +} + +// Copyright (c) Microsoft Corporation. 
+/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +class CredentialPolicy extends coreHttp.BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * TelemetryPolicy is a policy used to tag user-agent header for every requests. + */ +class TelemetryPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of TelemetryPolicy. + * @param nextPolicy - + * @param options - + * @param telemetry - + */ + constructor(nextPolicy, options, telemetry) { + super(nextPolicy, options); + this.telemetry = telemetry; + } + /** + * Sends out request. 
+ * + * @param request - + */ + async sendRequest(request) { + if (coreHttp.isNode) { + if (!request.headers) { + request.headers = new coreHttp.HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. + */ +class TelemetryPolicyFactory { + /** + * Creates an instance of TelemetryPolicyFactory. + * @param telemetry - + */ + constructor(telemetry) { + const userAgentInfo = []; + if (coreHttp.isNode) { + if (telemetry) { + const telemetryString = telemetry.userAgentPrefix || ""; + if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { + userAgentInfo.push(telemetryString); + } + } + // e.g. azsdk-js-storageblob/10.0.0 + const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + if (userAgentInfo.indexOf(libInfo) === -1) { + userAgentInfo.push(libInfo); + } + // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) + let runtimeInfo = `(NODE-VERSION ${process.version})`; + if (os__namespace) { + runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; + } + if (userAgentInfo.indexOf(runtimeInfo) === -1) { + userAgentInfo.push(runtimeInfo); + } + } + this.telemetryString = userAgentInfo.join(" "); + } + /** + * Creates a TelemetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + } +} + +// Copyright (c) Microsoft Corporation. +const _defaultHttpClient = new coreHttp.DefaultHttpClient(); +function getCachedDefaultHttpClient() { + return _defaultHttpClient; +} + +// Copyright (c) Microsoft Corporation. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. 
+ async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await coreHttp.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. 
+ // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +class Pipeline { + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + // when options.httpClient is not specified, passing in a DefaultHttpClient instance to + // avoid each client creating its own http client. + this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; + } +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +function newPipeline(credential, pipelineOptions = {}) { + var _a; + if (credential === undefined) { + credential = new AnonymousCredential(); + } + // Order is important. Closer to the API at the top & closer to the network at the bottom. 
+ // The credential's policy factory must appear close to the wire so it can sign any + // changes made by other factories (like UniqueRequestIDPolicyFactory) + const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + const factories = [ + coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), + coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), + telemetryPolicy, + coreHttp.generateClientRequestIdPolicy(), + new StorageBrowserPolicyFactory(), + new StorageRetryPolicyFactory(pipelineOptions.retryOptions), + // Default deserializationPolicy is provided by protocol layer + // Use customized XML char key of "#" so we could deserialize metadata + // with "_" key + coreHttp.deserializationPolicy(undefined, { xmlCharKey: "#" }), + coreHttp.logPolicy({ + logger: logger.info, + allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), + ]; + if (coreHttp.isNode) { + // policies only available in Node.js runtime, not in browsers + factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); + factories.push(coreHttp.disableResponseDecompressionPolicy()); + } + factories.push(coreHttp.isTokenCredential(credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) + : credential); + return new Pipeline(factories, pipelineOptions); +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. 
+ * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } +} + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +const packageName = "azure-storage-blob"; +const packageVersion = "12.10.0"; +class StorageClientContext extends coreHttp__namespace.ServiceClient { + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. 
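The Shared Key signature above reduces to an HMAC-SHA256 over the canonicalized request, exactly as `computeHMACSHA256()` does. A toy illustration using Node's `crypto` module; the string-to-sign below is a simplified stand-in, while real values follow the header and resource canonicalization rules implemented above:

```js
// Toy reproduction of the HMAC step in StorageSharedKeyCredential.computeHMACSHA256().
// The layout mirrors signRequest(): VERB + eleven standard header values, then the
// canonicalized x-ms-* headers (one per line), then the canonicalized resource.
const crypto = require("crypto");

const accountKey = Buffer.from("bXlhY2NvdW50a2V5", "base64"); // placeholder account key

const stringToSign =
  ["GET", "", "", "", "", "", "", "", "", "", "", ""].join("\n") + // verb + mostly-empty standard headers
  "\n" +
  "x-ms-date:Thu, 19 May 2022 14:00:00 GMT\n" +
  "x-ms-version:2021-06-08\n" +
  "/myaccount/mycontainer/myblob.txt";                             // canonicalized resource

const signature = crypto
  .createHmac("sha256", accountKey)
  .update(stringToSign, "utf8")
  .digest("base64");

console.log(`Authorization: SharedKey myaccount:${signature}`);
```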
+ * @param options The parameter options + */ + constructor(url, options) { + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + if (!options.userAgent) { + const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + super(undefined, options); + this.requestContentType = "application/json; charset=utf-8"; + this.baseUri = options.endpoint || "{url}"; + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2021-06-08"; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = new AnonymousCredential(); + for (const factory of this.pipeline.factories) { + if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) || + factory instanceof AnonymousCredential) { + this.credential = factory; + } + else if (coreHttp.isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + this.credential = factory.credential; + } + } + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a span using the global tracer. + * @internal + */ +const createSpan = coreTracing.createSpanFunction({ + packagePrefix: "Azure.Storage.Blob", + namespace: "Microsoft.Storage", +}); +/** + * @internal + * + * Adapt the tracing options from OperationOptions to what they need to be for + * RequestOptionsBase (when we update to later OpenTelemetry versions this is now + * two separate fields, not just one). + */ +function convertTracingToRequestOptionsBase(options) { + var _a, _b; + return { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; + } + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
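A short usage sketch of the `BlobSASPermissions` helper defined above, showing the round trip between permission strings and the typed object (the `require` assumes the package's public export, which matches this bundled class):

```js
const { BlobSASPermissions } = require("@azure/storage-blob");

// parse() accepts the service's single-letter codes and throws RangeError on unknown ones.
const parsed = BlobSASPermissions.parse("racwd"); // read, add, create, write, delete
const fromFlags = BlobSASPermissions.from({ read: true, write: true });

console.log(parsed.toString());    // "racwd" -- toString() emits the service-accepted order
console.log(fromFlags.toString()); // "rw"
// BlobSASPermissions.parse("z")   // would throw: RangeError: Invalid permission: z
```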
+ * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate SasIPRange format string. For example: + * + * "8.8.8.8" or "1.1.1.1-255.255.255.255" + * + * @param ipRange - + */ +function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; +} + +// Copyright (c) Microsoft Corporation. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(exports.SASProtocol || (exports.SASProtocol = {})); +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. 
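Similarly, a small sketch of the container-level helpers just defined: `ContainerSASPermissions` for the permission string and `SASProtocol` for the `spr` value (`ipRangeToString` is internal to the bundle, so it appears here only in comments):

```js
const { ContainerSASPermissions, SASProtocol } = require("@azure/storage-blob");

const perms = ContainerSASPermissions.parse("rl"); // read + list the container
console.log(perms.toString());                     // "rl"
console.log(SASProtocol.Https);                    // "https"
console.log(SASProtocol.HttpsAndHttp);             // "https,http"

// ipRangeToString({ start: "8.8.8.8" })                          => "8.8.8.8"
// ipRangeToString({ start: "1.1.1.1", end: "255.255.255.255" })  => "1.1.1.1-255.255.255.255"
```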
+ */ +class SASQueryParameters { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } + } + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + "sktid", + "skt", + "ske", + "sks", + "skv", + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } +} + +// Copyright (c) Microsoft Corporation. 
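Before the version-specific implementations that follow, a sketch of the usual entry point: `generateBlobSASQueryParameters()` with a `StorageSharedKeyCredential`, whose result is the `SASQueryParameters` instance serialized by the `toString()` shown above (account, container, and blob names are placeholders):

```js
const {
  generateBlobSASQueryParameters,
  BlobSASPermissions,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

const credential = new StorageSharedKeyCredential("myaccount", "bXlhY2NvdW50a2V5"); // placeholder key

const sas = generateBlobSASQueryParameters(
  {
    containerName: "mycontainer",
    blobName: "myblob.txt",
    permissions: BlobSASPermissions.parse("r"),       // read-only
    expiresOn: new Date(Date.now() + 60 * 60 * 1000), // valid for one hour
  },
  credential
);

// toString() walks the fixed parameter order ("sv", "st", "se", "sr", "sp", "sig", ...)
// and yields something like "sv=...&se=...&sr=b&sp=r&sig=...".
const blobUrl = `https://myaccount.blob.core.windows.net/mycontainer/myblob.txt?${sas.toString()}`;
console.log(blobUrl);
```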
+function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. 
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. 
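The user-delegation variants below take a `UserDelegationKey` instead of an account key. A sketch of that path, assuming an AAD token credential from `@azure/identity` (an extra dependency not present in this patch) and the public `BlobServiceClient.getUserDelegationKey()` API:

```js
const {
  BlobServiceClient,
  generateBlobSASQueryParameters,
  BlobSASPermissions,
} = require("@azure/storage-blob");
const { DefaultAzureCredential } = require("@azure/identity"); // assumed extra dependency

async function userDelegationSas() {
  const serviceClient = new BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    new DefaultAzureCredential()
  );

  const startsOn = new Date();
  const expiresOn = new Date(Date.now() + 60 * 60 * 1000);

  // The service-issued key carries the signed fields serialized as skoid, sktid, skt, ske, sks, skv.
  const userDelegationKey = await serviceClient.getUserDelegationKey(startsOn, expiresOn);

  const sas = generateBlobSASQueryParameters(
    {
      containerName: "mycontainer",
      blobName: "myblob.txt",
      permissions: BlobSASPermissions.parse("r"),
      startsOn,
      expiresOn,
    },
    userDelegationKey,
    "myaccount" // the account name is required for the user-delegation overload
  );
  return sas.toString();
}
```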
For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); + } + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; +} + +// Copyright (c) Microsoft Corporation. +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +class BlobLeaseClient { + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId) { + const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = new Container(clientContext); + } + else { + this._isContainer = false; + this._containerOrBlobOperation = new Blob$1(clientContext); + } + if (!leaseId) { + leaseId = coreHttp.generateUuid(); + } + this._leaseId = leaseId; + } + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. 
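The `SASSignatureValuesSanityCheckAndAutofill()` checks above surface as `RangeError`s when a SAS requests a feature newer than its `version`. A small sketch of one such rejection (the expected message comes from the code above; names and key are placeholders):

```js
const {
  generateBlobSASQueryParameters,
  ContainerSASPermissions,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

const credential = new StorageSharedKeyCredential("myaccount", "bXlhY2NvdW50a2V5");

try {
  generateBlobSASQueryParameters(
    {
      containerName: "mycontainer",
      permissions: ContainerSASPermissions.parse("t"), // tag permission
      expiresOn: new Date(Date.now() + 60 * 60 * 1000),
      version: "2019-02-02",                           // older than 2019-12-12
    },
    credential
  );
} catch (err) {
  console.log(err.message); // 'version' must be >= '2019-12-12' when providing 't' permission.
}
```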
+ * + * @readonly + */ + get url() { + return this._url; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + this._leaseId = proposedLeaseId; + return response; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return await this._containerOrBlobOperation.breakLease(operationOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. + */ +class RetriableReadableStream extends stream.Readable { + /** + * Creates an instance of RetriableReadableStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.source.removeAllListeners("data"); + this.source.emit("end"); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? undefined : error); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) 
+ * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. + */ +class BlobDownloadResponse { + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. 
+ * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. 
This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. 
+ * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const AVRO_SYNC_MARKER_SIZE = 16; +const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +const AVRO_CODEC_KEY = "avro.codec"; +const AVRO_SCHEMA_KEY = "avro.schema"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; + } + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; + } + else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. 
+ const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (err) { + // eslint-disable-line no-empty + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new 
Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } + } +} +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; + } + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } + } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; + } +} +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); + } +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function arraysEqual(a, b) { + if (a === b) + return true; + // eslint-disable-next-line eqeqeq + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} + +// Copyright (c) Microsoft Corporation. 
+class AvroReader { + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. + this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } + } + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* parseObjects_1() { + if (!this._initialized) { + yield tslib.__await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (err) { + // We hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield tslib.__await(result); + } + }); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+class AvroReadable { +} + +// Copyright (c) Microsoft Corporation. +const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +class AvroReadableFromStream extends AvroReadable { + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return this.toUint8Array(chunk); + } + else { + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. + resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +class BlobQuickQueryStream extends stream.Readable { + /** + * Creates an instance of BlobQuickQueryStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. 
+ * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. 
+ * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return undefined; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; +(function (BlockBlobTier) { + /** + * Optimized for storing data that is accessed frequently. + */ + BlockBlobTier["Hot"] = "Hot"; + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + BlockBlobTier["Cool"] = "Cool"; + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + BlockBlobTier["Archive"] = "Archive"; +})(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; +(function (PremiumPageBlobTier) { + /** + * P4 Tier. + */ + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. 
+ */ + PremiumPageBlobTier["P80"] = "P80"; +})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); +function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). +} +function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } +} +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. + * @param response - Model PageBlob Range response + */ +function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} + +// Copyright (c) Microsoft Corporation. +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. + * + * @hidden + */ +class BlobBeginCopyFromUrlPoller extends coreLro.Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return coreHttp.delay(this.intervalInMs); + } +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. 
+ * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? + options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } + catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count + ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} + +// Copyright (c) Microsoft Corporation. +/** + * States for Batch. 
+ */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. + */ +class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new events.EventEmitter(); + } + /** + * Add a operation into queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); + } + /** + * Start execute operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); + } + /** + * Get next operation to be executed. Return null when reaching ends. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. + * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } + else { + return; + } + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * This class generates a readable stream from the data in an array of buffers. + */ +class BuffersStream extends stream.Readable { + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. 
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } + else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * maxBufferLength is max size of each buffer in the pooled buffers. + */ +// Can't use import as Typescript doesn't recognize "buffer". +const maxBufferLength = require("buffer").constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +class PooledBuffer { + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. + */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? 
capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. + * + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. + * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. + */ +class BufferScheduler { + /** + * Creates an instance of BufferScheduler. + * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. 
+ */ + this.emitter = new events.EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. 
+ * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. + * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Reads a readable stream into buffer. Fill the buffer from offset to end. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream + */ +async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + stream.on("readable", () => { + if (pos >= count) { + resolve(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? 
count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", reject); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. + */ +async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; + +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +class BlobClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = new Blob$1(this.storageClientContext); + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. 
+ * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlobClient-download", options); + try { + const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!coreHttp.isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-exists", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + async getProperties(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); + try { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-delete", options); + options.conditions = options.conditions || {}; + try { + return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. 
+ */ + async undelete(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-undelete", options); + try { + return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. 
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setTags", options); + try { + return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getTags", options); + try { + const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + async createSnapshot(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. 
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args), + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options, + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + await poller.poll(); + return poller; + } + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. 
+ */ + async abortCopyFromURL(copyId, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + try { + return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. 
+ */ + async setAccessTier(tier, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + try { + return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + try { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + // Customer doesn't specify length, get it + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + } + } + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + options.blockSize) { + batch.addOperation(async () => { + // Exclusive chunk end position + let chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + }); + const stream = response.readableStreamBody; + await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + try { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined. 
+ response.blobDownloadStream = undefined; + return response; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
+ */ + async startCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options) { + const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + try { + return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set immutablility policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. 
+ */ + async setImmutabilityPolicy(immutabilityPolicy, options) { + const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + try { + return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options) { + const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + try { + return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential. 
+ pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = new AppendBlob(this.storageClientContext); + } + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); + const conditions = { ifNoneMatch: ETagAny }; + try { + const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + async seal(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + options.conditions = options.conditions || {}; + try { + return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. 
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = new BlockBlob(this.storageClientContext); + this._blobContext = new Blob$1(this.storageClientContext); + } + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + async query(query, options = {}) { + var _a; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + try { + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + async upload(body, contentLength, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + async syncUploadFromURL(sourceURL, options = {}) { + var _a, _b, _c, _d, _e; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. 
+ */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + async commitBlockList(blocks, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. 
+ */ + async getBlockList(listType, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); + try { + const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); + try { + if (coreHttp.isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. 
+ */ + async uploadBrowserData(browserData, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); + try { + const browserBlob = new Blob([browserData]); + return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || + options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); + try { + if (size <= options.maxSingleShotSize) { + return await this.upload(bodyFactory(0, size), size, updatedOptions); + } + const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = options.blockSize * i; + const end = i === numBlocks - 1 ? 
size : start + options.blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadFile(filePath, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); + try { + const size = (await fsStat(filePath)).size; + return await this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. 
+ */ + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + try { + let blockNum = 0; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.pageBlobContext = new PageBlob(this.storageClientContext); + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + async uploadPages(body, offset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. 
+ */ + async clearPages(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + try { + return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + async getPageRanges(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + try { + return await this.pageBlobContext + .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); + try { + return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItems(offset = 0, count, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeItems(offset, count, options); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. 
+ */ + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); + try { + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + offset: offset, + count: count, + }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. 
The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRangesDiff(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
+ * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + async resize(size, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); + try { + return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); + try { + return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. 
+ * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + async startCopyIncremental(copySource, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); + try { + return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +// Copyright (c) Microsoft Corporation. +async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return buffer.toString(); +} +function utf8ByteLength(str) { + return Buffer.byteLength(str); +} + +// Copyright (c) Microsoft Corporation. +const HTTP_HEADER_DELIMITER = ": "; +const SPACE_DELIMITER = " "; +const NOT_FOUND = -1; +/** + * Util class for parsing batch response. + */ +class BatchResponseParser { + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + // In special case(reported), server may return invalid content-type which could not be parsed. + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + // This should be prevent during coding. + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. 
+ if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); // string before first response boundary is useless + const subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + // Parse sub subResponses. + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. + // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + // Parse headers of sub response. + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. 
+ if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. + if (contentId !== NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount, + }; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var MutexLockStatus; +(function (MutexLockStatus) { + MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; +})(MutexLockStatus || (MutexLockStatus = {})); +/** + * An async mutex lock. + */ +class Mutex { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); + } + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; + } + else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } +} +Mutex.keys = {}; +Mutex.listeners = {}; + +// Copyright (c) Microsoft Corporation. +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. 
+ * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } + finally { + await Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); + try { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); + try { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * Inner batch request class which is responsible for assembling and serializing sub requests. + * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. + */ +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = coreHttp.generateUuid(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + createPipeline(credential) { + const isAnonymousCreds = credential instanceof AnonymousCredential; + const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] + const factories = new Array(policyFactoryLength); + factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer + factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers + if (!isAnonymousCreds) { + factories[2] = coreHttp.isTokenCredential(credential) + ? 
attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + : credential; + } + factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire + return new Pipeline(factories, {}); + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + "", + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const header of request.headers.headersArray()) { + this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + // Fast fail if url for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. + getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { + constructor(batchRequest, nextPolicy, options) { + super(nextPolicy, options); + this.dummyResponse = { + request: new coreHttp.WebResource(), + status: 200, + headers: new coreHttp.HttpHeaders(), + }; + this.batchRequest = batchRequest; + } + async sendRequest(request) { + await this.batchRequest.appendSubRequestToBody(request); + return this.dummyResponse; // Intercept request from going to wire + } +} +class BatchRequestAssemblePolicyFactory { + constructor(batchRequest) { + this.batchRequest = batchRequest; + } + create(nextPolicy, options) { + return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + } +} +class BatchHeaderFilterPolicy extends coreHttp.BaseRequestPolicy { + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(request) { + let xMsHeaderName = ""; + for (const header of request.headers.headersArray()) { + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return this._nextPolicy.sendRequest(request); + } +} +class BatchHeaderFilterPolicyFactory { + create(nextPolicy, options) { + return new BatchHeaderFilterPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. + this.serviceOrContainerContext = new Container(storageClientContext); + } + else { + this.serviceOrContainerContext = new Service(storageClientContext); + } + } + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); + } + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } + else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); + } + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. 
+ * @param options - + */ + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); + try { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). + const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +class ContainerClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = new Container(this.storageClientContext); + } + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - Options to Container Create operation. + * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + async create(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-create", options); + try { + // Spread operator in destructuring assignments, + // this will filter out unwanted properties from the response object into result object + return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-exists", options); + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking container existence", + }); + return false; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. 
+ */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); + try { + return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-delete", options); + try { + return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. 
+ */ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); + try { + return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. + */ + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); + try { + const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. 
+ * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); + try { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "", + }, + id: identifier.id, + }); + } + return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); + try { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response, + }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. 
Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + async deleteBlob(blobName, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); + try { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return await blobClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. + */ + async listBlobFlatSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); + try { + const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. 
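+     *
+     * Example usage (a minimal sketch of a manual segment loop; in most cases the higher-level
+     * `listBlobsByHierarchy` iterator below is more convenient; assumes a `containerClient`
+     * obtained elsewhere):
+     *
+     * ```js
+     * let marker;
+     * do {
+     *   const response = await containerClient.listBlobHierarchySegment("/", marker);
+     *   for (const prefix of response.segment.blobPrefixes || []) {
+     *     console.log(`prefix: ${prefix.name}`);
+     *   }
+     *   for (const blob of response.segment.blobItems) {
+     *     console.log(`blob: ${blob.name}`);
+     *   }
+     *   marker = response.continuationToken;
+     * } while (marker);
+     * ```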
+ */ + async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); + try { + const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + response.segment.blobPrefixes = []; + if (response.segment["BlobPrefix"] !== undefined) { + response.segment.blobPrefixes = ProcessBlobPrefixes(response.segment["BlobPrefix"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listSegments(marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. 
+ */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const listBlobsFlatSegmentResponse = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blobs + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listHierarchySegments(delimiter, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1() { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listItemsByHierarchy(delimiter, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const listBlobsHierarchySegmentResponse = _c.value; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); + } + } + for (const blob of segment.blobItems) { + yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); + } + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blob prefixes and blobs + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + getContainerNameFromUrl() { + let containerName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". 
+ // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.getPath().split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } + catch (error) { + throw new Error("Unable to extract containerName with provided information."); + } + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to create blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. 
+ */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
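+     *
+     * Example usage (a minimal sketch; the permission string is illustrative):
+     *
+     * ```js
+     * // parse() accepts the characters in any order; toString() re-emits them in canonical order
+     * const permissions = AccountSASPermissions.parse("lrwd");
+     * console.log(permissions.toString()); // "rwdl"
+     * ```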
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } + } + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. 
Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; + } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +class BlobServiceClient extends StorageClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = new Service(this.storageClientContext); + } + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" 
+ extractedCreds.accountSas, pipeline); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + return await containerClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); + try { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, + deletedContainerVersion }, updatedOptions)); + return { containerClient, containerUndeleteResponse }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. 
+ * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); + try { + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); + try { + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); + try { + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. 
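+     *
+     * Example usage (a minimal sketch; assumes a `blobServiceClient` for an account with
+     * read-access geo-redundant replication enabled):
+     *
+     * ```js
+     * const stats = await blobServiceClient.getStatistics();
+     * if (stats.geoReplication) {
+     *   console.log(stats.geoReplication.status);
+     * }
+     * ```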
+ */ + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); + try { + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); + try { + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); + try { + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. 
+ * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); + try { + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. 
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); + try { + const response = await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
+ */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } +} + +Object.defineProperty(exports, 'BaseRequestPolicy', { + enumerable: true, + get: function () { return coreHttp.BaseRequestPolicy; } +}); +Object.defineProperty(exports, 'HttpHeaders', { + enumerable: true, + get: function () { return coreHttp.HttpHeaders; } +}); +Object.defineProperty(exports, 'RequestPolicyOptions', { + enumerable: true, + get: function () { return coreHttp.RequestPolicyOptions; } +}); +Object.defineProperty(exports, 'RestError', { + enumerable: true, + get: function () { return coreHttp.RestError; } +}); +Object.defineProperty(exports, 'WebResource', { + enumerable: true, + get: function () { return coreHttp.WebResource; } +}); +Object.defineProperty(exports, 'deserializationPolicy', { + enumerable: true, + get: function () { return coreHttp.deserializationPolicy; } +}); +exports.AccountSASPermissions = AccountSASPermissions; +exports.AccountSASResourceTypes = AccountSASResourceTypes; +exports.AccountSASServices = AccountSASServices; +exports.AnonymousCredential = AnonymousCredential; +exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; +exports.AppendBlobClient = AppendBlobClient; +exports.BlobBatch = BlobBatch; +exports.BlobBatchClient = BlobBatchClient; +exports.BlobClient = BlobClient; +exports.BlobLeaseClient = BlobLeaseClient; +exports.BlobSASPermissions = BlobSASPermissions; +exports.BlobServiceClient = BlobServiceClient; +exports.BlockBlobClient = BlockBlobClient; +exports.ContainerClient = ContainerClient; +exports.ContainerSASPermissions = ContainerSASPermissions; +exports.Credential = Credential; +exports.CredentialPolicy = CredentialPolicy; +exports.PageBlobClient = PageBlobClient; +exports.Pipeline = Pipeline; +exports.SASQueryParameters = SASQueryParameters; +exports.StorageBrowserPolicy = StorageBrowserPolicy; +exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; +exports.StorageOAuthScopes = StorageOAuthScopes; +exports.StorageRetryPolicy = StorageRetryPolicy; +exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; +exports.StorageSharedKeyCredential = StorageSharedKeyCredential; +exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; +exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; +exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; +exports.isPipelineLike = isPipelineLike; +exports.logger = logger; +exports.newPipeline = newPipeline; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/storage-blob/dist/index.js.map b/node_modules/@azure/storage-blob/dist/index.js.map new file mode 100644 index 00000000..04a48816 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sources":["../src/generated/src/models/mappers.ts","../src/generated/src/models/parameters.ts","../src/generated/src/operations/service.ts","../src/generated/src/operations/container.ts","../src/generated/src/operations/blob.ts","../src/generated/src/operations/pageBlob.ts","../src/generated/src/operations/appendBlob.ts","../src/generated/src/operations/blockBlob.ts","../src/log.ts","../src/utils/constants.ts","../src/utils/utils.common.ts","../src/policies/StorageBrowserPolicy.ts","../src/StorageBrowserPolicyFactory.ts","../src/policies/StorageRetryPolicy.ts","../src/StorageRetryPolicyFactory.ts","../src/policies/CredentialPolicy.ts","../src/policies/AnonymousCredentialPolicy.ts","../src/credentials/Credential.ts","../src/credentials/AnonymousCredential.ts","../src/policies/TelemetryPolicy.ts","../src/TelemetryPolicyFactory.ts","../src/utils/cache.ts","../src/policies/StorageBearerTokenChallengeAuthenticationPolicy.ts","../src/Pipeline.ts","../src/policies/StorageSharedKeyCredentialPolicy.ts","../src/credentials/StorageSharedKeyCredential.ts","../src/generated/src/storageClientContext.ts","../src/StorageClient.ts","../src/utils/tracing.ts","../src/sas/BlobSASPermissions.ts","../src/sas/ContainerSASPermissions.ts","../src/credentials/UserDelegationKeyCredential.ts","../src/sas/SasIPRange.ts","../src/sas/SASQueryParameters.ts","../src/sas/BlobSASSignatureValues.ts","../src/BlobLeaseClient.ts","../src/utils/RetriableReadableStream.ts","../src/BlobDownloadResponse.ts","../../storage-internal-avro/src/AvroConstants.ts","../../storage-internal-avro/src/AvroParser.ts","../../storage-internal-avro/src/utils/utils.common.ts","../../storage-internal-avro/src/AvroReader.ts","../../storage-internal-avro/src/AvroReadable.ts","../../storage-internal-avro/src/AvroReadableFromStream.ts","../src/utils/BlobQuickQueryStream.ts","../src/BlobQueryResponse.ts","../src/models.ts","../src/PageBlobRangeResponse.ts","../src/pollers/BlobStartCopyFromUrlPoller.ts","../src/Range.ts","../src/utils/Batch.ts","../../storage-common/src/BuffersStream.ts","../../storage-common/src/PooledBuffer.ts","../../storage-common/src/BufferScheduler.ts","../src/utils/utils.node.ts","../src/Clients.ts","../src/BatchUtils.ts","../src/BatchResponseParser.ts","../src/utils/Mutex.ts","../src/BlobBatch.ts","../src/BlobBatchClient.ts","../src/ContainerClient.ts","../src/sas/AccountSASPermissions.ts","../src/sas/AccountSASResourceTypes.ts","../src/sas/AccountSASServices.ts","../src/sas/AccountSASSignatureValues.ts","../src/BlobServiceClient.ts"],"sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\n\nexport const BlobServiceProperties: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceProperties\",\n xmlName: \"StorageServiceProperties\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceProperties\",\n modelProperties: {\n blobAnalyticsLogging: {\n serializedName: \"Logging\",\n xmlName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\"\n }\n },\n hourMetrics: {\n serializedName: \"HourMetrics\",\n xmlName: \"HourMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n minuteMetrics: {\n serializedName: \"MinuteMetrics\",\n xmlName: \"MinuteMetrics\",\n type: {\n name: \"Composite\",\n className: 
\"Metrics\"\n }\n },\n cors: {\n serializedName: \"Cors\",\n xmlName: \"Cors\",\n xmlIsWrapped: true,\n xmlElementName: \"CorsRule\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"CorsRule\"\n }\n }\n }\n },\n defaultServiceVersion: {\n serializedName: \"DefaultServiceVersion\",\n xmlName: \"DefaultServiceVersion\",\n type: {\n name: \"String\"\n }\n },\n deleteRetentionPolicy: {\n serializedName: \"DeleteRetentionPolicy\",\n xmlName: \"DeleteRetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n },\n staticWebsite: {\n serializedName: \"StaticWebsite\",\n xmlName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\"\n }\n }\n }\n }\n};\n\nexport const Logging: coreHttp.CompositeMapper = {\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n required: true,\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n deleteProperty: {\n serializedName: \"Delete\",\n required: true,\n xmlName: \"Delete\",\n type: {\n name: \"Boolean\"\n }\n },\n read: {\n serializedName: \"Read\",\n required: true,\n xmlName: \"Read\",\n type: {\n name: \"Boolean\"\n }\n },\n write: {\n serializedName: \"Write\",\n required: true,\n xmlName: \"Write\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const RetentionPolicy: coreHttp.CompositeMapper = {\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n days: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"Days\",\n xmlName: \"Days\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const Metrics: coreHttp.CompositeMapper = {\n serializedName: \"Metrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n includeAPIs: {\n serializedName: \"IncludeAPIs\",\n xmlName: \"IncludeAPIs\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const CorsRule: coreHttp.CompositeMapper = {\n serializedName: \"CorsRule\",\n type: {\n name: \"Composite\",\n className: \"CorsRule\",\n modelProperties: {\n allowedOrigins: {\n serializedName: \"AllowedOrigins\",\n required: true,\n xmlName: \"AllowedOrigins\",\n type: {\n name: \"String\"\n }\n },\n allowedMethods: {\n serializedName: \"AllowedMethods\",\n required: true,\n xmlName: \"AllowedMethods\",\n type: {\n name: \"String\"\n }\n },\n allowedHeaders: {\n serializedName: \"AllowedHeaders\",\n required: true,\n xmlName: \"AllowedHeaders\",\n type: {\n name: \"String\"\n }\n },\n exposedHeaders: {\n serializedName: \"ExposedHeaders\",\n required: true,\n xmlName: \"ExposedHeaders\",\n type: {\n name: \"String\"\n }\n },\n maxAgeInSeconds: {\n constraints: {\n InclusiveMinimum: 
0\n },\n serializedName: \"MaxAgeInSeconds\",\n required: true,\n xmlName: \"MaxAgeInSeconds\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const StaticWebsite: coreHttp.CompositeMapper = {\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n indexDocument: {\n serializedName: \"IndexDocument\",\n xmlName: \"IndexDocument\",\n type: {\n name: \"String\"\n }\n },\n errorDocument404Path: {\n serializedName: \"ErrorDocument404Path\",\n xmlName: \"ErrorDocument404Path\",\n type: {\n name: \"String\"\n }\n },\n defaultIndexDocumentPath: {\n serializedName: \"DefaultIndexDocumentPath\",\n xmlName: \"DefaultIndexDocumentPath\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const StorageError: coreHttp.CompositeMapper = {\n serializedName: \"StorageError\",\n type: {\n name: \"Composite\",\n className: \"StorageError\",\n modelProperties: {\n message: {\n serializedName: \"Message\",\n xmlName: \"Message\",\n type: {\n name: \"String\"\n }\n },\n code: {\n serializedName: \"Code\",\n xmlName: \"Code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobServiceStatistics: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceStatistics\",\n xmlName: \"StorageServiceStats\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceStatistics\",\n modelProperties: {\n geoReplication: {\n serializedName: \"GeoReplication\",\n xmlName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\"\n }\n }\n }\n }\n};\n\nexport const GeoReplication: coreHttp.CompositeMapper = {\n serializedName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\",\n modelProperties: {\n status: {\n serializedName: \"Status\",\n required: true,\n xmlName: \"Status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"live\", \"bootstrap\", \"unavailable\"]\n }\n },\n lastSyncOn: {\n serializedName: \"LastSyncTime\",\n required: true,\n xmlName: \"LastSyncTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ListContainersSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListContainersSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListContainersSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n containerItems: {\n serializedName: \"ContainerItems\",\n required: true,\n xmlName: \"Containers\",\n xmlIsWrapped: true,\n xmlElementName: \"Container\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ContainerItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerItem: coreHttp.CompositeMapper = {\n serializedName: \"ContainerItem\",\n xmlName: \"Container\",\n type: {\n name: 
\"Composite\",\n className: \"ContainerItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n }\n }\n }\n};\n\nexport const ContainerProperties: coreHttp.CompositeMapper = {\n serializedName: \"ContainerProperties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\",\n modelProperties: {\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n publicAccess: {\n serializedName: \"PublicAccess\",\n xmlName: \"PublicAccess\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"HasImmutabilityPolicy\",\n xmlName: \"HasImmutabilityPolicy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"HasLegalHold\",\n xmlName: \"HasLegalHold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"DefaultEncryptionScope\",\n xmlName: \"DefaultEncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n preventEncryptionScopeOverride: {\n serializedName: \"DenyEncryptionScopeOverride\",\n xmlName: \"DenyEncryptionScopeOverride\",\n type: {\n name: \"Boolean\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"ImmutableStorageWithVersioningEnabled\",\n xmlName: \"ImmutableStorageWithVersioningEnabled\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const KeyInfo: coreHttp.CompositeMapper = {\n serializedName: \"KeyInfo\",\n type: {\n name: \"Composite\",\n className: \"KeyInfo\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n required: true,\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const UserDelegationKey: coreHttp.CompositeMapper = {\n serializedName: \"UserDelegationKey\",\n type: {\n name: \"Composite\",\n className: 
\"UserDelegationKey\",\n modelProperties: {\n signedObjectId: {\n serializedName: \"SignedOid\",\n required: true,\n xmlName: \"SignedOid\",\n type: {\n name: \"String\"\n }\n },\n signedTenantId: {\n serializedName: \"SignedTid\",\n required: true,\n xmlName: \"SignedTid\",\n type: {\n name: \"String\"\n }\n },\n signedStartsOn: {\n serializedName: \"SignedStart\",\n required: true,\n xmlName: \"SignedStart\",\n type: {\n name: \"String\"\n }\n },\n signedExpiresOn: {\n serializedName: \"SignedExpiry\",\n required: true,\n xmlName: \"SignedExpiry\",\n type: {\n name: \"String\"\n }\n },\n signedService: {\n serializedName: \"SignedService\",\n required: true,\n xmlName: \"SignedService\",\n type: {\n name: \"String\"\n }\n },\n signedVersion: {\n serializedName: \"SignedVersion\",\n required: true,\n xmlName: \"SignedVersion\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobSegment: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobSegment\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobSegment\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n where: {\n serializedName: \"Where\",\n required: true,\n xmlName: \"Where\",\n type: {\n name: \"String\"\n }\n },\n blobs: {\n serializedName: \"Blobs\",\n required: true,\n xmlName: \"Blobs\",\n xmlIsWrapped: true,\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobItem: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobItem\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n tags: {\n serializedName: \"Tags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n }\n }\n }\n};\n\nexport const BlobTags: coreHttp.CompositeMapper = {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\",\n modelProperties: {\n blobTagSet: {\n serializedName: \"BlobTagSet\",\n required: true,\n xmlName: \"TagSet\",\n xmlIsWrapped: true,\n xmlElementName: \"Tag\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobTag\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobTag: coreHttp.CompositeMapper = {\n serializedName: \"BlobTag\",\n xmlName: \"Tag\",\n type: {\n name: \"Composite\",\n className: \"BlobTag\",\n modelProperties: {\n key: {\n serializedName: \"Key\",\n required: true,\n xmlName: \"Key\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const SignedIdentifier: coreHttp.CompositeMapper = {\n serializedName: \"SignedIdentifier\",\n xmlName: \"SignedIdentifier\",\n type: {\n 
name: \"Composite\",\n className: \"SignedIdentifier\",\n modelProperties: {\n id: {\n serializedName: \"Id\",\n required: true,\n xmlName: \"Id\",\n type: {\n name: \"String\"\n }\n },\n accessPolicy: {\n serializedName: \"AccessPolicy\",\n xmlName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\"\n }\n }\n }\n }\n};\n\nexport const AccessPolicy: coreHttp.CompositeMapper = {\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n },\n permissions: {\n serializedName: \"Permission\",\n xmlName: \"Permission\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsFlatSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsFlatSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsFlatSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobFlatListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobFlatListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\",\n modelProperties: {\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobItemInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobItemInternal\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n required: true,\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n snapshot: {\n serializedName: \"Snapshot\",\n required: true,\n xmlName: \"Snapshot\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"VersionId\",\n xmlName: \"VersionId\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"IsCurrentVersion\",\n xmlName: \"IsCurrentVersion\",\n type: {\n name: \"Boolean\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\"\n }\n },\n 
metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n blobTags: {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n },\n objectReplicationMetadata: {\n serializedName: \"ObjectReplicationMetadata\",\n xmlName: \"OrMetadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n hasVersionsOnly: {\n serializedName: \"HasVersionsOnly\",\n xmlName: \"HasVersionsOnly\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobName: coreHttp.CompositeMapper = {\n serializedName: \"BlobName\",\n type: {\n name: \"Composite\",\n className: \"BlobName\",\n modelProperties: {\n encoded: {\n serializedName: \"Encoded\",\n xmlName: \"Encoded\",\n xmlIsAttribute: true,\n type: {\n name: \"Boolean\"\n }\n },\n content: {\n serializedName: \"content\",\n xmlName: \"content\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobPropertiesInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobPropertiesInternal\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\",\n modelProperties: {\n createdOn: {\n serializedName: \"Creation-Time\",\n xmlName: \"Creation-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n serializedName: \"Content-Length\",\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"Content-Type\",\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n },\n contentEncoding: {\n serializedName: \"Content-Encoding\",\n xmlName: \"Content-Encoding\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"Content-Language\",\n xmlName: \"Content-Language\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentDisposition: {\n serializedName: \"Content-Disposition\",\n xmlName: \"Content-Disposition\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"Cache-Control\",\n xmlName: \"Cache-Control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"BlobType\",\n xmlName: \"BlobType\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n copyId: {\n serializedName: \"CopyId\",\n xmlName: \"CopyId\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n 
serializedName: \"CopyStatus\",\n xmlName: \"CopyStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n copySource: {\n serializedName: \"CopySource\",\n xmlName: \"CopySource\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"CopyProgress\",\n xmlName: \"CopyProgress\",\n type: {\n name: \"String\"\n }\n },\n copyCompletedOn: {\n serializedName: \"CopyCompletionTime\",\n xmlName: \"CopyCompletionTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"CopyStatusDescription\",\n xmlName: \"CopyStatusDescription\",\n type: {\n name: \"String\"\n }\n },\n serverEncrypted: {\n serializedName: \"ServerEncrypted\",\n xmlName: \"ServerEncrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n incrementalCopy: {\n serializedName: \"IncrementalCopy\",\n xmlName: \"IncrementalCopy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"DestinationSnapshot\",\n xmlName: \"DestinationSnapshot\",\n type: {\n name: \"String\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n accessTier: {\n serializedName: \"AccessTier\",\n xmlName: \"AccessTier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n },\n accessTierInferred: {\n serializedName: \"AccessTierInferred\",\n xmlName: \"AccessTierInferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"ArchiveStatus\",\n xmlName: \"ArchiveStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"rehydrate-pending-to-hot\",\n \"rehydrate-pending-to-cool\"\n ]\n }\n },\n customerProvidedKeySha256: {\n serializedName: \"CustomerProvidedKeySha256\",\n xmlName: \"CustomerProvidedKeySha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"EncryptionScope\",\n xmlName: \"EncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"AccessTierChangeTime\",\n xmlName: \"AccessTierChangeTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n tagCount: {\n serializedName: \"TagCount\",\n xmlName: \"TagCount\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry-Time\",\n xmlName: \"Expiry-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"Sealed\",\n xmlName: \"Sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"RehydratePriority\",\n xmlName: \"RehydratePriority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessedOn: {\n serializedName: \"LastAccessTime\",\n xmlName: \"LastAccessTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"ImmutabilityPolicyUntilDate\",\n xmlName: \"ImmutabilityPolicyUntilDate\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"ImmutabilityPolicyMode\",\n xmlName: \"ImmutabilityPolicyMode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"LegalHold\",\n xmlName: 
\"LegalHold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsHierarchySegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsHierarchySegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsHierarchySegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n delimiter: {\n serializedName: \"Delimiter\",\n xmlName: \"Delimiter\",\n type: {\n name: \"String\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobHierarchyListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobHierarchyListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\",\n modelProperties: {\n blobPrefixes: {\n serializedName: \"BlobPrefixes\",\n xmlName: \"BlobPrefixes\",\n xmlElementName: \"BlobPrefix\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\"\n }\n }\n }\n },\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobPrefix: coreHttp.CompositeMapper = {\n serializedName: \"BlobPrefix\",\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n }\n }\n }\n};\n\nexport const BlockLookupList: coreHttp.CompositeMapper = {\n serializedName: \"BlockLookupList\",\n xmlName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockLookupList\",\n modelProperties: {\n committed: {\n serializedName: \"Committed\",\n xmlName: \"Committed\",\n xmlElementName: \"Committed\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n uncommitted: {\n serializedName: \"Uncommitted\",\n xmlName: \"Uncommitted\",\n xmlElementName: \"Uncommitted\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n latest: {\n serializedName: \"Latest\",\n xmlName: \"Latest\",\n xmlElementName: \"Latest\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlockList: coreHttp.CompositeMapper = {\n serializedName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockList\",\n modelProperties: {\n committedBlocks: {\n serializedName: \"CommittedBlocks\",\n xmlName: \"CommittedBlocks\",\n xmlIsWrapped: true,\n 
xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n },\n uncommittedBlocks: {\n serializedName: \"UncommittedBlocks\",\n xmlName: \"UncommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const Block: coreHttp.CompositeMapper = {\n serializedName: \"Block\",\n type: {\n name: \"Composite\",\n className: \"Block\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n size: {\n serializedName: \"Size\",\n required: true,\n xmlName: \"Size\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const PageList: coreHttp.CompositeMapper = {\n serializedName: \"PageList\",\n type: {\n name: \"Composite\",\n className: \"PageList\",\n modelProperties: {\n pageRange: {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n xmlElementName: \"PageRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"PageRange\"\n }\n }\n }\n },\n clearRange: {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n xmlElementName: \"ClearRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ClearRange\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageRange: coreHttp.CompositeMapper = {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n type: {\n name: \"Composite\",\n className: \"PageRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ClearRange: coreHttp.CompositeMapper = {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n type: {\n name: \"Composite\",\n className: \"ClearRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const QueryRequest: coreHttp.CompositeMapper = {\n serializedName: \"QueryRequest\",\n xmlName: \"QueryRequest\",\n type: {\n name: \"Composite\",\n className: \"QueryRequest\",\n modelProperties: {\n queryType: {\n serializedName: \"QueryType\",\n required: true,\n xmlName: \"QueryType\",\n type: {\n name: \"String\"\n }\n },\n expression: {\n serializedName: \"Expression\",\n required: true,\n xmlName: \"Expression\",\n type: {\n name: \"String\"\n }\n },\n inputSerialization: {\n serializedName: \"InputSerialization\",\n xmlName: \"InputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n },\n outputSerialization: {\n serializedName: \"OutputSerialization\",\n xmlName: \"OutputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n }\n }\n }\n};\n\nexport const QuerySerialization: coreHttp.CompositeMapper = {\n serializedName: \"QuerySerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\",\n modelProperties: {\n format: {\n serializedName: \"Format\",\n 
xmlName: \"Format\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\"\n }\n }\n }\n }\n};\n\nexport const QueryFormat: coreHttp.CompositeMapper = {\n serializedName: \"QueryFormat\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"delimited\", \"json\", \"arrow\", \"parquet\"]\n }\n },\n delimitedTextConfiguration: {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\"\n }\n },\n jsonTextConfiguration: {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\"\n }\n },\n arrowConfiguration: {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\"\n }\n },\n parquetTextConfiguration: {\n serializedName: \"ParquetTextConfiguration\",\n xmlName: \"ParquetTextConfiguration\",\n type: {\n name: \"any\"\n }\n }\n }\n }\n};\n\nexport const DelimitedTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\",\n modelProperties: {\n columnSeparator: {\n serializedName: \"ColumnSeparator\",\n xmlName: \"ColumnSeparator\",\n type: {\n name: \"String\"\n }\n },\n fieldQuote: {\n serializedName: \"FieldQuote\",\n xmlName: \"FieldQuote\",\n type: {\n name: \"String\"\n }\n },\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n },\n escapeChar: {\n serializedName: \"EscapeChar\",\n xmlName: \"EscapeChar\",\n type: {\n name: \"String\"\n }\n },\n headersPresent: {\n serializedName: \"HeadersPresent\",\n xmlName: \"HasHeaders\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const JsonTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\",\n modelProperties: {\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ArrowConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\",\n modelProperties: {\n schema: {\n serializedName: \"Schema\",\n required: true,\n xmlName: \"Schema\",\n xmlIsWrapped: true,\n xmlElementName: \"Field\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ArrowField\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const ArrowField: coreHttp.CompositeMapper = {\n serializedName: \"ArrowField\",\n xmlName: \"Field\",\n type: {\n name: \"Composite\",\n className: \"ArrowField\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"String\"\n }\n },\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n precision: {\n serializedName: \"Precision\",\n xmlName: \"Precision\",\n type: {\n name: \"Number\"\n }\n },\n scale: {\n serializedName: \"Scale\",\n xmlName: 
\"Scale\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n 
}\n }\n};\n\nexport const ServiceListContainersSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n 
allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n isHierarchicalNamespaceEnabled: {\n serializedName: \"x-ms-is-hns-enabled\",\n xmlName: \"x-ms-is-hns-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n 
name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesHeaders\",\n modelProperties: {\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"x-ms-has-immutability-policy\",\n xmlName: \"x-ms-has-immutability-policy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"x-ms-has-legal-hold\",\n xmlName: \"x-ms-has-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n 
denyEncryptionScopeOverride: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"x-ms-immutable-storage-with-versioning-enabled\",\n xmlName: \"x-ms-immutable-storage-with-versioning-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: 
\"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyHeaders\",\n modelProperties: {\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: 
\"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: 
\"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseHeaders: 
coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n 
type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: 
\"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n 
contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: 
\"ByteArray\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: \"x-ms-creation-time\",\n xmlName: \"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: 
\"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n isIncrementalCopy: {\n serializedName: \"x-ms-incremental-copy\",\n xmlName: \"x-ms-incremental-copy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"x-ms-copy-destination-snapshot\",\n xmlName: \"x-ms-copy-destination-snapshot\",\n type: {\n name: \"String\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n accessTier: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n },\n 
accessTierInferred: {\n serializedName: \"x-ms-access-tier-inferred\",\n xmlName: \"x-ms-access-tier-inferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"x-ms-archive-status\",\n xmlName: \"x-ms-archive-status\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"x-ms-access-tier-change-time\",\n xmlName: \"x-ms-access-tier-change-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteExceptionHeaders\",\n modelProperties: {\n 
errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n 
xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiry: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldHeaders\",\n type: {\n name: 
\"Composite\",\n className: \"BlobSetLegalHoldHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: 
{\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"BlobRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotHeaders\",\n modelProperties: {\n snapshot: {\n serializedName: \"x-ms-snapshot\",\n xmlName: \"x-ms-snapshot\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n 
xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n 
name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n defaultValue: \"success\",\n isConstant: true,\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: 
\"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n 
xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletionTime: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: 
\"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n 
serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const 
PageBlobUploadPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: 
\"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n 
serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberExceptionHeaders: 
coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: 
\"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: 
\"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n 
}\n }\n};\n\nexport const BlockBlobUploadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: 
\"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: 
\"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const 
BlockBlobGetBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n OperationParameter,\n OperationURLParameter,\n OperationQueryParameter,\n QueryCollectionFormat\n} from \"@azure/core-http\";\nimport {\n BlobServiceProperties as BlobServicePropertiesMapper,\n KeyInfo as KeyInfoMapper,\n QueryRequest as QueryRequestMapper,\n BlobTags as BlobTagsMapper,\n BlockLookupList as BlockLookupListMapper\n} from \"../models/mappers\";\n\nexport const contentType: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobServiceProperties: OperationParameter = {\n parameterPath: \"blobServiceProperties\",\n mapper: BlobServicePropertiesMapper\n};\n\nexport const accept: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const url: OperationURLParameter = {\n parameterPath: \"url\",\n mapper: {\n serializedName: \"url\",\n required: true,\n xmlName: \"url\",\n type: {\n name: \"String\"\n }\n },\n skipEncoding: true\n};\n\nexport const restype: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"service\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"properties\",\n isConstant: true,\n serializedName: \"comp\",\n 
type: {\n name: \"String\"\n }\n }\n};\n\nexport const timeoutInSeconds: OperationQueryParameter = {\n parameterPath: [\"options\", \"timeoutInSeconds\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"timeout\",\n xmlName: \"timeout\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const version: OperationParameter = {\n parameterPath: \"version\",\n mapper: {\n defaultValue: \"2021-06-08\",\n isConstant: true,\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const requestId: OperationParameter = {\n parameterPath: [\"options\", \"requestId\"],\n mapper: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const accept1: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp1: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"stats\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp2: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"list\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prefix: OperationQueryParameter = {\n parameterPath: [\"options\", \"prefix\"],\n mapper: {\n serializedName: \"prefix\",\n xmlName: \"prefix\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const marker: OperationQueryParameter = {\n parameterPath: [\"options\", \"marker\"],\n mapper: {\n serializedName: \"marker\",\n xmlName: \"marker\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxPageSize: OperationQueryParameter = {\n parameterPath: [\"options\", \"maxPageSize\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"maxresults\",\n xmlName: \"maxresults\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const include: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListContainersIncludeType\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\"metadata\", \"deleted\", \"system\"]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const keyInfo: OperationParameter = {\n parameterPath: \"keyInfo\",\n mapper: KeyInfoMapper\n};\n\nexport const comp3: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"userdelegationkey\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype1: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"account\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const comp4: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"batch\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const contentLength: OperationParameter = {\n parameterPath: \"contentLength\",\n mapper: {\n serializedName: \"Content-Length\",\n 
required: true,\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const multipartContentType: OperationParameter = {\n parameterPath: \"multipartContentType\",\n mapper: {\n serializedName: \"Content-Type\",\n required: true,\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp5: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blobs\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const where: OperationQueryParameter = {\n parameterPath: [\"options\", \"where\"],\n mapper: {\n serializedName: \"where\",\n xmlName: \"where\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype2: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"container\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const metadata: OperationParameter = {\n parameterPath: [\"options\", \"metadata\"],\n mapper: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n }\n};\n\nexport const access: OperationParameter = {\n parameterPath: [\"options\", \"access\"],\n mapper: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n }\n};\n\nexport const defaultEncryptionScope: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"defaultEncryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const preventEncryptionScopeOverride: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"preventEncryptionScopeOverride\"\n ],\n mapper: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const leaseId: OperationParameter = {\n parameterPath: [\"options\", \"leaseAccessConditions\", \"leaseId\"],\n mapper: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifModifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifModifiedSince\"],\n mapper: {\n serializedName: \"If-Modified-Since\",\n xmlName: \"If-Modified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const ifUnmodifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifUnmodifiedSince\"],\n mapper: {\n serializedName: \"If-Unmodified-Since\",\n xmlName: \"If-Unmodified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const comp6: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"metadata\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp7: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"acl\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const containerAcl: OperationParameter = {\n parameterPath: [\"options\", \"containerAcl\"],\n mapper: {\n serializedName: \"containerAcl\",\n xmlName: 
\"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n }\n};\n\nexport const comp8: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"undelete\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerName: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerName\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-name\",\n xmlName: \"x-ms-deleted-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerVersion: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerVersion\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-version\",\n xmlName: \"x-ms-deleted-container-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp9: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"rename\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContainerName: OperationParameter = {\n parameterPath: \"sourceContainerName\",\n mapper: {\n serializedName: \"x-ms-source-container-name\",\n required: true,\n xmlName: \"x-ms-source-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"sourceLeaseId\"],\n mapper: {\n serializedName: \"x-ms-source-lease-id\",\n xmlName: \"x-ms-source-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp10: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"lease\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"acquire\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const duration: OperationParameter = {\n parameterPath: [\"options\", \"duration\"],\n mapper: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const proposedLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"proposedLeaseId\"],\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action1: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"release\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const leaseId1: OperationParameter = {\n parameterPath: \"leaseId\",\n mapper: {\n serializedName: \"x-ms-lease-id\",\n required: true,\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action2: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"renew\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action3: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"break\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const breakPeriod: OperationParameter = {\n parameterPath: [\"options\", 
\"breakPeriod\"],\n mapper: {\n serializedName: \"x-ms-lease-break-period\",\n xmlName: \"x-ms-lease-break-period\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const action4: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"change\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const proposedLeaseId1: OperationParameter = {\n parameterPath: \"proposedLeaseId\",\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n required: true,\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const include1: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListBlobsIncludeItem\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"copy\",\n \"deleted\",\n \"metadata\",\n \"snapshots\",\n \"uncommittedblobs\",\n \"versions\",\n \"tags\",\n \"immutabilitypolicy\",\n \"legalhold\",\n \"deletedwithversions\"\n ]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const delimiter: OperationQueryParameter = {\n parameterPath: \"delimiter\",\n mapper: {\n serializedName: \"delimiter\",\n required: true,\n xmlName: \"delimiter\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const snapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"snapshot\"],\n mapper: {\n serializedName: \"snapshot\",\n xmlName: \"snapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const versionId: OperationQueryParameter = {\n parameterPath: [\"options\", \"versionId\"],\n mapper: {\n serializedName: \"versionid\",\n xmlName: \"versionid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const range: OperationParameter = {\n parameterPath: [\"options\", \"range\"],\n mapper: {\n serializedName: \"x-ms-range\",\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const rangeGetContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentMD5\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-md5\",\n xmlName: \"x-ms-range-get-content-md5\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const rangeGetContentCRC64: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentCRC64\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-crc64\",\n xmlName: \"x-ms-range-get-content-crc64\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionKey: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKey\"],\n mapper: {\n serializedName: \"x-ms-encryption-key\",\n xmlName: \"x-ms-encryption-key\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionKeySha256: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKeySha256\"],\n mapper: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionAlgorithm: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionAlgorithm\"],\n mapper: {\n serializedName: \"x-ms-encryption-algorithm\",\n xmlName: \"x-ms-encryption-algorithm\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifMatch\"],\n mapper: {\n serializedName: \"If-Match\",\n xmlName: \"If-Match\",\n 
type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifNoneMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifNoneMatch\"],\n mapper: {\n serializedName: \"If-None-Match\",\n xmlName: \"If-None-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifTags: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifTags\"],\n mapper: {\n serializedName: \"x-ms-if-tags\",\n xmlName: \"x-ms-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deleteSnapshots: OperationParameter = {\n parameterPath: [\"options\", \"deleteSnapshots\"],\n mapper: {\n serializedName: \"x-ms-delete-snapshots\",\n xmlName: \"x-ms-delete-snapshots\",\n type: {\n name: \"Enum\",\n allowedValues: [\"include\", \"only\"]\n }\n }\n};\n\nexport const blobDeleteType: OperationQueryParameter = {\n parameterPath: [\"options\", \"blobDeleteType\"],\n mapper: {\n serializedName: \"deletetype\",\n xmlName: \"deletetype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp11: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"expiry\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiryOptions: OperationParameter = {\n parameterPath: \"expiryOptions\",\n mapper: {\n serializedName: \"x-ms-expiry-option\",\n required: true,\n xmlName: \"x-ms-expiry-option\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiresOn: OperationParameter = {\n parameterPath: [\"options\", \"expiresOn\"],\n mapper: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobCacheControl: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobCacheControl\"],\n mapper: {\n serializedName: \"x-ms-blob-cache-control\",\n xmlName: \"x-ms-blob-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentType: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentType\"],\n mapper: {\n serializedName: \"x-ms-blob-content-type\",\n xmlName: \"x-ms-blob-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentMD5\"],\n mapper: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobContentEncoding: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentEncoding\"],\n mapper: {\n serializedName: \"x-ms-blob-content-encoding\",\n xmlName: \"x-ms-blob-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLanguage: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentLanguage\"],\n mapper: {\n serializedName: \"x-ms-blob-content-language\",\n xmlName: \"x-ms-blob-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentDisposition: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentDisposition\"],\n mapper: {\n serializedName: \"x-ms-blob-content-disposition\",\n xmlName: \"x-ms-blob-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp12: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"immutabilityPolicies\",\n isConstant: true,\n serializedName: 
\"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const immutabilityPolicyExpiry: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyExpiry\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const immutabilityPolicyMode: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyMode\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n};\n\nexport const comp13: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"legalhold\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const legalHold: OperationParameter = {\n parameterPath: \"legalHold\",\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n required: true,\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionScope: OperationParameter = {\n parameterPath: [\"options\", \"encryptionScope\"],\n mapper: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp14: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"snapshot\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier: OperationParameter = {\n parameterPath: [\"options\", \"tier\"],\n mapper: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n }\n};\n\nexport const rehydratePriority: OperationParameter = {\n parameterPath: [\"options\", \"rehydratePriority\"],\n mapper: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n }\n};\n\nexport const sourceIfModifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfModifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-modified-since\",\n xmlName: \"x-ms-source-if-modified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfUnmodifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfUnmodifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-unmodified-since\",\n xmlName: \"x-ms-source-if-unmodified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfMatch: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfMatch\"],\n mapper: {\n serializedName: \"x-ms-source-if-match\",\n xmlName: \"x-ms-source-if-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfNoneMatch: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfNoneMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-none-match\",\n xmlName: \"x-ms-source-if-none-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfTags: OperationParameter = {\n 
parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfTags\"],\n mapper: {\n serializedName: \"x-ms-source-if-tags\",\n xmlName: \"x-ms-source-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySource: OperationParameter = {\n parameterPath: \"copySource\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobTagsString: OperationParameter = {\n parameterPath: [\"options\", \"blobTagsString\"],\n mapper: {\n serializedName: \"x-ms-tags\",\n xmlName: \"x-ms-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sealBlob: OperationParameter = {\n parameterPath: [\"options\", \"sealBlob\"],\n mapper: {\n serializedName: \"x-ms-seal-blob\",\n xmlName: \"x-ms-seal-blob\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const legalHold1: OperationParameter = {\n parameterPath: [\"options\", \"legalHold\"],\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const xMsRequiresSync: OperationParameter = {\n parameterPath: \"xMsRequiresSync\",\n mapper: {\n defaultValue: \"true\",\n isConstant: true,\n serializedName: \"x-ms-requires-sync\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentMD5\"],\n mapper: {\n serializedName: \"x-ms-source-content-md5\",\n xmlName: \"x-ms-source-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const copySourceAuthorization: OperationParameter = {\n parameterPath: [\"options\", \"copySourceAuthorization\"],\n mapper: {\n serializedName: \"x-ms-copy-source-authorization\",\n xmlName: \"x-ms-copy-source-authorization\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceTags: OperationParameter = {\n parameterPath: [\"options\", \"copySourceTags\"],\n mapper: {\n serializedName: \"x-ms-copy-source-tag-option\",\n xmlName: \"x-ms-copy-source-tag-option\",\n type: {\n name: \"Enum\",\n allowedValues: [\"REPLACE\", \"COPY\"]\n }\n }\n};\n\nexport const comp15: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"copy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyActionAbortConstant: OperationParameter = {\n parameterPath: \"copyActionAbortConstant\",\n mapper: {\n defaultValue: \"abort\",\n isConstant: true,\n serializedName: \"x-ms-copy-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyId: OperationQueryParameter = {\n parameterPath: \"copyId\",\n mapper: {\n serializedName: \"copyid\",\n required: true,\n xmlName: \"copyid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp16: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tier\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier1: OperationParameter = {\n parameterPath: \"tier\",\n mapper: {\n serializedName: \"x-ms-access-tier\",\n required: true,\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n }\n};\n\nexport const queryRequest: OperationParameter = {\n parameterPath: [\"options\", \"queryRequest\"],\n mapper: 
QueryRequestMapper\n};\n\nexport const comp17: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"query\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp18: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tags\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tags: OperationParameter = {\n parameterPath: [\"options\", \"tags\"],\n mapper: BlobTagsMapper\n};\n\nexport const transactionalContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentMD5\"],\n mapper: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const transactionalContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobType: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"PageBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLength: OperationParameter = {\n parameterPath: \"blobContentLength\",\n mapper: {\n serializedName: \"x-ms-blob-content-length\",\n required: true,\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const blobSequenceNumber: OperationParameter = {\n parameterPath: [\"options\", \"blobSequenceNumber\"],\n mapper: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const contentType1: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/octet-stream\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body1: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const accept2: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp19: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"page\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const pageWrite: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"update\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThanOrEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThanOrEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-le\",\n xmlName: \"x-ms-if-sequence-number-le\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThan: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThan\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-lt\",\n xmlName: \"x-ms-if-sequence-number-lt\",\n type: {\n name: \"Number\"\n }\n 
}\n};\n\nexport const ifSequenceNumberEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-eq\",\n xmlName: \"x-ms-if-sequence-number-eq\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const pageWrite1: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"clear\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceUrl: OperationParameter = {\n parameterPath: \"sourceUrl\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceRange: OperationParameter = {\n parameterPath: \"sourceRange\",\n mapper: {\n serializedName: \"x-ms-source-range\",\n required: true,\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-source-content-crc64\",\n xmlName: \"x-ms-source-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const range1: OperationParameter = {\n parameterPath: \"range\",\n mapper: {\n serializedName: \"x-ms-range\",\n required: true,\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp20: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"pagelist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevsnapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"prevsnapshot\"],\n mapper: {\n serializedName: \"prevsnapshot\",\n xmlName: \"prevsnapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevSnapshotUrl: OperationParameter = {\n parameterPath: [\"options\", \"prevSnapshotUrl\"],\n mapper: {\n serializedName: \"x-ms-previous-snapshot-url\",\n xmlName: \"x-ms-previous-snapshot-url\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sequenceNumberAction: OperationParameter = {\n parameterPath: \"sequenceNumberAction\",\n mapper: {\n serializedName: \"x-ms-sequence-number-action\",\n required: true,\n xmlName: \"x-ms-sequence-number-action\",\n type: {\n name: \"Enum\",\n allowedValues: [\"max\", \"update\", \"increment\"]\n }\n }\n};\n\nexport const comp21: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"incrementalcopy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType1: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"AppendBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp22: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"appendblock\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxSize: OperationParameter = {\n parameterPath: [\"options\", \"appendPositionAccessConditions\", \"maxSize\"],\n mapper: {\n serializedName: \"x-ms-blob-condition-maxsize\",\n xmlName: \"x-ms-blob-condition-maxsize\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const appendPosition: OperationParameter = {\n parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n 
\"appendPosition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-appendpos\",\n xmlName: \"x-ms-blob-condition-appendpos\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const sourceRange1: OperationParameter = {\n parameterPath: [\"options\", \"sourceRange\"],\n mapper: {\n serializedName: \"x-ms-source-range\",\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp23: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"seal\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType2: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"BlockBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceBlobProperties: OperationParameter = {\n parameterPath: [\"options\", \"copySourceBlobProperties\"],\n mapper: {\n serializedName: \"x-ms-copy-source-blob-properties\",\n xmlName: \"x-ms-copy-source-blob-properties\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const comp24: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"block\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blockId: OperationQueryParameter = {\n parameterPath: \"blockId\",\n mapper: {\n serializedName: \"blockid\",\n required: true,\n xmlName: \"blockid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blocks: OperationParameter = {\n parameterPath: \"blocks\",\n mapper: BlockLookupListMapper\n};\n\nexport const comp25: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blocklist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const listType: OperationQueryParameter = {\n parameterPath: \"listType\",\n mapper: {\n defaultValue: \"committed\",\n serializedName: \"blocklisttype\",\n required: true,\n xmlName: \"blocklisttype\",\n type: {\n name: \"Enum\",\n allowedValues: [\"committed\", \"uncommitted\", \"all\"]\n }\n }\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobServiceProperties,\n ServiceSetPropertiesOptionalParams,\n ServiceSetPropertiesResponse,\n ServiceGetPropertiesOptionalParams,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsOptionalParams,\n ServiceGetStatisticsResponse,\n ServiceListContainersSegmentOptionalParams,\n ServiceListContainersSegmentResponse,\n KeyInfo,\n ServiceGetUserDelegationKeyOptionalParams,\n ServiceGetUserDelegationKeyResponse,\n ServiceGetAccountInfoResponse,\n ServiceSubmitBatchOptionalParams,\n ServiceSubmitBatchResponse,\n ServiceFilterBlobsOptionalParams,\n ServiceFilterBlobsResponse\n} from \"../models\";\n\n/** Class representing a Service. 
*/\nexport class Service {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Service class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * Sets properties for a storage account's Blob service endpoint, including properties for Storage\n * Analytics and CORS (Cross-Origin Resource Sharing) rules\n * @param blobServiceProperties The StorageService properties.\n * @param options The options parameters.\n */\n setProperties(\n blobServiceProperties: BlobServiceProperties,\n options?: ServiceSetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobServiceProperties,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the properties of a storage account's Blob service, including properties for Storage Analytics\n * and CORS (Cross-Origin Resource Sharing) rules.\n * @param options The options parameters.\n */\n getProperties(\n options?: ServiceGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only available on the\n * secondary location endpoint when read-access geo-redundant replication is enabled for the storage\n * account.\n * @param options The options parameters.\n */\n getStatistics(\n options?: ServiceGetStatisticsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getStatisticsOperationSpec\n ) as Promise;\n }\n\n /**\n * The List Containers Segment operation returns a list of the containers under the specified account\n * @param options The options parameters.\n */\n listContainersSegment(\n options?: ServiceListContainersSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listContainersSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves a user delegation key for the Blob service. 
This is only a valid operation when using\n * bearer token authentication.\n * @param keyInfo Key information\n * @param options The options parameters.\n */\n getUserDelegationKey(\n keyInfo: KeyInfo,\n options?: ServiceGetUserDelegationKeyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n keyInfo,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getUserDelegationKeyOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ServiceSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a\n * given search expression. 
Filter blobs searches across all containers within a storage account but\n * can be scoped within the expression to a single container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ServiceFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst setPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ServiceSetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders\n }\n },\n requestBody: Parameters.blobServiceProperties,\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceProperties,\n headersMapper: Mappers.ServiceGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getStatisticsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceStatistics,\n headersMapper: Mappers.ServiceGetStatisticsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listContainersSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListContainersSegmentResponse,\n headersMapper: Mappers.ServiceListContainersSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.include\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getUserDelegationKeyOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: Mappers.UserDelegationKey,\n headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders\n },\n default: {\n 
bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders\n }\n },\n requestBody: Parameters.keyInfo,\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp3\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ServiceGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ServiceSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ServiceFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n ContainerCreateOptionalParams,\n ContainerCreateResponse,\n ContainerGetPropertiesOptionalParams,\n ContainerGetPropertiesResponse,\n ContainerDeleteOptionalParams,\n ContainerDeleteResponse,\n ContainerSetMetadataOptionalParams,\n ContainerSetMetadataResponse,\n ContainerGetAccessPolicyOptionalParams,\n ContainerGetAccessPolicyResponse,\n ContainerSetAccessPolicyOptionalParams,\n ContainerSetAccessPolicyResponse,\n ContainerRestoreOptionalParams,\n ContainerRestoreResponse,\n ContainerRenameOptionalParams,\n ContainerRenameResponse,\n 
ContainerSubmitBatchOptionalParams,\n ContainerSubmitBatchResponse,\n ContainerFilterBlobsOptionalParams,\n ContainerFilterBlobsResponse,\n ContainerAcquireLeaseOptionalParams,\n ContainerAcquireLeaseResponse,\n ContainerReleaseLeaseOptionalParams,\n ContainerReleaseLeaseResponse,\n ContainerRenewLeaseOptionalParams,\n ContainerRenewLeaseResponse,\n ContainerBreakLeaseOptionalParams,\n ContainerBreakLeaseResponse,\n ContainerChangeLeaseOptionalParams,\n ContainerChangeLeaseResponse,\n ContainerListBlobFlatSegmentOptionalParams,\n ContainerListBlobFlatSegmentResponse,\n ContainerListBlobHierarchySegmentOptionalParams,\n ContainerListBlobHierarchySegmentResponse,\n ContainerGetAccountInfoResponse\n} from \"../models\";\n\n/** Class representing a Container. */\nexport class Container {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Container class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * creates a new container under the specified account. If the container with the same name already\n * exists, the operation fails\n * @param options The options parameters.\n */\n create(\n options?: ContainerCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * returns all user-defined metadata and system properties for the specified container. The data\n * returned does not include the container's list of blobs\n * @param options The options parameters.\n */\n getProperties(\n options?: ContainerGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * operation marks the specified container for deletion. The container and any blobs contained within\n * it are later deleted during garbage collection\n * @param options The options parameters.\n */\n delete(\n options?: ContainerDeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * operation sets one or more user-defined name-value pairs for the specified container.\n * @param options The options parameters.\n */\n setMetadata(\n options?: ContainerSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the permissions for the specified container. 
The permissions indicate whether container data\n * may be accessed publicly.\n * @param options The options parameters.\n */\n getAccessPolicy(\n options?: ContainerGetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * sets the permissions for the specified container. The permissions indicate whether blobs in a\n * container may be accessed publicly.\n * @param options The options parameters.\n */\n setAccessPolicy(\n options?: ContainerSetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * Restores a previously-deleted container.\n * @param options The options parameters.\n */\n restore(\n options?: ContainerRestoreOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n restoreOperationSpec\n ) as Promise;\n }\n\n /**\n * Renames an existing container.\n * @param sourceContainerName Required. Specifies the name of the container to rename.\n * @param options The options parameters.\n */\n rename(\n sourceContainerName: string,\n options?: ContainerRenameOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceContainerName,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renameOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ContainerSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given\n * search expression. Filter blobs searches within the given container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ContainerFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n acquireLease(\n options?: ContainerAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: ContainerReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: ContainerRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n breakLease(\n options?: ContainerBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. 
See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: ContainerChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param options The options parameters.\n */\n listBlobFlatSegment(\n options?: ContainerListBlobFlatSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobFlatSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix\n * element in the response body that acts as a placeholder for all blobs whose names begin with the\n * same substring up to the appearance of the delimiter character. The delimiter may be a single\n * character or a string.\n * @param options The options parameters.\n */\n listBlobHierarchySegment(\n delimiter: string,\n options?: ContainerListBlobHierarchySegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n delimiter,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobHierarchySegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.access,\n Parameters.defaultEncryptionScope,\n Parameters.preventEncryptionScopeOverride\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: 
[\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerDeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetMetadataExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp6\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: {\n name: \"Sequence\",\n element: {\n type: { name: \"Composite\", className: \"SignedIdentifier\" }\n }\n },\n serializedName: \"SignedIdentifiers\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\"\n },\n headersMapper: Mappers.ContainerGetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders\n }\n },\n requestBody: Parameters.containerAcl,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.access,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst restoreOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerRestoreHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRestoreExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n 
Parameters.restype2,\n Parameters.comp8\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.deletedContainerName,\n Parameters.deletedContainerVersion\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renameOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenameHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenameExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp9\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.sourceContainerName,\n Parameters.sourceLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ContainerSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp4,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ContainerFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders\n }\n },\n 
queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobFlatSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsFlatSegmentResponse,\n headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobHierarchySegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsHierarchySegmentResponse,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders\n },\n default: {\n 
bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1,\n Parameters.delimiter\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobDownloadOptionalParams,\n BlobDownloadResponse,\n BlobGetPropertiesOptionalParams,\n BlobGetPropertiesResponse,\n BlobDeleteOptionalParams,\n BlobDeleteResponse,\n BlobUndeleteOptionalParams,\n BlobUndeleteResponse,\n BlobExpiryOptions,\n BlobSetExpiryOptionalParams,\n BlobSetExpiryResponse,\n BlobSetHttpHeadersOptionalParams,\n BlobSetHttpHeadersResponse,\n BlobSetImmutabilityPolicyOptionalParams,\n BlobSetImmutabilityPolicyResponse,\n BlobDeleteImmutabilityPolicyOptionalParams,\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetLegalHoldOptionalParams,\n BlobSetLegalHoldResponse,\n BlobSetMetadataOptionalParams,\n BlobSetMetadataResponse,\n BlobAcquireLeaseOptionalParams,\n BlobAcquireLeaseResponse,\n BlobReleaseLeaseOptionalParams,\n BlobReleaseLeaseResponse,\n BlobRenewLeaseOptionalParams,\n BlobRenewLeaseResponse,\n BlobChangeLeaseOptionalParams,\n BlobChangeLeaseResponse,\n BlobBreakLeaseOptionalParams,\n BlobBreakLeaseResponse,\n BlobCreateSnapshotOptionalParams,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLOptionalParams,\n BlobStartCopyFromURLResponse,\n BlobCopyFromURLOptionalParams,\n BlobCopyFromURLResponse,\n BlobAbortCopyFromURLOptionalParams,\n BlobAbortCopyFromURLResponse,\n AccessTier,\n BlobSetTierOptionalParams,\n BlobSetTierResponse,\n BlobGetAccountInfoResponse,\n BlobQueryOptionalParams,\n BlobQueryResponse,\n BlobGetTagsOptionalParams,\n BlobGetTagsResponse,\n BlobSetTagsOptionalParams,\n BlobSetTagsResponse\n} from \"../models\";\n\n/** Class representing a Blob. */\nexport class Blob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Blob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Download operation reads or downloads a blob from the system, including its metadata and\n * properties. 
You can also call Download to read a snapshot.\n * @param options The options parameters.\n */\n download(\n options?: BlobDownloadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n downloadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system\n * properties for the blob. It does not return the content of the blob.\n * @param options The options parameters.\n */\n getProperties(\n options?: BlobGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is\n * permanently removed from the storage account. If the storage account's soft delete feature is\n * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible\n * immediately. However, the blob service retains the blob or snapshot for the number of days specified\n * by the DeleteRetentionPolicy section of [Storage service properties]\n * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is\n * permanently removed from the storage account. Note that you continue to be charged for the\n * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the\n * \"include=deleted\" query parameter to discover which blobs and snapshots have been soft deleted. You\n * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a\n * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404\n * (ResourceNotFound).\n * @param options The options parameters.\n */\n delete(options?: BlobDeleteOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Undelete a blob that was previously soft deleted\n * @param options The options parameters.\n */\n undelete(\n options?: BlobUndeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n undeleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Sets the time a blob will expire and be deleted.\n * @param expiryOptions Required. 
Indicates mode of the expiry time\n * @param options The options parameters.\n */\n setExpiry(\n expiryOptions: BlobExpiryOptions,\n options?: BlobSetExpiryOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n expiryOptions,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setExpiryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set HTTP Headers operation sets system properties on the blob\n * @param options The options parameters.\n */\n setHttpHeaders(\n options?: BlobSetHttpHeadersOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setHttpHeadersOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Immutability Policy operation sets the immutability policy on the blob\n * @param options The options parameters.\n */\n setImmutabilityPolicy(\n options?: BlobSetImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Delete Immutability Policy operation deletes the immutability policy on the blob\n * @param options The options parameters.\n */\n deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Legal Hold operation sets a legal hold on the blob.\n * @param legalHold Specified if a legal hold should be set on the blob.\n * @param options The options parameters.\n */\n setLegalHold(\n legalHold: boolean,\n options?: BlobSetLegalHoldOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n legalHold,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setLegalHoldOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more\n * name-value pairs\n * @param options The options parameters.\n */\n setMetadata(\n options?: BlobSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n acquireLease(\n options?: BlobAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and 
delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: BlobReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: BlobRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: BlobChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n breakLease(\n options?: BlobBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * The Create Snapshot operation creates a read-only snapshot of a blob\n * @param options The options parameters.\n */\n createSnapshot(\n options?: BlobCreateSnapshotOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createSnapshotOperationSpec\n ) as Promise;\n }\n\n /**\n * The Start Copy From URL operation copies a blob or an internet resource to a new blob.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. 
The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n startCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return\n * a response until the copy is complete.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyFromURL(\n copySource: string,\n options?: BlobCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination\n * blob with zero length and full metadata.\n * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob\n * operation.\n * @param options The options parameters.\n */\n abortCopyFromURL(\n copyId: string,\n options?: BlobAbortCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copyId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n abortCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant storage only). A\n * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block\n * blob's tier determines Hot/Cool/Archive storage type. 
This operation does not update the blob's\n * ETag.\n * @param tier Indicates the tier to be set on the blob.\n * @param options The options parameters.\n */\n setTier(\n tier: AccessTier,\n options?: BlobSetTierOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n tier,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTierOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Query operation enables users to select/project on blob data by providing simple query\n * expressions.\n * @param options The options parameters.\n */\n query(options?: BlobQueryOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n queryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Tags operation enables users to get the tags associated with a blob.\n * @param options The options parameters.\n */\n getTags(options?: BlobGetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getTagsOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tags operation enables users to set tags on a blob.\n * @param options The options parameters.\n */\n setTags(options?: BlobSetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTagsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst downloadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDownloadExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.rangeGetContentMD5,\n Parameters.rangeGetContentCRC64,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: 
coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"HEAD\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.blobDeleteType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.deleteSnapshots\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst undeleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobUndeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobUndeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setExpiryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetExpiryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetExpiryExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.expiryOptions,\n Parameters.expiresOn\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setHttpHeadersOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetHttpHeadersHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n 
Parameters.blobContentLanguage,\n Parameters.blobContentDisposition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifUnmodifiedSince,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setLegalHoldOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetLegalHoldHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.legalHold\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetMetadataExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n 
Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobBreakLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst createSnapshotOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobCreateSnapshotHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n 
Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst startCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobStartCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.tier,\n Parameters.rehydratePriority,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sealBlob,\n Parameters.legalHold1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.xMsRequiresSync,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst abortCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobAbortCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp15,\n Parameters.copyId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.copyActionAbortConstant\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTierOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetTierHeaders\n },\n 202: {\n headersMapper: Mappers.BlobSetTierHeaders\n 
},\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTierExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp16\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags,\n Parameters.rehydratePriority,\n Parameters.tier1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst queryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobQueryExceptionHeaders\n }\n },\n requestBody: Parameters.queryRequest,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp17\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobTags,\n headersMapper: Mappers.BlobGetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetTagsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobSetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTagsExceptionHeaders\n }\n },\n requestBody: Parameters.tags,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifTags,\n 
Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n PageBlobCreateOptionalParams,\n PageBlobCreateResponse,\n PageBlobUploadPagesOptionalParams,\n PageBlobUploadPagesResponse,\n PageBlobClearPagesOptionalParams,\n PageBlobClearPagesResponse,\n PageBlobUploadPagesFromURLOptionalParams,\n PageBlobUploadPagesFromURLResponse,\n PageBlobGetPageRangesOptionalParams,\n PageBlobGetPageRangesResponse,\n PageBlobGetPageRangesDiffOptionalParams,\n PageBlobGetPageRangesDiffResponse,\n PageBlobResizeOptionalParams,\n PageBlobResizeResponse,\n SequenceNumberActionType,\n PageBlobUpdateSequenceNumberOptionalParams,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobCopyIncrementalOptionalParams,\n PageBlobCopyIncrementalResponse\n} from \"../models\";\n\n/** Class representing a PageBlob. */\nexport class PageBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class PageBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create operation creates a new page blob.\n * @param contentLength The length of the request.\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. 
The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n blobContentLength: number,\n options?: PageBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n uploadPages(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: PageBlobUploadPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Clear Pages operation clears a set of pages from a page blob\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n clearPages(\n contentLength: number,\n options?: PageBlobClearPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n clearPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a\n * URL\n * @param sourceUrl Specify a URL to the copy source.\n * @param sourceRange Bytes of source data in the specified range. The length of this range should\n * match the ContentLength header and x-ms-range/Range destination range header.\n * @param contentLength The length of the request.\n * @param range The range of bytes to which the source range would be written. 
The range should be 512\n * aligned and range-end is required.\n * @param options The options parameters.\n */\n uploadPagesFromURL(\n sourceUrl: string,\n sourceRange: string,\n contentLength: number,\n range: string,\n options?: PageBlobUploadPagesFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n sourceRange,\n contentLength,\n range,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a\n * page blob\n * @param options The options parameters.\n */\n getPageRanges(\n options?: PageBlobGetPageRangesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were\n * changed between target blob and previous snapshot.\n * @param options The options parameters.\n */\n getPageRangesDiff(\n options?: PageBlobGetPageRangesDiffOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesDiffOperationSpec\n ) as Promise;\n }\n\n /**\n * Resize the Blob\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n resize(\n blobContentLength: number,\n options?: PageBlobResizeOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n resizeOperationSpec\n ) as Promise;\n }\n\n /**\n * Update the sequence number of the blob\n * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request.\n * This property applies to page blobs only. This property indicates how the service should modify the\n * blob's sequence number\n * @param options The options parameters.\n */\n updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n options?: PageBlobUpdateSequenceNumberOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sequenceNumberAction,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n updateSequenceNumberOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob.\n * The snapshot is copied such that only the differential changes between the previously copied\n * snapshot are transferred to the destination. The copied snapshots are complete copies of the\n * original snapshot and can be read or copied from as usual. This API is supported since REST version\n * 2016-05-31.\n * @param copySource Specifies the name of the source page blob snapshot. 
This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyIncremental(\n copySource: string,\n options?: PageBlobCopyIncrementalOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyIncrementalOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType,\n Parameters.blobContentLength,\n Parameters.blobSequenceNumber\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo\n ],\n mediaType: \"binary\",\n serializer\n};\nconst clearPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobClearPagesHeaders\n },\n default: {\n bodyMapper: 
Mappers.StorageError,\n headersMapper: Mappers.PageBlobClearPagesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.pageWrite1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.sourceUrl,\n Parameters.sourceRange,\n Parameters.sourceContentCrc64,\n Parameters.range1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesDiffOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20,\n 
Parameters.prevsnapshot\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.prevSnapshotUrl\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst resizeOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobResizeHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobResizeExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.blobContentLength\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst updateSequenceNumberOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobSequenceNumber,\n Parameters.sequenceNumberAction\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyIncrementalOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.PageBlobCopyIncrementalHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.copySource\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n AppendBlobCreateOptionalParams,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockOptionalParams,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlOptionalParams,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobSealOptionalParams,\n 
AppendBlobSealResponse\n} from \"../models\";\n\n/** Class representing a AppendBlob. */\nexport class AppendBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class AppendBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create Append Blob operation creates a new append blob.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n options?: AppendBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob. The\n * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to\n * AppendBlob. Append Block is supported only on version 2015-02-21 version or later.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n appendBlock(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: AppendBlobAppendBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob where\n * the contents are read from a source url. The Append Block operation is permitted only if the blob\n * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version\n * 2015-02-21 version or later.\n * @param sourceUrl Specify a URL to the copy source.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n appendBlockFromUrl(\n sourceUrl: string,\n contentLength: number,\n options?: AppendBlobAppendBlockFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Seal operation seals the Append Blob to make it read-only. 
Seal is supported only on version\n * 2019-12-12 version or later.\n * @param options The options parameters.\n */\n seal(\n options?: AppendBlobSealOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n sealOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst appendBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.maxSize,\n Parameters.appendPosition\n ],\n mediaType: \"binary\",\n serializer\n};\nconst appendBlockFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n 
Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.transactionalContentMD5,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.maxSize,\n Parameters.appendPosition,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst sealOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.AppendBlobSealHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobSealExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.appendPosition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlockBlobUploadOptionalParams,\n BlockBlobUploadResponse,\n BlockBlobPutBlobFromUrlOptionalParams,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobStageBlockOptionalParams,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLOptionalParams,\n BlockBlobStageBlockFromURLResponse,\n BlockLookupList,\n BlockBlobCommitBlockListOptionalParams,\n BlockBlobCommitBlockListResponse,\n BlockListType,\n BlockBlobGetBlockListOptionalParams,\n BlockBlobGetBlockListResponse\n} from \"../models\";\n\n/** Class representing a BlockBlob. */\nexport class BlockBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class BlockBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing\n * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put\n * Blob; the content of the existing blob is overwritten with the content of the new blob. 
To perform a\n * partial update of the content of a block blob, use the Put Block List operation.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n upload(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobUploadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read\n * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are\n * not supported with Put Blob from URL; the content of an existing blob is overwritten with the\n * content of the new blob. To perform partial updates to a block blob’s contents using a source URL,\n * use the Put Block from URL API in conjunction with Put Block List.\n * @param contentLength The length of the request.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n putBlobFromUrl(\n contentLength: number,\n copySource: string,\n options?: BlockBlobPutBlobFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n putBlobFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n stageBlock(\n blockId: string,\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobStageBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob where the contents\n * are read from a URL.\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param sourceUrl Specify a URL to the copy source.\n * @param options The options parameters.\n */\n stageBlockFromURL(\n blockId: string,\n contentLength: number,\n sourceUrl: string,\n options?: BlockBlobStageBlockFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n sourceUrl,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the\n * blob. In order to be written as part of a blob, a block must have been successfully written to the\n * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading\n * only those blocks that have changed, then committing the new and existing blocks together. You can\n * do this by specifying whether to commit a block from the committed block list or from the\n * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list\n * it may belong to.\n * @param blocks Blob Blocks.\n * @param options The options parameters.\n */\n commitBlockList(\n blocks: BlockLookupList,\n options?: BlockBlobCommitBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blocks,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n commitBlockListOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block\n * blob\n * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted\n * blocks, or both lists together.\n * @param options The options parameters.\n */\n getBlockList(\n listType: BlockListType,\n options?: BlockBlobGetBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n listType,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getBlockListOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst uploadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobUploadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobUploadExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n 
Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.blobType2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst putBlobFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags,\n Parameters.transactionalContentMD5,\n Parameters.blobType2,\n Parameters.copySourceBlobProperties\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst stageBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst stageBlockFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n 
Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst commitBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobCommitBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders\n }\n },\n requestBody: Parameters.blocks,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlockList,\n headersMapper: Mappers.BlockBlobGetBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp25,\n Parameters.listType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n */\nexport const logger = createClientLogger(\"storage-blob\");\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const SDK_VERSION: string = \"12.10.0\";\nexport const SERVICE_VERSION: string = \"2021-06-08\";\n\nexport const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES: number = 256 * 1024 * 1024; // 256MB\nexport const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES: number = 4000 * 1024 * 1024; // 4000MB\nexport const BLOCK_BLOB_MAX_BLOCKS: number = 50000;\nexport const DEFAULT_BLOCK_BUFFER_SIZE_BYTES: number = 8 * 1024 * 1024; // 8MB\nexport const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES: number = 4 * 1024 * 1024; // 4MB\nexport const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS: number = 5;\n/**\n * The OAuth scope to use with Azure Storage.\n */\nexport const 
StorageOAuthScopes: string | string[] = \"https://storage.azure.com/.default\";\n\nexport const URLConstants = {\n Parameters: {\n FORCE_BROWSER_NO_CACHE: \"_\",\n SIGNATURE: \"sig\",\n SNAPSHOT: \"snapshot\",\n VERSIONID: \"versionid\",\n TIMEOUT: \"timeout\",\n },\n};\n\nexport const HTTPURLConnection = {\n HTTP_ACCEPTED: 202,\n HTTP_CONFLICT: 409,\n HTTP_NOT_FOUND: 404,\n HTTP_PRECON_FAILED: 412,\n HTTP_RANGE_NOT_SATISFIABLE: 416,\n};\n\nexport const HeaderConstants = {\n AUTHORIZATION: \"Authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n CONTENT_ENCODING: \"Content-Encoding\",\n CONTENT_ID: \"Content-ID\",\n CONTENT_LANGUAGE: \"Content-Language\",\n CONTENT_LENGTH: \"Content-Length\",\n CONTENT_MD5: \"Content-Md5\",\n CONTENT_TRANSFER_ENCODING: \"Content-Transfer-Encoding\",\n CONTENT_TYPE: \"Content-Type\",\n COOKIE: \"Cookie\",\n DATE: \"date\",\n IF_MATCH: \"if-match\",\n IF_MODIFIED_SINCE: \"if-modified-since\",\n IF_NONE_MATCH: \"if-none-match\",\n IF_UNMODIFIED_SINCE: \"if-unmodified-since\",\n PREFIX_FOR_STORAGE: \"x-ms-\",\n RANGE: \"Range\",\n USER_AGENT: \"User-Agent\",\n X_MS_CLIENT_REQUEST_ID: \"x-ms-client-request-id\",\n X_MS_COPY_SOURCE: \"x-ms-copy-source\",\n X_MS_DATE: \"x-ms-date\",\n X_MS_ERROR_CODE: \"x-ms-error-code\",\n X_MS_VERSION: \"x-ms-version\",\n};\n\nexport const ETagNone = \"\";\nexport const ETagAny = \"*\";\n\nexport const SIZE_1_MB = 1 * 1024 * 1024;\nexport const BATCH_MAX_REQUEST = 256;\nexport const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;\nexport const HTTP_LINE_ENDING = \"\\r\\n\";\nexport const HTTP_VERSION_1_1 = \"HTTP/1.1\";\n\nexport const EncryptionAlgorithmAES25 = \"AES256\";\n\nexport const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`;\n\nexport const StorageBlobLoggingAllowedHeaderNames = [\n \"Access-Control-Allow-Origin\",\n \"Cache-Control\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"Request-Id\",\n \"traceparent\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"x-ms-client-request-id\",\n \"x-ms-date\",\n \"x-ms-error-code\",\n \"x-ms-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-version\",\n \"Accept-Ranges\",\n \"Content-Disposition\",\n \"Content-Encoding\",\n \"Content-Language\",\n \"Content-MD5\",\n \"Content-Range\",\n \"ETag\",\n \"Last-Modified\",\n \"Server\",\n \"Vary\",\n \"x-ms-content-crc64\",\n \"x-ms-copy-action\",\n \"x-ms-copy-completion-time\",\n \"x-ms-copy-id\",\n \"x-ms-copy-progress\",\n \"x-ms-copy-status\",\n \"x-ms-has-immutability-policy\",\n \"x-ms-has-legal-hold\",\n \"x-ms-lease-state\",\n \"x-ms-lease-status\",\n \"x-ms-range\",\n \"x-ms-request-server-encrypted\",\n \"x-ms-server-encrypted\",\n \"x-ms-snapshot\",\n \"x-ms-source-range\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"x-ms-access-tier\",\n \"x-ms-access-tier-change-time\",\n \"x-ms-access-tier-inferred\",\n \"x-ms-account-kind\",\n \"x-ms-archive-status\",\n \"x-ms-blob-append-offset\",\n \"x-ms-blob-cache-control\",\n \"x-ms-blob-committed-block-count\",\n \"x-ms-blob-condition-appendpos\",\n \"x-ms-blob-condition-maxsize\",\n \"x-ms-blob-content-disposition\",\n \"x-ms-blob-content-encoding\",\n \"x-ms-blob-content-language\",\n \"x-ms-blob-content-length\",\n \"x-ms-blob-content-md5\",\n \"x-ms-blob-content-type\",\n \"x-ms-blob-public-access\",\n 
\"x-ms-blob-sequence-number\",\n \"x-ms-blob-type\",\n \"x-ms-copy-destination-snapshot\",\n \"x-ms-creation-time\",\n \"x-ms-default-encryption-scope\",\n \"x-ms-delete-snapshots\",\n \"x-ms-delete-type-permanent\",\n \"x-ms-deny-encryption-scope-override\",\n \"x-ms-encryption-algorithm\",\n \"x-ms-if-sequence-number-eq\",\n \"x-ms-if-sequence-number-le\",\n \"x-ms-if-sequence-number-lt\",\n \"x-ms-incremental-copy\",\n \"x-ms-lease-action\",\n \"x-ms-lease-break-period\",\n \"x-ms-lease-duration\",\n \"x-ms-lease-id\",\n \"x-ms-lease-time\",\n \"x-ms-page-write\",\n \"x-ms-proposed-lease-id\",\n \"x-ms-range-get-content-md5\",\n \"x-ms-rehydrate-priority\",\n \"x-ms-sequence-number-action\",\n \"x-ms-sku-name\",\n \"x-ms-source-content-md5\",\n \"x-ms-source-if-match\",\n \"x-ms-source-if-modified-since\",\n \"x-ms-source-if-none-match\",\n \"x-ms-source-if-unmodified-since\",\n \"x-ms-tag-count\",\n \"x-ms-encryption-key-sha256\",\n \"x-ms-if-tags\",\n \"x-ms-source-if-tags\",\n];\n\nexport const StorageBlobLoggingAllowedQueryParameters = [\n \"comp\",\n \"maxresults\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"se\",\n \"si\",\n \"sip\",\n \"sp\",\n \"spr\",\n \"sr\",\n \"srt\",\n \"ss\",\n \"st\",\n \"sv\",\n \"include\",\n \"marker\",\n \"prefix\",\n \"copyid\",\n \"restype\",\n \"blockid\",\n \"blocklisttype\",\n \"delimiter\",\n \"prevsnapshot\",\n \"ske\",\n \"skoid\",\n \"sks\",\n \"skt\",\n \"sktid\",\n \"skv\",\n \"snapshot\",\n];\n\nexport const BlobUsesCustomerSpecifiedEncryptionMsg = \"BlobUsesCustomerSpecifiedEncryption\";\nexport const BlobDoesNotUseCustomerSpecifiedEncryption =\n \"BlobDoesNotUseCustomerSpecifiedEncryption\";\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpHeaders, isNode, URLBuilder, TokenCredential } from \"@azure/core-http\";\n\nimport {\n BlobQueryArrowConfiguration,\n BlobQueryCsvTextConfiguration,\n BlobQueryJsonTextConfiguration,\n BlobQueryParquetConfiguration,\n} from \"../Clients\";\nimport {\n QuerySerialization,\n BlobTags,\n BlobName,\n ListBlobsFlatSegmentResponse,\n ListBlobsHierarchySegmentResponse,\n BlobItemInternal,\n BlobPrefix,\n BlobType,\n LeaseStatusType,\n LeaseStateType,\n LeaseDurationType,\n CopyStatusType,\n AccessTier,\n ArchiveStatus,\n RehydratePriority,\n BlobImmutabilityPolicyMode,\n BlobTag,\n PageRange,\n ClearRange,\n BlobPropertiesInternal,\n} from \"../generated/src/models\";\nimport { DevelopmentConnectionString, HeaderConstants, URLConstants } from \"./constants\";\nimport {\n Tags,\n ObjectReplicationPolicy,\n ObjectReplicationRule,\n ObjectReplicationStatus,\n HttpAuthorization,\n} from \"../models\";\nimport {\n ListBlobsFlatSegmentResponseModel,\n BlobItemInternal as BlobItemInternalModel,\n ListBlobsHierarchySegmentResponseModel,\n BlobPrefix as BlobPrefixModel,\n PageBlobGetPageRangesDiffResponseModel,\n PageRangeInfo,\n} from \"../generatedModels\";\n\n/**\n * Reserved URL characters must be properly escaped for Storage services like Blob or File.\n *\n * ## URL encode and escape strategy for JS SDKs\n *\n * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not.\n * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL\n * string has been encoded or not. 
We have 2 potential strategies, and chose strategy two for the XxxClient constructors.\n *\n * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.\n *\n * This is what legacy V2 SDK does, simple and works for most of the cases.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%253A\" and send to server. A blob named \"b%3A\" will be created.\n *\n * But this strategy will make it not possible to create a blob with \"?\" in it's name. Because when customer URL string is\n * \"http://account.blob.core.windows.net/con/blob?name\", the \"?name\" will be treated as URL paramter instead of blob name.\n * If customer URL string is \"http://account.blob.core.windows.net/con/blob%3Fname\", a blob named \"blob%3Fname\" will be created.\n * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.\n * We cannot accept a SDK cannot create a blob name with \"?\". So we implement strategy two:\n *\n * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.\n *\n * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will escape \":\" like \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%3A\" to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%253A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%253A\" to server. A blob named \"b%3A\" will be created.\n *\n * This strategy gives us flexibility to create with any special characters. But \"%\" will be treated as a special characters, if the URL string\n * is not encoded, there shouldn't a \"%\" in the URL string, otherwise the URL is not a valid URL.\n * If customer needs to create a blob with \"%\" in it's blob name, use \"%25\" instead of \"%\". Just like above 3rd sample.\n * And following URL strings are invalid:\n * - \"http://account.blob.core.windows.net/con/b%\"\n * - \"http://account.blob.core.windows.net/con/b%2\"\n * - \"http://account.blob.core.windows.net/con/b%G\"\n *\n * Another special character is \"?\", use \"%2F\" to represent a blob name with \"?\" in a URL string.\n *\n * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`\n *\n * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. 
[vendored dependency payload: embedded `sourcesContent` of the `@azure/storage-blob` source map bundled under `node_modules/@actions/cache` by the "Run npm install" commit — TypeScript sources including `utils/utils.common.ts`, `policies/StorageBrowserPolicy.ts`, `policies/StorageRetryPolicy.ts`, `StorageRetryPolicyFactory.ts`, credential and telemetry policies, and `Pipeline.ts`; machine-generated content reproduced verbatim in the patch]
Customize HTTPClient by implementing IHttpClient interface.\n *\n * @param factories -\n * @param options -\n */\n constructor(factories: RequestPolicyFactory[], options: PipelineOptions = {}) {\n this.factories = factories;\n // when options.httpClient is not specified, passing in a DefaultHttpClient instance to\n // avoid each client creating its own http client.\n this.options = {\n ...options,\n httpClient: options.httpClient || getCachedDefaultHttpClient(),\n };\n }\n\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n public toServiceClientOptions(): ServiceClientOptions {\n return {\n httpClient: this.options.httpClient,\n requestPolicyFactories: this.factories,\n };\n }\n}\n\n/**\n * Options interface for the {@link newPipeline} function.\n */\nexport interface StoragePipelineOptions {\n /**\n * Options to configure a proxy for outgoing requests.\n */\n proxyOptions?: ProxyOptions;\n /**\n * Options for adding user agent details to outgoing requests.\n */\n userAgentOptions?: UserAgentOptions;\n /**\n * Configures the built-in retry policy behavior.\n */\n retryOptions?: StorageRetryOptions;\n /**\n * Keep alive configurations. Default keep-alive is enabled.\n */\n keepAliveOptions?: KeepAliveOptions;\n /**\n * Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n /**\n * The audience used to retrieve an AAD token.\n */\n audience?: string | string[];\n}\n\n/**\n * Creates a new Pipeline object with Credential provided.\n *\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param pipelineOptions - Optional. Options.\n * @returns A new Pipeline object.\n */\nexport function newPipeline(\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n pipelineOptions: StoragePipelineOptions = {}\n): Pipeline {\n if (credential === undefined) {\n credential = new AnonymousCredential();\n }\n\n // Order is important. 
Closer to the API at the top & closer to the network at the bottom.\n // The credential's policy factory must appear close to the wire so it can sign any\n // changes made by other factories (like UniqueRequestIDPolicyFactory)\n\n const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);\n const factories: RequestPolicyFactory[] = [\n tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),\n keepAlivePolicy(pipelineOptions.keepAliveOptions),\n telemetryPolicy,\n generateClientRequestIdPolicy(),\n new StorageBrowserPolicyFactory(),\n new StorageRetryPolicyFactory(pipelineOptions.retryOptions), // Retry policy should be above any policy that throws retryable errors\n // Default deserializationPolicy is provided by protocol layer\n // Use customized XML char key of \"#\" so we could deserialize metadata\n // with \"_\" key\n deserializationPolicy(undefined, { xmlCharKey: \"#\" }),\n logPolicy({\n logger: logger.info,\n allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,\n allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters,\n }),\n ];\n\n if (isNode) {\n // policies only available in Node.js runtime, not in browsers\n factories.push(proxyPolicy(pipelineOptions.proxyOptions));\n factories.push(disableResponseDecompressionPolicy());\n }\n factories.push(\n isTokenCredential(credential)\n ? attachCredential(\n storageBearerTokenChallengeAuthenticationPolicy(\n credential,\n pipelineOptions.audience ?? StorageOAuthScopes\n ),\n credential\n )\n : credential\n );\n\n return new Pipeline(factories, pipelineOptions);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions, WebResource } from \"@azure/core-http\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { HeaderConstants } from \"../utils/constants\";\nimport { getURLPath, getURLQueries } from \"../utils/utils.common\";\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.\n */\nexport class StorageSharedKeyCredentialPolicy extends CredentialPolicy {\n /**\n * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy\n */\n private readonly factory: StorageSharedKeyCredential;\n\n /**\n * Creates an instance of StorageSharedKeyCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n * @param factory -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n factory: StorageSharedKeyCredential\n ) {\n super(nextPolicy, options);\n this.factory = factory;\n }\n\n /**\n * Signs request.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());\n\n if (\n request.body &&\n (typeof request.body === \"string\" || (request.body as Buffer) !== undefined) &&\n request.body.length > 0\n ) {\n request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));\n }\n\n const stringToSign: string =\n [\n request.method.toUpperCase(),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),\n 
this.getHeaderValueToSign(request, HeaderConstants.DATE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.RANGE),\n ].join(\"\\n\") +\n \"\\n\" +\n this.getCanonicalizedHeadersString(request) +\n this.getCanonicalizedResourceString(request);\n\n const signature: string = this.factory.computeHMACSHA256(stringToSign);\n request.headers.set(\n HeaderConstants.AUTHORIZATION,\n `SharedKey ${this.factory.accountName}:${signature}`\n );\n\n // console.log(`[URL]:${request.url}`);\n // console.log(`[HEADERS]:${request.headers.toString()}`);\n // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);\n // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);\n return request;\n }\n\n /**\n * Retrieve header value according to shared key sign rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n *\n * @param request -\n * @param headerName -\n */\n private getHeaderValueToSign(request: WebResource, headerName: string): string {\n const value = request.headers.get(headerName);\n\n if (!value) {\n return \"\";\n }\n\n // When using version 2015-02-21 or later, if Content-Length is zero, then\n // set the Content-Length part of the StringToSign to an empty string.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n if (headerName === HeaderConstants.CONTENT_LENGTH && value === \"0\") {\n return \"\";\n }\n\n return value;\n }\n\n /**\n * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:\n * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.\n * 2. Convert each HTTP header name to lowercase.\n * 3. Sort the headers lexicographically by header name, in ascending order.\n * Each header may appear only once in the string.\n * 4. Replace any linear whitespace in the header value with a single space.\n * 5. Trim any whitespace around the colon in the header.\n * 6. 
Finally, append a new-line character to each canonicalized header in the resulting list.\n * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.\n *\n * @param request -\n */\n private getCanonicalizedHeadersString(request: WebResource): string {\n let headersArray = request.headers.headersArray().filter((value) => {\n return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);\n });\n\n headersArray.sort((a, b): number => {\n return a.name.toLowerCase().localeCompare(b.name.toLowerCase());\n });\n\n // Remove duplicate headers\n headersArray = headersArray.filter((value, index, array) => {\n if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {\n return false;\n }\n return true;\n });\n\n let canonicalizedHeadersStringToSign: string = \"\";\n headersArray.forEach((header) => {\n canonicalizedHeadersStringToSign += `${header.name\n .toLowerCase()\n .trimRight()}:${header.value.trimLeft()}\\n`;\n });\n\n return canonicalizedHeadersStringToSign;\n }\n\n /**\n * Retrieves the webResource canonicalized resource string.\n *\n * @param request -\n */\n private getCanonicalizedResourceString(request: WebResource): string {\n const path = getURLPath(request.url) || \"/\";\n\n let canonicalizedResourceString: string = \"\";\n canonicalizedResourceString += `/${this.factory.accountName}${path}`;\n\n const queries = getURLQueries(request.url);\n const lowercaseQueries: { [key: string]: string } = {};\n if (queries) {\n const queryKeys: string[] = [];\n for (const key in queries) {\n if (Object.prototype.hasOwnProperty.call(queries, key)) {\n const lowercaseKey = key.toLowerCase();\n lowercaseQueries[lowercaseKey] = queries[key];\n queryKeys.push(lowercaseKey);\n }\n }\n\n queryKeys.sort();\n for (const key of queryKeys) {\n canonicalizedResourceString += `\\n${key}:${decodeURIComponent(lowercaseQueries[key])}`;\n }\n }\n\n return canonicalizedResourceString;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { StorageSharedKeyCredentialPolicy } from \"../policies/StorageSharedKeyCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n */\nexport class StorageSharedKeyCredential extends Credential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage account key; readonly.\n */\n private readonly accountKey: Buffer;\n\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param accountName -\n * @param accountKey -\n */\n constructor(accountName: string, accountKey: string) {\n super();\n this.accountName = accountName;\n this.accountKey = Buffer.from(accountKey, \"base64\");\n }\n\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): StorageSharedKeyCredentialPolicy {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n return createHmac(\"sha256\", 
this.accountKey).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nconst packageName = \"azure-storage-blob\";\nconst packageVersion = \"12.10.0\";\n\nexport class StorageClientContext extends coreHttp.ServiceClient {\n url: string;\n version: string;\n\n /**\n * Initializes a new instance of the StorageClientContext class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n if (url === undefined) {\n throw new Error(\"'url' cannot be null\");\n }\n\n // Initializing default values for options\n if (!options) {\n options = {};\n }\n\n if (!options.userAgent) {\n const defaultUserAgent = coreHttp.getDefaultUserAgentValue();\n options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;\n }\n\n super(undefined, options);\n\n this.requestContentType = \"application/json; charset=utf-8\";\n\n this.baseUri = options.endpoint || \"{url}\";\n\n // Parameter assignments\n this.url = url;\n\n // Assigning values to Constant parameters\n this.version = options.version || \"2021-06-08\";\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike } from \"./Pipeline\";\nimport { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from \"./utils/utils.common\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { TokenCredential, isTokenCredential, isNode } from \"@azure/core-http\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * An interface for options common to every remote operation.\n */\nexport interface CommonOptions {\n /**\n * Options to configure spans created when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n */\nexport abstract class StorageClient {\n /**\n * Encoded URL string value.\n */\n public readonly url: string;\n public readonly accountName: string;\n /**\n * Request policy pipeline.\n *\n * @internal\n */\n protected readonly pipeline: PipelineLike;\n /**\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n public readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n /**\n * StorageClient is a reference to protocol layer operations entry, which is\n * generated by AutoRest generator.\n */\n protected readonly storageClientContext: StorageClientContext;\n /**\n */\n protected readonly isHttps: boolean;\n\n /**\n * Creates an instance of StorageClient.\n * @param url - url to resource\n * @param pipeline - request policy pipeline.\n */\n protected constructor(url: string, pipeline: PipelineLike) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageClientContext(\n this.url,\n pipeline.toServiceClientOptions()\n );\n\n this.isHttps = iEqual(getURLScheme(this.url) || \"\", \"https\");\n\n this.credential = new AnonymousCredential();\n for (const factory of this.pipeline.factories) {\n if (\n (isNode && factory instanceof StorageSharedKeyCredential) ||\n factory instanceof AnonymousCredential\n ) {\n this.credential = factory;\n } else if (isTokenCredential((factory as any).credential)) {\n // Only works if the factory has been attached a \"credential\" property.\n // We do that in newPipeline() when using TokenCredential.\n this.credential = (factory as any).credential;\n }\n }\n\n // Override protocol layer's default content-type\n const storageClientContext = this.storageClientContext as any;\n storageClientContext.requestContentType = undefined;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { OperationOptions, RequestOptionsBase } from \"@azure/core-http\";\nimport { createSpanFunction } from \"@azure/core-tracing\";\n\n/**\n * Creates a span using the global tracer.\n * @internal\n */\nexport const createSpan = createSpanFunction({\n packagePrefix: \"Azure.Storage.Blob\",\n namespace: \"Microsoft.Storage\",\n});\n\n/**\n * @internal\n *\n * Adapt the tracing options from OperationOptions to what they need to be for\n * RequestOptionsBase (when we update to later OpenTelemetry versions this is now\n * two separate fields, not just one).\n */\nexport function convertTracingToRequestOptionsBase(\n options?: OperationOptions\n): Pick {\n return {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n spanOptions: (options?.tracingOptions as any)?.spanOptions,\n tracingContext: options?.tracingOptions?.tracingContext,\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting\n * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all\n * the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class BlobSASPermissions {\n /**\n * Creates a {@link BlobSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n blobSASPermissions.read = true;\n break;\n case \"a\":\n blobSASPermissions.add = true;\n break;\n case \"c\":\n blobSASPermissions.create = true;\n break;\n case \"w\":\n blobSASPermissions.write = true;\n break;\n case \"d\":\n blobSASPermissions.delete = true;\n break;\n case \"x\":\n blobSASPermissions.deleteVersion = true;\n break;\n case \"t\":\n blobSASPermissions.tag = true;\n break;\n case \"m\":\n blobSASPermissions.move = true;\n break;\n case \"e\":\n blobSASPermissions.execute = true;\n break;\n case \"i\":\n blobSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n blobSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission: ${char}`);\n }\n }\n\n return blobSASPermissions;\n }\n\n /**\n * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n if (permissionLike.read) {\n blobSASPermissions.read = true;\n }\n if (permissionLike.add) {\n blobSASPermissions.add = true;\n }\n if (permissionLike.create) {\n blobSASPermissions.create = true;\n }\n if (permissionLike.write) {\n blobSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n blobSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n blobSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n blobSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n blobSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n blobSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n blobSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n blobSASPermissions.permanentDelete = true;\n }\n return blobSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * @returns A string which represents the BlobSASPermissions\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Blob SAS permission.\n * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface BlobSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.\n * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.\n * Once all the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class ContainerSASPermissions {\n /**\n * Creates an {@link ContainerSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n containerSASPermissions.read = true;\n break;\n case \"a\":\n containerSASPermissions.add = true;\n break;\n case \"c\":\n containerSASPermissions.create = true;\n break;\n case \"w\":\n containerSASPermissions.write = true;\n break;\n case \"d\":\n containerSASPermissions.delete = true;\n break;\n case \"l\":\n containerSASPermissions.list = true;\n break;\n case \"t\":\n containerSASPermissions.tag = true;\n break;\n case \"x\":\n containerSASPermissions.deleteVersion = true;\n break;\n case \"m\":\n containerSASPermissions.move = true;\n break;\n case \"e\":\n containerSASPermissions.execute = true;\n break;\n case \"i\":\n containerSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n containerSASPermissions.permanentDelete = true;\n break;\n case \"f\":\n containerSASPermissions.filterByTags = true;\n break;\n default:\n throw new RangeError(`Invalid permission ${char}`);\n }\n }\n\n return containerSASPermissions;\n }\n\n /**\n * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n if (permissionLike.read) {\n containerSASPermissions.read = true;\n }\n if (permissionLike.add) {\n containerSASPermissions.add = true;\n }\n if (permissionLike.create) {\n containerSASPermissions.create = true;\n }\n if (permissionLike.write) {\n containerSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n containerSASPermissions.delete = true;\n }\n if (permissionLike.list) {\n containerSASPermissions.list = true;\n }\n if (permissionLike.deleteVersion) {\n containerSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n containerSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n containerSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n containerSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n containerSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n containerSASPermissions.permanentDelete = true;\n }\n if (permissionLike.filterByTags) {\n containerSASPermissions.filterByTags = true;\n }\n return containerSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specifies List access granted.\n */\n public list: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n 
* Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n public filterByTags: boolean = false;\n\n /**\n * Converts the given permissions to a string. Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * The order of the characters should be as specified here to ensure correctness.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n if (this.filterByTags) {\n permissions.push(\"f\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Container SAS permission.\n * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface ContainerSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specifies List access granted.\n */\n list?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n filterByTags?: boolean;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n */\nexport class UserDelegationKeyCredential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage user delegation key; readonly.\n */\n public readonly userDelegationKey: UserDelegationKey;\n\n /**\n * Key value in Buffer type.\n */\n private readonly key: Buffer;\n\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param accountName -\n * @param userDelegationKey -\n */\n constructor(accountName: string, 
userDelegationKey: UserDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n\n return createHmac(\"sha256\", this.key).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allowed IP range for a SAS.\n */\nexport interface SasIPRange {\n /**\n * Starting IP address in the IP range.\n * If end IP doesn't provide, start IP will the only IP allowed.\n */\n start: string;\n /**\n * Optional. IP address that ends the IP range.\n * If not provided, start IP will the only IP allowed.\n */\n end?: string;\n}\n\n/**\n * Generate SasIPRange format string. For example:\n *\n * \"8.8.8.8\" or \"1.1.1.1-255.255.255.255\"\n *\n * @param ipRange -\n */\nexport function ipRangeToString(ipRange: SasIPRange): string {\n return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * Protocols for generated SAS.\n */\nexport enum SASProtocol {\n /**\n * Protocol that allows HTTPS only\n */\n Https = \"https\",\n\n /**\n * Protocol that allows both HTTPS and HTTP\n */\n HttpsAndHttp = \"https,http\",\n}\n\n/**\n * Options to construct {@link SASQueryParameters}.\n */\nexport interface SASQueryParametersOptions {\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n permissions?: string;\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n services?: string;\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n resourceTypes?: string;\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n protocol?: SASProtocol;\n /**\n * Optional. The start time for this SAS token.\n */\n startsOn?: Date;\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n expiresOn?: Date;\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n /**\n * Optional. 
Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n resource?: string;\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n cacheControl?: string;\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n contentDisposition?: string;\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n contentEncoding?: string;\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n contentLanguage?: string;\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n contentType?: string;\n /**\n * User delegation key properties.\n */\n userDelegationKey?: UserDelegationKey;\n /**\n * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}.\n * This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly\n * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}\n * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should\n * be taken here in case there are existing query parameters, which might affect the appropriate means of appending\n * these query parameters).\n *\n * NOTE: Instances of this class are immutable.\n */\nexport class SASQueryParameters {\n /**\n * The storage API version.\n */\n public readonly version: string;\n\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n public readonly protocol?: SASProtocol;\n\n /**\n * Optional. The start time for this SAS token.\n */\n public readonly startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n public readonly expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n public readonly permissions?: string;\n\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n public readonly services?: string;\n\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n public readonly resourceTypes?: string;\n\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n public readonly identifier?: string;\n\n /**\n * Optional. 
Encryption scope to use when sending requests authorized with this SAS URI.\n */\n public readonly encryptionScope?: string;\n\n /**\n * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n public readonly resource?: string;\n\n /**\n * The signature for the SAS token.\n */\n public readonly signature: string;\n\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n public readonly cacheControl?: string;\n\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n public readonly contentDisposition?: string;\n\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n public readonly contentEncoding?: string;\n\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n public readonly contentLanguage?: string;\n\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n public readonly contentType?: string;\n\n /**\n * Inner value of getter ipRange.\n */\n private readonly ipRangeInner?: SasIPRange;\n\n /**\n * The Azure Active Directory object ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedOid?: string;\n\n /**\n * The Azure Active Directory tenant ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedTenantId?: string;\n\n /**\n * The date-time the key is active.\n * Property of user delegation key.\n */\n private readonly signedStartsOn?: Date;\n\n /**\n * The date-time the key expires.\n * Property of user delegation key.\n */\n private readonly signedExpiresOn?: Date;\n\n /**\n * Abbreviation of the Azure Storage service that accepts the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedService?: string;\n\n /**\n * The service version that created the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedVersion?: string;\n\n /**\n * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This is only used for User Delegation SAS.\n */\n public readonly preauthorizedAgentObjectId?: string;\n\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n public readonly correlationId?: string;\n\n /**\n * Optional. 
IP range allowed for this SAS.\n *\n * @readonly\n */\n public get ipRange(): SasIPRange | undefined {\n if (this.ipRangeInner) {\n return {\n end: this.ipRangeInner.end,\n start: this.ipRangeInner.start,\n };\n }\n return undefined;\n }\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param permissions - Representing the storage permissions\n * @param services - Representing the storage services being accessed (only for Account SAS)\n * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS)\n * @param protocol - Representing the allowed HTTP protocol(s)\n * @param startsOn - Representing the start time for this SAS token\n * @param expiresOn - Representing the expiry time for this SAS token\n * @param ipRange - Representing the range of valid IP addresses for this SAS token\n * @param identifier - Representing the signed identifier (only for Service SAS)\n * @param resource - Representing the storage container or blob (only for Service SAS)\n * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS)\n * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS)\n * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS)\n * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS)\n * @param contentType - Representing the content-type header (only for Blob/File Service SAS)\n * @param userDelegationKey - Representing the user delegation key properties\n * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS)\n * @param correlationId - Representing the correlation ID (only for User Delegation SAS)\n * @param encryptionScope -\n */\n constructor(\n version: string,\n signature: string,\n permissions?: string,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n );\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param options - Optional. 
Options to construct the SASQueryParameters.\n */\n constructor(version: string, signature: string, options?: SASQueryParametersOptions);\n\n constructor(\n version: string,\n signature: string,\n permissionsOrOptions?: string | SASQueryParametersOptions,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n ) {\n this.version = version;\n this.signature = signature;\n\n if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== \"string\") {\n // SASQueryParametersOptions\n this.permissions = permissionsOrOptions.permissions;\n this.services = permissionsOrOptions.services;\n this.resourceTypes = permissionsOrOptions.resourceTypes;\n this.protocol = permissionsOrOptions.protocol;\n this.startsOn = permissionsOrOptions.startsOn;\n this.expiresOn = permissionsOrOptions.expiresOn;\n this.ipRangeInner = permissionsOrOptions.ipRange;\n this.identifier = permissionsOrOptions.identifier;\n this.encryptionScope = permissionsOrOptions.encryptionScope;\n this.resource = permissionsOrOptions.resource;\n this.cacheControl = permissionsOrOptions.cacheControl;\n this.contentDisposition = permissionsOrOptions.contentDisposition;\n this.contentEncoding = permissionsOrOptions.contentEncoding;\n this.contentLanguage = permissionsOrOptions.contentLanguage;\n this.contentType = permissionsOrOptions.contentType;\n\n if (permissionsOrOptions.userDelegationKey) {\n this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId;\n this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId;\n this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn;\n this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn;\n this.signedService = permissionsOrOptions.userDelegationKey.signedService;\n this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId;\n this.correlationId = permissionsOrOptions.correlationId;\n }\n } else {\n this.services = services;\n this.resourceTypes = resourceTypes;\n this.expiresOn = expiresOn;\n this.permissions = permissionsOrOptions;\n this.protocol = protocol;\n this.startsOn = startsOn;\n this.ipRangeInner = ipRange;\n this.encryptionScope = encryptionScope;\n this.identifier = identifier;\n this.resource = resource;\n this.cacheControl = cacheControl;\n this.contentDisposition = contentDisposition;\n this.contentEncoding = contentEncoding;\n this.contentLanguage = contentLanguage;\n this.contentType = contentType;\n\n if (userDelegationKey) {\n this.signedOid = userDelegationKey.signedObjectId;\n this.signedTenantId = userDelegationKey.signedTenantId;\n this.signedStartsOn = userDelegationKey.signedStartsOn;\n this.signedExpiresOn = userDelegationKey.signedExpiresOn;\n this.signedService = userDelegationKey.signedService;\n this.signedVersion = userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = preauthorizedAgentObjectId;\n this.correlationId = correlationId;\n }\n }\n }\n\n /**\n * Encodes all SAS query parameters into a string that can be appended to a URL.\n *\n */\n public toString(): string {\n 
const params: string[] = [\n \"sv\",\n \"ss\",\n \"srt\",\n \"spr\",\n \"st\",\n \"se\",\n \"sip\",\n \"si\",\n \"ses\",\n \"skoid\", // Signed object ID\n \"sktid\", // Signed tenant ID\n \"skt\", // Signed key start time\n \"ske\", // Signed key expiry time\n \"sks\", // Signed key service\n \"skv\", // Signed key version\n \"sr\",\n \"sp\",\n \"sig\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"saoid\",\n \"scid\",\n ];\n const queries: string[] = [];\n\n for (const param of params) {\n switch (param) {\n case \"sv\":\n this.tryAppendQueryParameter(queries, param, this.version);\n break;\n case \"ss\":\n this.tryAppendQueryParameter(queries, param, this.services);\n break;\n case \"srt\":\n this.tryAppendQueryParameter(queries, param, this.resourceTypes);\n break;\n case \"spr\":\n this.tryAppendQueryParameter(queries, param, this.protocol);\n break;\n case \"st\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined\n );\n break;\n case \"se\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined\n );\n break;\n case \"sip\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.ipRange ? ipRangeToString(this.ipRange) : undefined\n );\n break;\n case \"si\":\n this.tryAppendQueryParameter(queries, param, this.identifier);\n break;\n case \"ses\":\n this.tryAppendQueryParameter(queries, param, this.encryptionScope);\n break;\n case \"skoid\": // Signed object ID\n this.tryAppendQueryParameter(queries, param, this.signedOid);\n break;\n case \"sktid\": // Signed tenant ID\n this.tryAppendQueryParameter(queries, param, this.signedTenantId);\n break;\n case \"skt\": // Signed key start time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined\n );\n break;\n case \"ske\": // Signed key expiry time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined\n );\n break;\n case \"sks\": // Signed key service\n this.tryAppendQueryParameter(queries, param, this.signedService);\n break;\n case \"skv\": // Signed key version\n this.tryAppendQueryParameter(queries, param, this.signedVersion);\n break;\n case \"sr\":\n this.tryAppendQueryParameter(queries, param, this.resource);\n break;\n case \"sp\":\n this.tryAppendQueryParameter(queries, param, this.permissions);\n break;\n case \"sig\":\n this.tryAppendQueryParameter(queries, param, this.signature);\n break;\n case \"rscc\":\n this.tryAppendQueryParameter(queries, param, this.cacheControl);\n break;\n case \"rscd\":\n this.tryAppendQueryParameter(queries, param, this.contentDisposition);\n break;\n case \"rsce\":\n this.tryAppendQueryParameter(queries, param, this.contentEncoding);\n break;\n case \"rscl\":\n this.tryAppendQueryParameter(queries, param, this.contentLanguage);\n break;\n case \"rsct\":\n this.tryAppendQueryParameter(queries, param, this.contentType);\n break;\n case \"saoid\":\n this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId);\n break;\n case \"scid\":\n this.tryAppendQueryParameter(queries, param, this.correlationId);\n break;\n }\n }\n return queries.join(\"&\");\n }\n\n /**\n * A private helper method used to filter and append query key/value pairs into an array.\n *\n * @param queries -\n * @param key -\n * @param value -\n */\n private tryAppendQueryParameter(queries: string[], key: string, value?: string): void {\n if (!value) {\n return;\n }\n\n key = encodeURIComponent(key);\n value = encodeURIComponent(value);\n if (key.length > 0 && value.length > 0) {\n queries.push(`${key}=${value}`);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { BlobSASPermissions } from \"./BlobSASPermissions\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\nimport { ContainerSASPermissions } from \"./ContainerSASPermissions\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { UserDelegationKeyCredential } from \"../credentials/UserDelegationKeyCredential\";\nimport { ipRangeToString, SasIPRange } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs.\n */\nexport interface BlobSASSignatureValues {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource\n * being accessed for help constructing the permissions string.\n */\n permissions?: BlobSASPermissions | ContainerSASPermissions;\n\n /**\n * Optional. 
IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * The name of the container the SAS user may access.\n */\n containerName: string;\n\n /**\n * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided.\n */\n blobName?: string;\n\n /**\n * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09.\n */\n snapshotTime?: string;\n\n /**\n * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10.\n */\n versionId?: string;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n\n /**\n * Optional. Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user\n * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will\n * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission\n * check for the user specified in this value will be performed. This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n\n /**\n * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to\n * correlate SAS generation with storage resource access. This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * Fill in the required details before running the following snippets.\n *\n * Example usage:\n *\n * ```js\n * // Generate service level SAS for a container\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using an identifier:\n *\n * ```js\n * // Generate service level SAS for a container with identifier\n * // startsOn & permissions are optional when identifier is provided\n * const identifier = \"unique-id\";\n * await containerClient.setAccessPolicy(undefined, [\n * {\n * accessPolicy: {\n * expiresOn: new Date(new Date().valueOf() + 86400), // Date type\n * permissions: ContainerSASPermissions.parse(\"racwdl\").toString(),\n * startsOn: new Date() // Date type\n * },\n * id: identifier\n * }\n * ]);\n *\n * const containerSAS = generateBlobSASQueryParameters(\n * {\n * containerName, // Required\n * identifier // Required\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using a blob name:\n *\n * ```js\n * // Generate service level SAS for a blob\n * const blobSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * blobName, // Required\n * permissions: BlobSASPermissions.parse(\"racwd\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type\n * cacheControl: \"cache-control-override\", // Optional\n * contentDisposition: \"content-disposition-override\", // Optional\n * contentEncoding: \"content-encoding-override\", // Optional\n * contentLanguage: \"content-language-override\", // Optional\n * contentType: \"content-type-override\", // Optional\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters;\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required.\n *\n * Example usage:\n *\n * ```js\n * // Generate user delegation SAS for a container\n * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn, // Optional. Date type\n * expiresOn, // Required. 
Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2018-11-09\" // Must greater than or equal to 2018-11-09 to generate user delegation SAS\n * },\n * userDelegationKey, // UserDelegationKey\n * accountName\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`\n * @param accountName -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKey: UserDelegationKey,\n accountName: string\n): SASQueryParameters;\n\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredentialOrUserDelegationKey: StorageSharedKeyCredential | UserDelegationKey,\n accountName?: string\n): SASQueryParameters {\n const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;\n\n const sharedKeyCredential =\n sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential\n ? sharedKeyCredentialOrUserDelegationKey\n : undefined;\n let userDelegationKeyCredential: UserDelegationKeyCredential | undefined;\n\n if (sharedKeyCredential === undefined && accountName !== undefined) {\n userDelegationKeyCredential = new UserDelegationKeyCredential(\n accountName,\n sharedKeyCredentialOrUserDelegationKey as UserDelegationKey\n );\n }\n\n if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {\n throw TypeError(\"Invalid sharedKeyCredential, userDelegationKey or accountName.\");\n }\n\n // Version 2020-12-06 adds support for encryptionscope in SAS.\n if (version >= \"2020-12-06\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential);\n } else {\n return generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n\n // Version 2019-12-12 adds support for the blob tags permission.\n // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string\n if (version >= \"2018-11-09\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);\n } else {\n // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId.\n if (version >= \"2020-02-10\") {\n return generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n } else {\n return generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n }\n\n if (version >= \"2015-04-05\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);\n } else {\n throw new RangeError(\n \"'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.\"\n );\n }\n }\n\n throw new RangeError(\"'version' must be >= '2015-04-05'.\");\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a 
SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20150405(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl ? 
blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n undefined,\n undefined,\n undefined,\n blobSASSignatureValues.encryptionScope\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-02-10.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.encryptionScope\n );\n}\n\nfunction getCanonicalName(accountName: string, containerName: string, blobName?: string): string {\n // Container: \"/blob/account/containerName\"\n // Blob: \"/blob/account/containerName/blobName\"\n const elements: string[] = [`/blob/${accountName}/${containerName}`];\n if (blobName) {\n elements.push(`/${blobName}`);\n }\n return elements.join(\"\");\n}\n\nfunction SASSignatureValuesSanityCheckAndAutofill(\n blobSASSignatureValues: BlobSASSignatureValues\n): BlobSASSignatureValues {\n const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION;\n if (blobSASSignatureValues.snapshotTime && version < \"2018-11-09\") {\n throw RangeError(\"'version' must be >= '2018-11-09' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {\n throw RangeError(\"Must provide 'blobName' when providing 'snapshotTime'.\");\n }\n\n if (blobSASSignatureValues.versionId && version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {\n throw RangeError(\"Must provide 'blobName' when providing 'versionId'.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'x' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'y' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when providing 't' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)\n ) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission.\");\n }\n\n if (\n version < \"2021-04-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions as ContainerSASPermissions).filterByTags\n ) {\n throw RangeError(\"'version' must be >= '2021-04-10' when providing the 'f' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)\n ) {\n throw RangeError(\n \"'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'.\"\n );\n }\n\n if (blobSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n blobSASSignatureValues.version = version;\n return blobSASSignatureValues;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { generateUuid, HttpResponse } from \"@azure/core-http\";\nimport { StorageClientContext } from \"./generated/src/index\";\nimport { ContainerBreakLeaseOptionalParams } from \"./generatedModels\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Blob as StorageBlob, Container } from \"./generated/src/operations\";\nimport { ModifiedAccessConditions } from \"./models\";\nimport { CommonOptions } from \"./StorageClient\";\nimport { ETagNone } from \"./utils/constants\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobClient } from \"./Clients\";\nimport { ContainerClient } from 
\"./ContainerClient\";\n\n/**\n * The details for a specific lease.\n */\nexport interface Lease {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally. If the request version is 2011-08-18 or\n * newer, the ETag value will be in quotes.\n */\n etag?: string;\n /**\n * Returns the date and time the container was\n * last modified. Any operation that modifies the blob, including an update\n * of the blob's metadata or properties, changes the last-modified time of\n * the blob.\n */\n lastModified?: Date;\n /**\n * Uniquely identifies a container's lease\n */\n leaseId?: string;\n /**\n * Approximate time remaining in the lease\n * period, in seconds.\n */\n leaseTime?: number;\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n */\n requestId?: string;\n /**\n * Indicates the version of the Blob service used\n * to execute the request. This header is returned for requests made against\n * version 2009-09-19 and above.\n */\n version?: string;\n /**\n * UTC date/time value generated by the service that\n * indicates the time at which the response was initiated\n */\n date?: Date;\n /**\n * Error code if any associated with the response that returned\n * the Lease information.\n */\n errorCode?: string;\n}\n\n/**\n * Contains the response data for operations that create, modify, or delete a lease.\n *\n * See {@link BlobLeaseClient}.\n */\nexport type LeaseOperationResponse = Lease & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: Lease;\n };\n};\n\n/**\n * Configures lease operations.\n */\nexport interface LeaseOperationOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.\n */\nexport class BlobLeaseClient {\n private _leaseId: string;\n private _url: string;\n private _containerOrBlobOperation: Container | StorageBlob;\n private _isContainer: boolean;\n\n /**\n * Gets the lease Id.\n *\n * @readonly\n */\n public get leaseId(): string {\n return this._leaseId;\n }\n\n /**\n * Gets the url.\n *\n * @readonly\n */\n public get url(): string {\n return this._url;\n }\n\n /**\n * Creates an instance of BlobLeaseClient.\n * @param client - The client to make the lease operation requests.\n * @param leaseId - Initial proposed lease id.\n */\n constructor(client: ContainerClient | BlobClient, leaseId?: string) {\n const clientContext = new StorageClientContext(\n client.url,\n (client as any).pipeline.toServiceClientOptions()\n );\n this._url = client.url;\n\n if ((client as BlobClient).name === undefined) {\n this._isContainer = true;\n this._containerOrBlobOperation = new Container(clientContext);\n } else {\n this._isContainer = false;\n this._containerOrBlobOperation = new StorageBlob(clientContext);\n }\n\n if (!leaseId) {\n leaseId = generateUuid();\n }\n this._leaseId = leaseId;\n }\n\n /**\n * Establishes and manages a lock on a container for delete operations, or on a blob\n * for write and delete operations.\n * The lock duration can be 15 to 60 seconds, or can be infinite.\n * @see 
https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param duration - Must be between 15 to 60 seconds, or infinite (-1)\n * @param options - option to configure lease management operations.\n * @returns Response data for acquire lease operation.\n */\n public async acquireLease(\n duration: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-acquireLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.acquireLease({\n abortSignal: options.abortSignal,\n duration,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n proposedLeaseId: this._leaseId,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To change the ID of the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param proposedLeaseId - the proposed new lease Id.\n * @param options - option to configure lease management operations.\n * @returns Response data for change lease operation.\n */\n public async changeLease(\n proposedLeaseId: string,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-changeLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const response = await this._containerOrBlobOperation.changeLease(\n this._leaseId,\n proposedLeaseId,\n {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n this._leaseId = proposedLeaseId;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To free the lease if it is no longer needed so that another client may\n * immediately acquire a lease against the container or the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - option to configure lease management operations.\n * @returns Response data for release lease operation.\n */\n public async releaseLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-releaseLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.releaseLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To renew the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - Optional option to configure lease management operations.\n * @returns Response data for renew lease operation.\n */\n public async renewLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-renewLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.renewLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To end the lease but ensure that another client cannot acquire a new lease\n * until the current lease period has expired.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param breakPeriod - Break period\n * @param options - Optional options to configure lease management operations.\n * @returns Response data for break lease operation.\n */\n public async breakLease(\n breakPeriod: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-breakLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const operationOptions: ContainerBreakLeaseOptionalParams = {\n abortSignal: options.abortSignal,\n breakPeriod,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n };\n return await this._containerOrBlobOperation.breakLease(operationOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { TransferProgressEvent } from \"@azure/core-http\";\nimport { Readable } from \"stream\";\n\nexport type ReadableStreamGetter = (offset: number) => Promise;\n\nexport interface RetriableReadableStreamOptions {\n /**\n * Max retry count (greater than or equal to 0), undefined or invalid value means no retry\n */\n maxRetryRequests?: number;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Debug purpose only. Used to inject an unexpected end to existing internal stream,\n * to test stream retry works well or not.\n *\n * When assign it to true, for next incoming \"data\" event of internal stream,\n * RetriableReadableStream will try to emit an \"end\" event to existing internal\n * stream to force it end and start retry from the breaking point.\n * The value will then update to \"undefined\", once the injection works.\n */\n doInjectErrorOnce?: boolean;\n\n /**\n * A threshold, not a limit. 
Dictates the amount of data that a stream buffers before it stops asking for more data.\n */\n highWaterMark?: number;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.\n */\nexport class RetriableReadableStream extends Readable {\n private start: number;\n private offset: number;\n private end: number;\n private getter: ReadableStreamGetter;\n private source: NodeJS.ReadableStream;\n private retries: number = 0;\n private maxRetryRequests: number;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private options: RetriableReadableStreamOptions;\n\n /**\n * Creates an instance of RetriableReadableStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param getter - A method calling downloading request returning\n * a new ReadableStream from specified offset\n * @param offset - Offset position in original data source to read\n * @param count - How much data in original data source to read\n * @param options -\n */\n public constructor(\n source: NodeJS.ReadableStream,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n super({ highWaterMark: options.highWaterMark });\n this.getter = getter;\n this.source = source;\n this.start = offset;\n this.offset = offset;\n this.end = offset + count - 1;\n this.maxRetryRequests =\n options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0;\n this.onProgress = options.onProgress;\n this.options = options;\n\n this.setSourceEventHandlers();\n }\n\n public _read(): void {\n this.source.resume();\n }\n\n private setSourceEventHandlers() {\n this.source.on(\"data\", this.sourceDataHandler);\n this.source.on(\"end\", this.sourceErrorOrEndHandler);\n this.source.on(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private removeSourceEventHandlers() {\n this.source.removeListener(\"data\", this.sourceDataHandler);\n this.source.removeListener(\"end\", this.sourceErrorOrEndHandler);\n this.source.removeListener(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private sourceDataHandler = (data: Buffer) => {\n if (this.options.doInjectErrorOnce) {\n this.options.doInjectErrorOnce = undefined;\n this.source.pause();\n this.source.removeAllListeners(\"data\");\n this.source.emit(\"end\");\n return;\n }\n\n // console.log(\n // `Offset: ${this.offset}, Received ${data.length} from internal stream`\n // );\n this.offset += data.length;\n if (this.onProgress) {\n this.onProgress({ loadedBytes: this.offset - this.start });\n }\n if (!this.push(data)) {\n this.source.pause();\n }\n };\n\n private sourceErrorOrEndHandler = (err?: Error) => {\n if (err && err.name === \"AbortError\") {\n this.destroy(err);\n return;\n }\n\n // console.log(\n // `Source stream emits end or error, offset: ${\n // this.offset\n // }, dest end : ${this.end}`\n // );\n this.removeSourceEventHandlers();\n if (this.offset - 1 === this.end) {\n this.push(null);\n } else if (this.offset <= this.end) {\n // console.log(\n // `retries: ${this.retries}, max retries: ${this.maxRetries}`\n // );\n if (this.retries < this.maxRetryRequests) {\n this.retries += 1;\n this.getter(this.offset)\n .then((newSource) => {\n this.source = newSource;\n this.setSourceEventHandlers();\n return;\n })\n .catch((error) => {\n this.destroy(error);\n });\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: received less data than required and reached 
maxRetires limitation. Received data offset: ${\n this.offset - 1\n }, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${\n this.maxRetryRequests\n }`\n )\n );\n }\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: Received more data than original request, data needed offset is ${\n this.end\n }, received offset: ${this.offset - 1}`\n )\n );\n }\n };\n\n _destroy(error: Error | null, callback: (error?: Error) => void): void {\n // remove listener from source and release source\n this.removeSourceEventHandlers();\n (this.source as Readable).destroy();\n\n callback(error === null ? undefined : error);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { HttpResponse, isNode } from \"@azure/core-http\";\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\n\nimport {\n BlobDownloadHeaders,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n} from \"./generatedModels\";\nimport { BlobDownloadResponseParsed, Metadata, ObjectReplicationPolicy } from \"./models\";\nimport {\n ReadableStreamGetter,\n RetriableReadableStream,\n RetriableReadableStreamOptions,\n} from \"./utils/RetriableReadableStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will\n * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot\n * trigger retries defined in pipeline retry policy.)\n *\n * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js\n * Readable stream.\n */\nexport class BlobDownloadResponse implements BlobDownloadResponseParsed {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. 
Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return this.originalResponse.copyCompletedOn;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. 
Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The number of tags associated with the blob\n *\n * @readonly\n */\n public get tagCount(): number | undefined {\n return this.originalResponse.tagCount;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * Returns the UTC date and time generated by the service that indicates the time at which the blob was\n * last read or written to.\n *\n * @readonly\n */\n public get lastAccessed(): Date | undefined {\n return this.originalResponse.lastAccessed;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the Blob service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * Indicates the versionId of the downloaded blob version.\n *\n * @readonly\n */\n public get versionId(): string | undefined {\n return this.originalResponse.versionId;\n }\n\n /**\n * Indicates whether version of this blob is a current version.\n *\n * @readonly\n */\n public get isCurrentVersion(): boolean | undefined {\n return this.originalResponse.isCurrentVersion;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. 
If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * Object Replication Policy Id of the destination blob.\n *\n * @readonly\n */\n public get objectReplicationDestinationPolicyId(): string | undefined {\n return this.originalResponse.objectReplicationDestinationPolicyId;\n }\n\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n *\n * @readonly\n */\n public get objectReplicationSourceProperties(): ObjectReplicationPolicy[] | undefined {\n return this.originalResponse.objectReplicationSourceProperties;\n }\n\n /**\n * If this blob has been sealed.\n *\n * @readonly\n */\n public get isSealed(): boolean | undefined {\n return this.originalResponse.isSealed;\n }\n\n /**\n * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire.\n *\n * @readonly\n */\n public get immutabilityPolicyExpiresOn(): Date | undefined {\n return this.originalResponse.immutabilityPolicyExpiresOn;\n }\n\n /**\n * Indicates immutability policy mode.\n *\n * @readonly\n */\n public get immutabilityPolicyMode(): BlobImmutabilityPolicyMode | undefined {\n return this.originalResponse.immutabilityPolicyMode;\n }\n\n /**\n * Indicates if a legal hold is present on the blob.\n *\n * @readonly\n */\n public get legalHold(): boolean | undefined {\n return this.originalResponse.legalHold;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get contentAsBlob(): Promise | undefined {\n return this.originalResponse.blobBody;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will automatically retry when internal read stream unexpected ends.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? 
this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobDownloadResponseParsed;\n private blobDownloadStream?: RetriableReadableStream;\n\n /**\n * Creates an instance of BlobDownloadResponse.\n *\n * @param originalResponse -\n * @param getter -\n * @param offset -\n * @param count -\n * @param options -\n */\n public constructor(\n originalResponse: BlobDownloadResponseParsed,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new RetriableReadableStream(\n this.originalResponse.readableStreamBody!,\n getter,\n offset,\n count,\n options\n );\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const AVRO_SYNC_MARKER_SIZE: number = 16;\nexport const AVRO_INIT_BYTES: Uint8Array = new Uint8Array([79, 98, 106, 1]);\nexport const AVRO_CODEC_KEY: string = \"avro.codec\";\nexport const AVRO_SCHEMA_KEY: string = \"avro.schema\";\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of the Object usage and non-interfaces\n/* eslint-disable @typescript-eslint/ban-types, @azure/azure-sdk/ts-use-interface-parameters */\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { KeyValuePair } from \"./utils/utils.common\";\n\n/**\n * Options to configure the AvroParser read methods.\n * See {@link AvroParser.readFixedBytes}, {@link AvroParser.readMap} and etc.\n */\ninterface AvroParserReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroParser {\n /**\n * Reads a fixed number of bytes from the stream.\n *\n * @param stream -\n * @param length -\n * @param options -\n */\n public static async readFixedBytes(\n stream: AvroReadable,\n length: number,\n options: AvroParserReadOptions = {}\n ): Promise {\n const bytes = await stream.read(length, { abortSignal: options.abortSignal });\n if (bytes.length !== length) {\n throw new Error(\"Hit stream end.\");\n }\n return bytes;\n }\n\n /**\n * Reads a single byte from the stream.\n *\n * @param stream -\n * @param options -\n */\n private static async readByte(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const buf = await AvroParser.readFixedBytes(stream, 1, options);\n return buf[0];\n }\n\n // int and long are stored in variable-length zig-zag coding.\n // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt\n // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types\n private static async readZigZagLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n let zigZagEncoded = 0;\n let significanceInBit = 0;\n let byte, haveMoreByte, significanceInFloat;\n\n do {\n byte = await AvroParser.readByte(stream, options);\n haveMoreByte = byte & 0x80;\n zigZagEncoded |= (byte & 0x7f) << significanceInBit;\n significanceInBit += 7;\n } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers\n\n if (haveMoreByte) {\n // Switch 
to float arithmetic\n // eslint-disable-next-line no-self-assign\n zigZagEncoded = zigZagEncoded;\n significanceInFloat = 268435456; // 2 ** 28.\n do {\n byte = await AvroParser.readByte(stream, options);\n zigZagEncoded += (byte & 0x7f) * significanceInFloat;\n significanceInFloat *= 128; // 2 ** 7\n } while (byte & 0x80);\n\n const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2;\n if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {\n throw new Error(\"Integer overflow.\");\n }\n return res;\n }\n\n return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1);\n }\n\n public static async readLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readInt(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readNull(): Promise {\n return null;\n }\n\n public static async readBoolean(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const b = await AvroParser.readByte(stream, options);\n if (b === 1) {\n return true;\n } else if (b === 0) {\n return false;\n } else {\n throw new Error(\"Byte was not a boolean.\");\n }\n }\n\n public static async readFloat(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 4, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat32(0, true); // littleEndian = true\n }\n\n public static async readDouble(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 8, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat64(0, true); // littleEndian = true\n }\n\n public static async readBytes(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const size = await AvroParser.readLong(stream, options);\n if (size < 0) {\n throw new Error(\"Bytes size was negative.\");\n }\n\n return stream.read(size, { abortSignal: options.abortSignal });\n }\n\n public static async readString(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readBytes(stream, options);\n const utf8decoder = new TextDecoder();\n return utf8decoder.decode(u8arr);\n }\n\n private static async readMapPair(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const key = await AvroParser.readString(stream, options);\n // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter.\n const value = await readItemMethod(stream, options);\n return { key, value };\n }\n\n public static async readMap(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const readPairMethod = (\n s: AvroReadable,\n opts: AvroParserReadOptions = {}\n ): Promise> => {\n return AvroParser.readMapPair(s, readItemMethod, opts);\n };\n\n const pairs: KeyValuePair[] = await AvroParser.readArray(stream, readPairMethod, options);\n\n const dict: Record = {};\n for (const pair of pairs) {\n dict[pair.key] = pair.value;\n }\n return dict;\n }\n\n private 
static async readArray(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise {\n const items: T[] = [];\n for (\n let count = await AvroParser.readLong(stream, options);\n count !== 0;\n count = await AvroParser.readLong(stream, options)\n ) {\n if (count < 0) {\n // Ignore block sizes\n await AvroParser.readLong(stream, options);\n count = -count;\n }\n\n while (count--) {\n const item: T = await readItemMethod(stream, options);\n items.push(item);\n }\n }\n return items;\n }\n}\n\ninterface RecordField {\n name: string;\n type: string | ObjectSchema | (string | ObjectSchema)[]; // Unions may not immediately contain other unions.\n}\n\nenum AvroComplex {\n RECORD = \"record\",\n ENUM = \"enum\",\n ARRAY = \"array\",\n MAP = \"map\",\n UNION = \"union\",\n FIXED = \"fixed\",\n}\n\ninterface ObjectSchema {\n type: Exclude;\n name?: string;\n aliases?: string;\n fields?: RecordField[];\n symbols?: string[];\n values?: string;\n size?: number;\n}\n\nenum AvroPrimitive {\n NULL = \"null\",\n BOOLEAN = \"boolean\",\n INT = \"int\",\n LONG = \"long\",\n FLOAT = \"float\",\n DOUBLE = \"double\",\n BYTES = \"bytes\",\n STRING = \"string\",\n}\n\nexport abstract class AvroType {\n /**\n * Reads an object from the stream.\n */\n public abstract read(\n stream: AvroReadable,\n options?: AvroParserReadOptions\n ): Promise; // eslint-disable-line @typescript-eslint/ban-types\n\n /**\n * Determines the AvroType from the Avro Schema.\n */\n public static fromSchema(schema: string | Object): AvroType {\n if (typeof schema === \"string\") {\n return AvroType.fromStringSchema(schema);\n } else if (Array.isArray(schema)) {\n return AvroType.fromArraySchema(schema);\n } else {\n return AvroType.fromObjectSchema(schema as ObjectSchema);\n }\n }\n\n private static fromStringSchema(schema: string): AvroType {\n switch (schema) {\n case AvroPrimitive.NULL:\n case AvroPrimitive.BOOLEAN:\n case AvroPrimitive.INT:\n case AvroPrimitive.LONG:\n case AvroPrimitive.FLOAT:\n case AvroPrimitive.DOUBLE:\n case AvroPrimitive.BYTES:\n case AvroPrimitive.STRING:\n return new AvroPrimitiveType(schema as AvroPrimitive);\n default:\n throw new Error(`Unexpected Avro type ${schema}`);\n }\n }\n\n private static fromArraySchema(schema: any[]): AvroType {\n return new AvroUnionType(schema.map(AvroType.fromSchema));\n }\n\n private static fromObjectSchema(schema: ObjectSchema): AvroType {\n const type = schema.type;\n // Primitives can be defined as strings or objects\n try {\n return AvroType.fromStringSchema(type);\n } catch (err: any) {\n // eslint-disable-line no-empty\n }\n\n switch (type) {\n case AvroComplex.RECORD:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.name) {\n throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`);\n }\n\n // eslint-disable-next-line no-case-declarations\n const fields: Record = {};\n if (!schema.fields) {\n throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`);\n }\n for (const field of schema.fields) {\n fields[field.name] = AvroType.fromSchema(field.type);\n }\n return new AvroRecordType(fields, schema.name);\n case AvroComplex.ENUM:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.symbols) {\n throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`);\n }\n return new 
AvroEnumType(schema.symbols);\n case AvroComplex.MAP:\n if (!schema.values) {\n throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`);\n }\n return new AvroMapType(AvroType.fromSchema(schema.values));\n case AvroComplex.ARRAY: // Unused today\n case AvroComplex.FIXED: // Unused today\n default:\n throw new Error(`Unexpected Avro type ${type} in ${schema}`);\n }\n }\n}\n\nclass AvroPrimitiveType extends AvroType {\n private _primitive: AvroPrimitive;\n\n constructor(primitive: AvroPrimitive) {\n super();\n this._primitive = primitive;\n }\n\n public read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n switch (this._primitive) {\n case AvroPrimitive.NULL:\n return AvroParser.readNull();\n case AvroPrimitive.BOOLEAN:\n return AvroParser.readBoolean(stream, options);\n case AvroPrimitive.INT:\n return AvroParser.readInt(stream, options);\n case AvroPrimitive.LONG:\n return AvroParser.readLong(stream, options);\n case AvroPrimitive.FLOAT:\n return AvroParser.readFloat(stream, options);\n case AvroPrimitive.DOUBLE:\n return AvroParser.readDouble(stream, options);\n case AvroPrimitive.BYTES:\n return AvroParser.readBytes(stream, options);\n case AvroPrimitive.STRING:\n return AvroParser.readString(stream, options);\n default:\n throw new Error(\"Unknown Avro Primitive\");\n }\n }\n}\n\nclass AvroEnumType extends AvroType {\n private readonly _symbols: string[];\n\n constructor(symbols: string[]) {\n super();\n this._symbols = symbols;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const value = await AvroParser.readInt(stream, options);\n return this._symbols[value];\n }\n}\n\nclass AvroUnionType extends AvroType {\n private readonly _types: AvroType[];\n\n constructor(types: AvroType[]) {\n super();\n this._types = types;\n }\n\n public async read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise { // eslint-disable-line @typescript-eslint/ban-types\n const typeIndex = await AvroParser.readInt(stream, options);\n return this._types[typeIndex].read(stream, options);\n }\n}\n\nclass AvroMapType extends AvroType {\n private readonly _itemType: AvroType;\n\n constructor(itemType: AvroType) {\n super();\n this._itemType = itemType;\n }\n\n public read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const readItemMethod = (\n s: AvroReadable,\n opts?: AvroParserReadOptions\n ): Promise => { \n return this._itemType.read(s, opts);\n };\n return AvroParser.readMap(stream, readItemMethod, options);\n }\n}\n\nclass AvroRecordType extends AvroType {\n private readonly _name: string;\n private readonly _fields: Record;\n\n constructor(fields: Record, name: string) {\n super();\n this._fields = fields;\n this._name = name;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const record: Record = {};\n record[\"$schema\"] = this._name;\n for (const key in this._fields) {\n if (Object.prototype.hasOwnProperty.call(this._fields, key)) {\n record[key] = await this._fields[key].read(stream, options);\n }\n }\n return record;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport interface KeyValuePair {\n key: string;\n value: T;\n}\n\nexport function arraysEqual(a: Uint8Array, b: Uint8Array): boolean {\n if (a === b) return true;\n // eslint-disable-next-line eqeqeq\n if (a == null || b == null) return false;\n if (a.length !== b.length) return false;\n\n for (let i 
= 0; i < a.length; ++i) {\n if (a[i] !== b[i]) return false;\n }\n return true;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of non-interfaces\n/* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */\n\nimport \"@azure/core-paging\";\nimport {\n AVRO_CODEC_KEY,\n AVRO_INIT_BYTES,\n AVRO_SCHEMA_KEY,\n AVRO_SYNC_MARKER_SIZE,\n} from \"./AvroConstants\";\nimport { AvroParser, AvroType } from \"./AvroParser\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { arraysEqual } from \"./utils/utils.common\";\n\n/**\n * Options to configure the {@link AvroReader.parseObjects} operation.\n */\nexport interface AvroParseOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroReader {\n private readonly _dataStream: AvroReadable;\n\n private readonly _headerStream: AvroReadable;\n\n private _syncMarker?: Uint8Array;\n\n private _metadata?: Record;\n\n private _itemType?: AvroType;\n\n private _itemsRemainingInBlock?: number;\n\n // Remembers where we started if partial data stream was provided.\n private readonly _initialBlockOffset: number;\n\n /// The byte offset within the Avro file (both header and data)\n /// of the start of the current block.\n private _blockOffset: number;\n public get blockOffset(): number {\n return this._blockOffset;\n }\n\n private _objectIndex: number;\n public get objectIndex(): number {\n return this._objectIndex;\n }\n\n private _initialized: boolean;\n\n constructor(dataStream: AvroReadable);\n\n constructor(\n dataStream: AvroReadable,\n headerStream: AvroReadable,\n currentBlockOffset: number,\n indexWithinCurrentBlock: number\n );\n\n constructor(\n dataStream: AvroReadable,\n headerStream?: AvroReadable,\n currentBlockOffset?: number,\n indexWithinCurrentBlock?: number\n ) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n\n private async initialize(options: AvroParseOptions = {}): Promise {\n const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal,\n });\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n\n // File metadata is written as if defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal,\n });\n\n // Validate codec\n const codec = this._metadata![AVRO_CODEC_KEY];\n if (!(codec === undefined || codec === null || codec === \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n\n // The 16-byte, randomly-generated sync marker for this file.\n this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n // Parse the schema\n const schema = JSON.parse(this._metadata![AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n\n if (this._blockOffset === 0) {\n this._blockOffset = this._initialBlockOffset + 
this._dataStream.position;\n }\n\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n // skip block length\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n\n this._initialized = true;\n if (this._objectIndex && this._objectIndex > 0) {\n for (let i = 0; i < this._objectIndex; i++) {\n await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal });\n this._itemsRemainingInBlock!--;\n }\n }\n }\n\n public hasNext(): boolean {\n return !this._initialized || this._itemsRemainingInBlock! > 0;\n }\n\n public async *parseObjects(\n options: AvroParseOptions = {}\n ): AsyncIterableIterator | null> {\n if (!this._initialized) {\n await this.initialize(options);\n }\n\n while (this.hasNext()) {\n const result = await this._itemType!.read(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n\n this._itemsRemainingInBlock!--;\n this._objectIndex!++;\n\n if (this._itemsRemainingInBlock === 0) {\n const marker = await AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n\n if (!arraysEqual(this._syncMarker!, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n\n try {\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n } catch (err: any) {\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n }\n\n if (this._itemsRemainingInBlock! > 0) {\n // Ignore block size\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n }\n }\n yield result;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options to configure the {@link AvroReadable.read} operation.\n */\nexport interface AvroReadableReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport abstract class AvroReadable {\n public abstract get position(): number;\n public abstract read(size: number, options?: AvroReadableReadOptions): Promise;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable, AvroReadableReadOptions } from \"./AvroReadable\";\nimport { AbortError } from \"@azure/abort-controller\";\n\nconst ABORT_ERROR = new AbortError(\"Reading from the avro stream was aborted.\");\n\nexport class AvroReadableFromStream extends AvroReadable {\n private _position: number;\n private _readable: NodeJS.ReadableStream;\n\n private toUint8Array(data: string | Buffer): Uint8Array {\n if (typeof data === \"string\") {\n return Buffer.from(data);\n }\n return data;\n }\n\n constructor(readable: NodeJS.ReadableStream) {\n super();\n this._readable = readable;\n this._position = 0;\n }\n public get position(): number {\n return this._position;\n }\n public async read(size: number, options: AvroReadableReadOptions = {}): Promise {\n if (options.abortSignal?.aborted) {\n throw ABORT_ERROR;\n }\n\n if (size < 0) {\n throw new Error(`size parameter should be positive: ${size}`);\n }\n\n if (size === 0) {\n return new Uint8Array();\n }\n\n if (!this._readable.readable) {\n throw 
new Error(\"Stream no longer readable.\");\n }\n // See if there is already enough data.\n const chunk = this._readable.read(size);\n if (chunk) {\n this._position += chunk.length;\n // chunk.length maybe less than desired size if the stream ends.\n return this.toUint8Array(chunk);\n } else {\n // register callback to wait for enough data to read\n return new Promise((resolve, reject) => {\n /* eslint-disable @typescript-eslint/no-use-before-define */\n const cleanUp: () => void = () => {\n this._readable.removeListener(\"readable\", readableCallback);\n this._readable.removeListener(\"error\", rejectCallback);\n this._readable.removeListener(\"end\", rejectCallback);\n this._readable.removeListener(\"close\", rejectCallback);\n\n if (options.abortSignal) {\n options.abortSignal!.removeEventListener(\"abort\", abortHandler);\n }\n };\n\n const readableCallback: () => void = () => {\n const callbackChunk = this._readable.read(size);\n if (callbackChunk) {\n this._position += callbackChunk.length;\n cleanUp();\n // callbackChunk.length maybe less than desired size if the stream ends.\n resolve(this.toUint8Array(callbackChunk));\n }\n };\n\n const rejectCallback: () => void = () => {\n cleanUp();\n reject();\n };\n\n const abortHandler: () => void = () => {\n cleanUp();\n reject(ABORT_ERROR);\n };\n\n this._readable.on(\"readable\", readableCallback);\n this._readable.once(\"error\", rejectCallback);\n this._readable.once(\"end\", rejectCallback);\n this._readable.once(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal!.addEventListener(\"abort\", abortHandler);\n }\n /* eslint-enable @typescript-eslint/no-use-before-define */\n });\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable } from \"stream\";\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TransferProgressEvent } from \"@azure/core-http\";\n\nimport { AvroReadableFromStream, AvroReader } from \"../../../storage-internal-avro/src\";\nimport { BlobQueryError } from \"../Clients\";\n\nexport interface BlobQuickQueryStreamOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.\n */\nexport class BlobQuickQueryStream extends Readable {\n private source: NodeJS.ReadableStream;\n private avroReader: AvroReader;\n private avroIter: AsyncIterableIterator;\n private avroPaused: boolean = true;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private onError?: (error: BlobQueryError) => void;\n\n /**\n * Creates an instance of BlobQuickQueryStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param options -\n */\n public constructor(source: NodeJS.ReadableStream, options: BlobQuickQueryStreamOptions = {}) {\n super();\n this.source = source;\n this.onProgress = options.onProgress;\n this.onError = options.onError;\n this.avroReader = new AvroReader(new AvroReadableFromStream(this.source));\n this.avroIter = this.avroReader.parseObjects({ 
abortSignal: options.abortSignal });\n }\n\n public _read(): void {\n if (this.avroPaused) {\n this.readInternal().catch((err) => {\n this.emit(\"error\", err);\n });\n }\n }\n\n private async readInternal() {\n this.avroPaused = false;\n let avroNext;\n do {\n avroNext = await this.avroIter.next();\n if (avroNext.done) {\n break;\n }\n const obj = avroNext.value;\n const schema = (obj as any).$schema;\n if (typeof schema !== \"string\") {\n throw Error(\"Missing schema in avro record.\");\n }\n\n switch (schema) {\n case \"com.microsoft.azure.storage.queryBlobContents.resultData\":\n {\n const data = (obj as any).data;\n if (data instanceof Uint8Array === false) {\n throw Error(\"Invalid data in avro result record.\");\n }\n if (!this.push(Buffer.from(data))) {\n this.avroPaused = true;\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.progress\":\n {\n const bytesScanned = (obj as any).bytesScanned;\n if (typeof bytesScanned !== \"number\") {\n throw Error(\"Invalid bytesScanned in avro progress record.\");\n }\n if (this.onProgress) {\n this.onProgress({ loadedBytes: bytesScanned });\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.end\":\n if (this.onProgress) {\n const totalBytes = (obj as any).totalBytes;\n if (typeof totalBytes !== \"number\") {\n throw Error(\"Invalid totalBytes in avro end record.\");\n }\n this.onProgress({ loadedBytes: totalBytes });\n }\n this.push(null);\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.error\":\n if (this.onError) {\n const fatal = (obj as any).fatal;\n if (typeof fatal !== \"boolean\") {\n throw Error(\"Invalid fatal in avro error record.\");\n }\n const name = (obj as any).name;\n if (typeof name !== \"string\") {\n throw Error(\"Invalid name in avro error record.\");\n }\n const description = (obj as any).description;\n if (typeof description !== \"string\") {\n throw Error(\"Invalid description in avro error record.\");\n }\n const position = (obj as any).position;\n if (typeof position !== \"number\") {\n throw Error(\"Invalid position in avro error record.\");\n }\n this.onError({\n position,\n name,\n isFatal: fatal,\n description,\n });\n }\n break;\n default:\n throw Error(`Unknown schema ${schema} in avro progress record.`);\n }\n } while (!avroNext.done && !this.avroPaused);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse, isNode } from \"@azure/core-http\";\n\nimport {\n BlobDownloadResponseModel,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n BlobDownloadHeaders,\n BlobQueryResponseModel,\n} from \"./generatedModels\";\nimport { Metadata } from \"./models\";\nimport { BlobQuickQueryStream, BlobQuickQueryStreamOptions } from \"./utils/BlobQuickQueryStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will\n * parse avor data returned by blob query.\n */\nexport class BlobQueryResponse implements BlobDownloadResponseModel {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * 
for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return undefined;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. 
Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. 
This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get blobBody(): Promise | undefined {\n return undefined;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will parse avor data returned by blob query.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? 
this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobQueryResponseModel;\n private blobDownloadStream?: BlobQuickQueryStream;\n\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n public constructor(\n originalResponse: BlobQueryResponseModel,\n options: BlobQuickQueryStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new BlobQuickQueryStream(\n this.originalResponse.readableStreamBody!,\n options\n );\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\nimport {\n LeaseAccessConditions,\n SequenceNumberAccessConditions,\n AppendPositionAccessConditions,\n AccessTier,\n CpkInfo,\n BlobDownloadResponseModel,\n} from \"./generatedModels\";\nimport { EncryptionAlgorithmAES25 } from \"./utils/constants\";\n\n/**\n * Blob tags.\n */\nexport type Tags = Record;\n\n/**\n * A map of name-value pairs to associate with the resource.\n */\nexport interface Metadata {\n /**\n * A name-value pair.\n */\n [propertyName: string]: string;\n}\n\n/**\n * standard HTTP conditional headers and tags condition.\n */\nexport interface ModifiedAccessConditions\n extends MatchConditions,\n ModificationConditions,\n TagConditions {}\n\n/**\n * standard HTTP conditional headers, tags condition and lease condition\n */\nexport interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions {}\n\n/**\n * Conditions to add to the creation of this page blob.\n */\nexport interface PageBlobRequestConditions\n extends BlobRequestConditions,\n SequenceNumberAccessConditions {}\n\n/**\n * Conditions to add to the creation of this append blob.\n */\nexport interface AppendBlobRequestConditions\n extends BlobRequestConditions,\n AppendPositionAccessConditions {}\n\n/**\n * Specifies HTTP options for conditional requests based on modification time.\n */\nexport interface ModificationConditions {\n /**\n * Specify this header value to operate only on a blob if it has been modified since the\n * specified date/time.\n */\n ifModifiedSince?: Date;\n /**\n * Specify this header value to operate only on a blob if it has not been modified since the\n * specified date/time.\n */\n ifUnmodifiedSince?: Date;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on ETag matching.\n */\nexport interface MatchConditions {\n /**\n * Specify an ETag value to operate only on blobs with a matching value.\n */\n ifMatch?: string;\n /**\n * Specify an ETag value to operate only on blobs without a matching value.\n */\n ifNoneMatch?: string;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on blob tags.\n */\nexport interface TagConditions {\n /**\n * Optional SQL statement to apply to the tags of the blob.\n */\n tagConditions?: string;\n}\n\n/**\n * Conditions to meet for the container.\n */\nexport interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions {}\n\n/**\n * Represents the access tier on a blob.\n * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.}\n */\nexport enum BlockBlobTier {\n /**\n * Optimized for storing data 
that is accessed frequently.\n */\n Hot = \"Hot\",\n /**\n * Optimized for storing data that is infrequently accessed and stored for at least 30 days.\n */\n Cool = \"Cool\",\n /**\n * Optimized for storing data that is rarely accessed and stored for at least 180 days\n * with flexible latency requirements (on the order of hours).\n */\n Archive = \"Archive\",\n}\n\n/**\n * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts.\n * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here}\n * for detailed information on the corresponding IOPS and throughput per PageBlobTier.\n */\nexport enum PremiumPageBlobTier {\n /**\n * P4 Tier.\n */\n P4 = \"P4\",\n /**\n * P6 Tier.\n */\n P6 = \"P6\",\n /**\n * P10 Tier.\n */\n P10 = \"P10\",\n /**\n * P15 Tier.\n */\n P15 = \"P15\",\n /**\n * P20 Tier.\n */\n P20 = \"P20\",\n /**\n * P30 Tier.\n */\n P30 = \"P30\",\n /**\n * P40 Tier.\n */\n P40 = \"P40\",\n /**\n * P50 Tier.\n */\n P50 = \"P50\",\n /**\n * P60 Tier.\n */\n P60 = \"P60\",\n /**\n * P70 Tier.\n */\n P70 = \"P70\",\n /**\n * P80 Tier.\n */\n P80 = \"P80\",\n}\n\nexport function toAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string | undefined\n): AccessTier | undefined {\n if (tier === undefined) {\n return undefined;\n }\n\n return tier as AccessTier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).\n}\n\nexport function ensureCpkIfSpecified(cpk: CpkInfo | undefined, isHttps: boolean): void {\n if (cpk && !isHttps) {\n throw new RangeError(\"Customer-provided encryption key must be used over HTTPS.\");\n }\n\n if (cpk && !cpk.encryptionAlgorithm) {\n cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;\n }\n}\n\n/**\n * Specifies the Replication Status of a blob. This is used when a storage account has\n * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}.\n */\nexport type ObjectReplicationStatus = \"complete\" | \"failed\";\n\n/**\n * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob.\n * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}.\n */\nexport interface ObjectReplicationRule {\n /**\n * The Object Replication Rule ID.\n */\n ruleId: string;\n\n /**\n * The Replication Status\n */\n replicationStatus: ObjectReplicationStatus;\n}\n\n/**\n * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}.\n * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the\n * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses\n * (e.g. 
{@link BlobProperties.ObjectReplicationDestinationPolicyId}.\n */\nexport interface ObjectReplicationPolicy {\n /**\n * The Object Replication Policy ID.\n */\n policyId: string;\n\n /**\n * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID.\n */\n rules: ObjectReplicationRule[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadResponseParsed extends BlobDownloadResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * The type of a {@link BlobQueryArrowField}.\n */\nexport type BlobQueryArrowFieldType =\n | \"int64\"\n | \"bool\"\n | \"timestamp[ms]\"\n | \"string\"\n | \"double\"\n | \"decimal\";\n\n/**\n * Describe a field in {@link BlobQueryArrowConfiguration}.\n */\nexport interface BlobQueryArrowField {\n /**\n * The type of the field.\n */\n type: BlobQueryArrowFieldType;\n\n /**\n * The name of the field.\n */\n name?: string;\n\n /**\n * The precision of the field. Required if type is \"decimal\".\n */\n precision?: number;\n\n /**\n * The scale of the field. Required if type is is \"decimal\".\n */\n scale?: number;\n}\n\n/**\n * Describe immutable policy for blob.\n */\nexport interface BlobImmutabilityPolicy {\n /**\n * Specifies the date time when the blobs immutability policy is set to expire.\n */\n expiriesOn?: Date;\n /**\n * Specifies the immutability policy mode to set on the blob.\n */\n policyMode?: BlobImmutabilityPolicyMode;\n}\n\n/**\n * Represents authentication information in Authorization, ProxyAuthorization,\n * WWW-Authenticate, and Proxy-Authenticate header values.\n */\nexport interface HttpAuthorization {\n /**\n * The scheme to use for authorization.\n */\n scheme: string;\n\n /**\n * the credentials containing the authentication information of the user agent for the resource being requested.\n */\n value: string;\n}\n\n/**\n * Defines the known cloud audiences for Storage.\n */\nexport enum StorageBlobAudience {\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Storage.\n */\n StorageOAuthScopes = \"https://storage.azure.com/.default\",\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Disk.\n */\n DiskComputeOAuthScopes = \"https://disk.compute.azure.com/.default\",\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\nimport {\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffResponseModel,\n} from \"./generatedModels\";\nimport { Range } from \"./Range\";\n\n/**\n * List of page ranges for a blob.\n */\nexport interface PageList {\n /**\n * Valid non-overlapping page ranges.\n */\n pageRange?: Range[];\n /**\n * Present if the prevSnapshot parameter was specified and there were cleared\n * pages between the previous snapshot and the target snapshot.\n */\n clearRange?: Range[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: 
PageBlobGetPageRangesHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffResponse\n extends PageList,\n PageBlobGetPageRangesDiffHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response - Model PageBlob Range response\n */\nexport function rangeResponseFromModel(\n response: PageBlobGetPageRangesResponseModel | PageBlobGetPageRangesDiffResponseModel\n): PageBlobGetPageRangesResponse | PageBlobGetPageRangesDiffResponse {\n const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n return {\n ...response,\n pageRange,\n clearRange,\n _response: {\n ...response._response,\n parsedBody: {\n pageRange,\n clearRange,\n },\n },\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { delay } from \"@azure/core-http\";\nimport { PollOperation, PollOperationState, Poller } from \"@azure/core-lro\";\nimport { BlobClient, BlobStartCopyFromURLOptions, BlobBeginCopyFromURLResponse } from \"../Clients\";\n\n/**\n * Defines the operations from a {@link BlobClient} that are needed for the poller\n * returned by {@link BlobClient.beginCopyFromURL} to work.\n */\nexport type CopyPollerBlobClient = Pick & {\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptions\n ): Promise;\n};\n\n/**\n * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}.\n *\n * This state is passed into the user-specified `onProgress` callback\n * whenever copy progress is detected.\n */\nexport interface BlobBeginCopyFromUrlPollState\n extends PollOperationState {\n /**\n * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}.\n */\n readonly blobClient: CopyPollerBlobClient;\n /**\n * The copyId that identifies the in-progress blob copy.\n */\n copyId?: string;\n /**\n * the progress of the blob copy as reported by the service.\n */\n copyProgress?: string;\n /**\n * The source URL provided in {@link BlobClient.beginCopyFromURL}.\n */\n copySource: string;\n /**\n * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call.\n * This is exposed for the poller and should not be modified directly.\n */\n readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * The PollOperation responsible for:\n * - performing the initial startCopyFromURL\n * - checking the copy status via getProperties\n * - cancellation via abortCopyFromURL\n * @hidden\n */\nexport interface BlobBeginCopyFromURLPollOperation\n extends PollOperation {}\n\n/**\n * The set of options used to configure the 
poller.\n * This is an internal interface populated by {@link BlobClient.beginCopyFromURL}.\n *\n * @hidden\n */\nexport interface BlobBeginCopyFromUrlPollerOptions {\n blobClient: CopyPollerBlobClient;\n copySource: string;\n intervalInMs?: number;\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n resumeFrom?: string;\n startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * This is the poller returned by {@link BlobClient.beginCopyFromURL}.\n * This can not be instantiated directly outside of this package.\n *\n * @hidden\n */\nexport class BlobBeginCopyFromUrlPoller extends Poller<\n BlobBeginCopyFromUrlPollState,\n BlobBeginCopyFromURLResponse\n> {\n public intervalInMs: number;\n\n constructor(options: BlobBeginCopyFromUrlPollerOptions) {\n const {\n blobClient,\n copySource,\n intervalInMs = 15000,\n onProgress,\n resumeFrom,\n startCopyFromURLOptions,\n } = options;\n\n let state: BlobBeginCopyFromUrlPollState | undefined;\n\n if (resumeFrom) {\n state = JSON.parse(resumeFrom).state;\n }\n\n const operation = makeBlobBeginCopyFromURLPollOperation({\n ...state,\n blobClient,\n copySource,\n startCopyFromURLOptions,\n });\n\n super(operation);\n\n if (typeof onProgress === \"function\") {\n this.onProgress(onProgress);\n }\n\n this.intervalInMs = intervalInMs;\n }\n\n public delay(): Promise {\n return delay(this.intervalInMs);\n }\n}\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst cancel: BlobBeginCopyFromURLPollOperation[\"cancel\"] = async function cancel(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n) {\n const state = this.state;\n const { copyId } = state;\n if (state.isCompleted) {\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n if (!copyId) {\n state.isCancelled = true;\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n await state.blobClient.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n });\n state.isCancelled = true;\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst update: BlobBeginCopyFromURLPollOperation[\"update\"] = async function update(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n): Promise {\n const state = this.state;\n const { blobClient, copySource, startCopyFromURLOptions } = state;\n\n if (!state.isStarted) {\n state.isStarted = true;\n const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions);\n\n // copyId is needed to abort\n state.copyId = result.copyId;\n if (result.copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n } else if (!state.isCompleted) {\n try {\n const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal });\n const { copyStatus, copyProgress } = result;\n const prevCopyProgress = state.copyProgress;\n if (copyProgress) {\n state.copyProgress = copyProgress;\n }\n if (\n copyStatus === \"pending\" &&\n copyProgress !== prevCopyProgress &&\n typeof options.fireProgress === \"function\"\n ) {\n // trigger in setTimeout, or swallow error?\n options.fireProgress(state);\n } 
else if (copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n } else if (copyStatus === \"failed\") {\n state.error = new Error(\n `Blob copy failed with reason: \"${result.copyStatusDescription || \"unknown\"}\"`\n );\n state.isCompleted = true;\n }\n } catch (err: any) {\n state.error = err;\n state.isCompleted = true;\n }\n }\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst toString: BlobBeginCopyFromURLPollOperation[\"toString\"] = function toString(\n this: BlobBeginCopyFromURLPollOperation\n) {\n return JSON.stringify({ state: this.state }, (key, value) => {\n // remove blobClient from serialized state since a client can't be hydrated from this info.\n if (key === \"blobClient\") {\n return undefined;\n }\n return value;\n });\n};\n\n/**\n * Creates a poll operation given the provided state.\n * @hidden\n */\nfunction makeBlobBeginCopyFromURLPollOperation(\n state: BlobBeginCopyFromUrlPollState\n): BlobBeginCopyFromURLPollOperation {\n return {\n state: { ...state },\n cancel,\n toString,\n update,\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Range for Blob Service Operations.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations\n */\nexport interface Range {\n /**\n * StartByte, larger than or equal 0.\n */\n offset: number;\n /**\n * Optional. Count of bytes, larger than 0.\n * If not provided, will return bytes from offset to the end.\n */\n count?: number;\n}\n\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @param iRange -\n */\nexport function rangeToString(iRange: Range): string {\n if (iRange.offset < 0) {\n throw new RangeError(`Range.offset cannot be smaller than 0.`);\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\n `Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`\n );\n }\n return iRange.count\n ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`\n : `bytes=${iRange.offset}-`;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// In browser, during webpack or browserify bundling, this module will be replaced by 'events'\n// https://github.com/Gozala/events\nimport { EventEmitter } from \"events\";\n\n/**\n * Operation is an async function to be executed and managed by Batch.\n */\nexport declare type Operation = () => Promise;\n\n/**\n * States for Batch.\n */\nenum BatchStates {\n Good,\n Error,\n}\n\n/**\n * Batch provides basic parallel execution with concurrency limits.\n * Will stop execute left operations when one of the executed operation throws an error.\n * But Batch cannot cancel ongoing operations, you need to cancel them by yourself.\n */\nexport class Batch {\n /**\n * Concurrency. 
Must be lager than 0.\n */\n private concurrency: number;\n\n /**\n * Number of active operations under execution.\n */\n private actives: number = 0;\n\n /**\n * Number of completed operations under execution.\n */\n private completed: number = 0;\n\n /**\n * Offset of next operation to be executed.\n */\n private offset: number = 0;\n\n /**\n * Operation array to be executed.\n */\n private operations: Operation[] = [];\n\n /**\n * States of Batch. When an error happens, state will turn into error.\n * Batch will stop execute left operations.\n */\n private state: BatchStates = BatchStates.Good;\n\n /**\n * A private emitter used to pass events inside this class.\n */\n private emitter: EventEmitter;\n\n /**\n * Creates an instance of Batch.\n * @param concurrency -\n */\n public constructor(concurrency: number = 5) {\n if (concurrency < 1) {\n throw new RangeError(\"concurrency must be larger than 0\");\n }\n this.concurrency = concurrency;\n this.emitter = new EventEmitter();\n }\n\n /**\n * Add a operation into queue.\n *\n * @param operation -\n */\n public addOperation(operation: Operation): void {\n this.operations.push(async () => {\n try {\n this.actives++;\n await operation();\n this.actives--;\n this.completed++;\n this.parallelExecute();\n } catch (error: any) {\n this.emitter.emit(\"error\", error);\n }\n });\n }\n\n /**\n * Start execute operations in the queue.\n *\n */\n public async do(): Promise {\n if (this.operations.length === 0) {\n return Promise.resolve();\n }\n\n this.parallelExecute();\n\n return new Promise((resolve, reject) => {\n this.emitter.on(\"finish\", resolve);\n\n this.emitter.on(\"error\", (error) => {\n this.state = BatchStates.Error;\n reject(error);\n });\n });\n }\n\n /**\n * Get next operation to be executed. Return null when reaching ends.\n *\n */\n private nextOperation(): Operation | null {\n if (this.offset < this.operations.length) {\n return this.operations[this.offset++];\n }\n return null;\n }\n\n /**\n * Start execute operations. 
One one the most important difference between\n * this method with do() is that do() wraps as an sync method.\n *\n */\n private parallelExecute(): void {\n if (this.state === BatchStates.Error) {\n return;\n }\n\n if (this.completed >= this.operations.length) {\n this.emitter.emit(\"finish\");\n return;\n }\n\n while (this.actives < this.concurrency) {\n const operation = this.nextOperation();\n if (operation) {\n operation();\n } else {\n return;\n }\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable, ReadableOptions } from \"stream\";\n\n/**\n * Options to configure the BuffersStream.\n */\nexport interface BuffersStreamOptions extends ReadableOptions {}\n\n/**\n * This class generates a readable stream from the data in an array of buffers.\n */\nexport class BuffersStream extends Readable {\n /**\n * The offset of data to be read in the current buffer.\n */\n private byteOffsetInCurrentBuffer: number;\n\n /**\n * The index of buffer to be read in the array of buffers.\n */\n private bufferIndex: number;\n\n /**\n * The total length of data already read.\n */\n private pushedBytesLength: number;\n\n /**\n * Creates an instance of BuffersStream that will emit the data\n * contained in the array of buffers.\n *\n * @param buffers - Array of buffers containing the data\n * @param byteLength - The total length of data contained in the buffers\n */\n constructor(\n private buffers: Buffer[],\n private byteLength: number,\n options?: BuffersStreamOptions\n ) {\n super(options);\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex = 0;\n this.pushedBytesLength = 0;\n\n // check byteLength is no larger than buffers[] total length\n let buffersLength = 0;\n for (const buf of this.buffers) {\n buffersLength += buf.byteLength;\n }\n if (buffersLength < this.byteLength) {\n throw new Error(\"Data size shouldn't be larger than the total length of buffers.\");\n }\n }\n\n /**\n * Internal _read() that will be called when the stream wants to pull more data in.\n *\n * @param size - Optional. 
The size of data to be read\n */\n public _read(size?: number) {\n if (this.pushedBytesLength >= this.byteLength) {\n this.push(null);\n }\n\n if (!size) {\n size = this.readableHighWaterMark;\n }\n\n const outBuffers: Buffer[] = [];\n let i = 0;\n while (i < size && this.pushedBytesLength < this.byteLength) {\n // The last buffer may be longer than the data it contains.\n const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;\n const remainingCapacityInThisBuffer =\n this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;\n const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);\n if (remaining > size - i) {\n // chunkSize = size - i\n const end = this.byteOffsetInCurrentBuffer + size - i;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n this.pushedBytesLength += size - i;\n this.byteOffsetInCurrentBuffer = end;\n i = size;\n break;\n } else {\n // chunkSize = remaining\n const end = this.byteOffsetInCurrentBuffer + remaining;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n if (remaining === remainingCapacityInThisBuffer) {\n // this.buffers[this.bufferIndex] used up, shift to next one\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex++;\n } else {\n this.byteOffsetInCurrentBuffer = end;\n }\n this.pushedBytesLength += remaining;\n i += remaining;\n }\n }\n\n if (outBuffers.length > 1) {\n this.push(Buffer.concat(outBuffers));\n } else if (outBuffers.length === 1) {\n this.push(outBuffers[0]);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BuffersStream } from \"./BuffersStream\";\nimport { Readable } from \"stream\";\n\n/**\n * maxBufferLength is max size of each buffer in the pooled buffers.\n */\n// Can't use import as Typescript doesn't recognize \"buffer\".\nconst maxBufferLength = require(\"buffer\").constants.MAX_LENGTH;\n\n/**\n * This class provides a buffer container which conceptually has no hard size limit.\n * It accepts a capacity, an array of input buffers and the total length of input data.\n * It will allocate an internal \"buffer\" of the capacity and fill the data in the input buffers\n * into the internal \"buffer\" serially with respect to the total length.\n * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream\n * assembled from all the data in the internal \"buffer\".\n */\nexport class PooledBuffer {\n /**\n * Internal buffers used to keep the data.\n * Each buffer has a length of the maxBufferLength except last one.\n */\n private buffers: Buffer[] = [];\n\n /**\n * The total size of internal buffers.\n */\n private readonly capacity: number;\n\n /**\n * The total size of data contained in internal buffers.\n */\n private _size: number;\n\n /**\n * The size of the data contained in the pooled buffers.\n */\n public get size(): number {\n return this._size;\n }\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated but contains no data.\n * Users may call the {@link PooledBuffer.fill} method to fill this\n * pooled buffer with data.\n *\n * @param capacity - Total capacity of the internal buffers\n */\n constructor(capacity: number);\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated and filled with data in the input buffers serially\n * with respect to the total length.\n *\n * @param capacity - Total capacity 
of the internal buffers\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n */\n constructor(capacity: number, buffers: Buffer[], totalLength: number);\n constructor(capacity: number, buffers?: Buffer[], totalLength?: number) {\n this.capacity = capacity;\n this._size = 0;\n\n // allocate\n const bufferNum = Math.ceil(capacity / maxBufferLength);\n for (let i = 0; i < bufferNum; i++) {\n let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;\n if (len === 0) {\n len = maxBufferLength;\n }\n this.buffers.push(Buffer.allocUnsafe(len));\n }\n\n if (buffers) {\n this.fill(buffers, totalLength!);\n }\n }\n\n /**\n * Fill the internal buffers with data in the input buffers serially\n * with respect to the total length and the total capacity of the internal buffers.\n * Data copied will be shift out of the input buffers.\n *\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n *\n */\n public fill(buffers: Buffer[], totalLength: number) {\n this._size = Math.min(this.capacity, totalLength);\n\n let i = 0,\n j = 0,\n targetOffset = 0,\n sourceOffset = 0,\n totalCopiedNum = 0;\n while (totalCopiedNum < this._size) {\n const source = buffers[i];\n const target = this.buffers[j];\n const copiedNum = source.copy(target, targetOffset, sourceOffset);\n\n totalCopiedNum += copiedNum;\n sourceOffset += copiedNum;\n targetOffset += copiedNum;\n if (sourceOffset === source.length) {\n i++;\n sourceOffset = 0;\n }\n if (targetOffset === target.length) {\n j++;\n targetOffset = 0;\n }\n }\n\n // clear copied from source buffers\n buffers.splice(0, i);\n if (buffers.length > 0) {\n buffers[0] = buffers[0].slice(sourceOffset);\n }\n }\n\n /**\n * Get the readable stream assembled from all the data in the internal buffers.\n *\n */\n public getReadableStream(): Readable {\n return new BuffersStream(this.buffers, this.size);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { EventEmitter } from \"events\";\nimport { Readable } from \"stream\";\nimport { PooledBuffer } from \"./PooledBuffer\";\n\n/**\n * OutgoingHandler is an async function triggered by BufferScheduler.\n */\nexport declare type OutgoingHandler = (\n body: () => NodeJS.ReadableStream,\n length: number,\n offset?: number\n) => Promise;\n\n/**\n * This class accepts a Node.js Readable stream as input, and keeps reading data\n * from the stream into the internal buffer structure, until it reaches maxBuffers.\n * Every available buffer will try to trigger outgoingHandler.\n *\n * The internal buffer structure includes an incoming buffer array, and a outgoing\n * buffer array. The incoming buffer array includes the \"empty\" buffers can be filled\n * with new incoming data. The outgoing array includes the filled buffers to be\n * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize.\n *\n * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING\n *\n * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * 1. Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n * 2. 
concurrency should set a smaller value than maxBuffers, which is helpful to\n * reduce the possibility when a outgoing handler waits for the stream data.\n * in this situation, outgoing handlers are blocked.\n * Outgoing queue shouldn't be empty.\n */\nexport class BufferScheduler {\n /**\n * Size of buffers in incoming and outgoing queues. This class will try to align\n * data read from Readable stream into buffer chunks with bufferSize defined.\n */\n private readonly bufferSize: number;\n\n /**\n * How many buffers can be created or maintained.\n */\n private readonly maxBuffers: number;\n\n /**\n * A Node.js Readable stream.\n */\n private readonly readable: Readable;\n\n /**\n * OutgoingHandler is an async function triggered by BufferScheduler when there\n * are available buffers in outgoing array.\n */\n private readonly outgoingHandler: OutgoingHandler;\n\n /**\n * An internal event emitter.\n */\n private readonly emitter: EventEmitter = new EventEmitter();\n\n /**\n * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers)\n */\n private readonly concurrency: number;\n\n /**\n * An internal offset marker to track data offset in bytes of next outgoingHandler.\n */\n private offset: number = 0;\n\n /**\n * An internal marker to track whether stream is end.\n */\n private isStreamEnd: boolean = false;\n\n /**\n * An internal marker to track whether stream or outgoingHandler returns error.\n */\n private isError: boolean = false;\n\n /**\n * How many handlers are executing.\n */\n private executingOutgoingHandlers: number = 0;\n\n /**\n * Encoding of the input Readable stream which has string data type instead of Buffer.\n */\n private encoding?: BufferEncoding;\n\n /**\n * How many buffers have been allocated.\n */\n private numBuffers: number = 0;\n\n /**\n * Because this class doesn't know how much data every time stream pops, which\n * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache\n * data received from the stream, when data in unresolvedDataArray exceeds the\n * blockSize defined, it will try to concat a blockSize of buffer, fill into available\n * buffers from incoming and push to outgoing array.\n */\n private unresolvedDataArray: Buffer[] = [];\n\n /**\n * How much data consisted in unresolvedDataArray.\n */\n private unresolvedLength: number = 0;\n\n /**\n * The array includes all the available buffers can be used to fill data from stream.\n */\n private incoming: PooledBuffer[] = [];\n\n /**\n * The array (queue) includes all the buffers filled from stream data.\n */\n private outgoing: PooledBuffer[] = [];\n\n /**\n * Creates an instance of BufferScheduler.\n *\n * @param readable - A Node.js Readable stream\n * @param bufferSize - Buffer size of every maintained buffer\n * @param maxBuffers - How many buffers can be allocated\n * @param outgoingHandler - An async function scheduled to be\n * triggered when a buffer fully filled\n * with stream data\n * @param concurrency - Concurrency of executing outgoingHandlers (>0)\n * @param encoding - [Optional] Encoding of Readable stream when it's a string stream\n */\n constructor(\n readable: Readable,\n bufferSize: number,\n maxBuffers: number,\n outgoingHandler: OutgoingHandler,\n concurrency: number,\n encoding?: BufferEncoding\n ) {\n if (bufferSize <= 0) {\n throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`);\n }\n\n if (maxBuffers <= 0) {\n throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`);\n }\n\n if (concurrency <= 0) {\n throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`);\n }\n\n this.bufferSize = bufferSize;\n this.maxBuffers = maxBuffers;\n this.readable = readable;\n this.outgoingHandler = outgoingHandler;\n this.concurrency = concurrency;\n this.encoding = encoding;\n }\n\n /**\n * Start the scheduler, will return error when stream of any of the outgoingHandlers\n * returns error.\n *\n */\n public async do(): Promise {\n return new Promise((resolve, reject) => {\n this.readable.on(\"data\", (data) => {\n data = typeof data === \"string\" ? Buffer.from(data, this.encoding) : data;\n this.appendUnresolvedData(data);\n\n if (!this.resolveData()) {\n this.readable.pause();\n }\n });\n\n this.readable.on(\"error\", (err) => {\n this.emitter.emit(\"error\", err);\n });\n\n this.readable.on(\"end\", () => {\n this.isStreamEnd = true;\n this.emitter.emit(\"checkEnd\");\n });\n\n this.emitter.on(\"error\", (err) => {\n this.isError = true;\n this.readable.pause();\n reject(err);\n });\n\n this.emitter.on(\"checkEnd\", () => {\n if (this.outgoing.length > 0) {\n this.triggerOutgoingHandlers();\n return;\n }\n\n if (this.isStreamEnd && this.executingOutgoingHandlers === 0) {\n if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) {\n const buffer = this.shiftBufferFromUnresolvedDataArray();\n this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset)\n .then(resolve)\n .catch(reject);\n } else if (this.unresolvedLength >= this.bufferSize) {\n return;\n } else {\n resolve();\n }\n }\n });\n });\n }\n\n /**\n * Insert a new data into unresolved array.\n *\n * @param data -\n */\n private appendUnresolvedData(data: Buffer) {\n this.unresolvedDataArray.push(data);\n this.unresolvedLength += data.length;\n }\n\n /**\n * Try to shift a buffer with size in blockSize. 
The buffer returned may be less\n * than blockSize when data in unresolvedDataArray is less than bufferSize.\n *\n */\n private shiftBufferFromUnresolvedDataArray(buffer?: PooledBuffer): PooledBuffer {\n if (!buffer) {\n buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);\n } else {\n buffer.fill(this.unresolvedDataArray, this.unresolvedLength);\n }\n\n this.unresolvedLength -= buffer.size;\n return buffer;\n }\n\n /**\n * Resolve data in unresolvedDataArray. For every buffer with size in blockSize\n * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,\n * then push it into outgoing to be handled by outgoing handler.\n *\n * Return false when available buffers in incoming are not enough, else true.\n *\n * @returns Return false when buffers in incoming are not enough, else true.\n */\n private resolveData(): boolean {\n while (this.unresolvedLength >= this.bufferSize) {\n let buffer: PooledBuffer;\n\n if (this.incoming.length > 0) {\n buffer = this.incoming.shift()!;\n this.shiftBufferFromUnresolvedDataArray(buffer);\n } else {\n if (this.numBuffers < this.maxBuffers) {\n buffer = this.shiftBufferFromUnresolvedDataArray();\n this.numBuffers++;\n } else {\n // No available buffer, wait for buffer returned\n return false;\n }\n }\n\n this.outgoing.push(buffer);\n this.triggerOutgoingHandlers();\n }\n return true;\n }\n\n /**\n * Try to trigger a outgoing handler for every buffer in outgoing. Stop when\n * concurrency reaches.\n */\n private async triggerOutgoingHandlers() {\n let buffer: PooledBuffer | undefined;\n do {\n if (this.executingOutgoingHandlers >= this.concurrency) {\n return;\n }\n\n buffer = this.outgoing.shift();\n if (buffer) {\n this.triggerOutgoingHandler(buffer);\n }\n } while (buffer);\n }\n\n /**\n * Trigger a outgoing handler for a buffer shifted from outgoing.\n *\n * @param buffer -\n */\n private async triggerOutgoingHandler(buffer: PooledBuffer): Promise {\n const bufferLength = buffer.size;\n\n this.executingOutgoingHandlers++;\n this.offset += bufferLength;\n\n try {\n await this.outgoingHandler(\n () => buffer.getReadableStream(),\n bufferLength,\n this.offset - bufferLength\n );\n } catch (err: any) {\n this.emitter.emit(\"error\", err);\n return;\n }\n\n this.executingOutgoingHandlers--;\n this.reuseBuffer(buffer);\n this.emitter.emit(\"checkEnd\");\n }\n\n /**\n * Return buffer used by outgoing handler into incoming.\n *\n * @param buffer -\n */\n private reuseBuffer(buffer: PooledBuffer) {\n this.incoming.push(buffer);\n if (!this.isError && this.resolveData() && !this.isStreamEnd) {\n this.readable.resume();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as fs from \"fs\";\nimport * as util from \"util\";\n\n/**\n * Reads a readable stream into buffer. 
Fill the buffer from offset to end.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param offset - From which position in the buffer to be filled, inclusive\n * @param end - To which position in the buffer to be filled, exclusive\n * @param encoding - Encoding of the Readable stream\n */\nexport async function streamToBuffer(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n offset: number,\n end: number,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const count = end - offset; // Total amount of data needed in stream\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n if (pos >= count) {\n resolve();\n return;\n }\n\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n // How much data needed in this chunk\n const chunkLength = pos + chunk.length > count ? count - pos : chunk.length;\n\n buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);\n pos += chunkLength;\n });\n\n stream.on(\"end\", () => {\n if (pos < count) {\n reject(\n new Error(\n `Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`\n )\n );\n }\n resolve();\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into buffer entirely.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n * @throws `RangeError` If buffer size is not big enough.\n */\nexport async function streamToBuffer2(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const bufferSize = buffer.length;\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n if (pos + chunk.length > bufferSize) {\n reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`));\n return;\n }\n\n buffer.fill(chunk, pos, pos + chunk.length);\n pos += chunk.length;\n });\n\n stream.on(\"end\", () => {\n resolve(pos);\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into a buffer.\n *\n * @param stream - A Node.js Readable stream\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n */\nexport async function streamToBuffer3(\n readableStream: NodeJS.ReadableStream,\n encoding?: BufferEncoding\n): Promise {\n return new Promise((resolve, reject) => {\n const chunks: Buffer[] = [];\n readableStream.on(\"data\", (data: Buffer | string) => {\n chunks.push(data instanceof Buffer ? data : Buffer.from(data, encoding));\n });\n readableStream.on(\"end\", () => {\n resolve(Buffer.concat(chunks));\n });\n readableStream.on(\"error\", reject);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Writes the content of a readstream to a local file. 
Returns a Promise which is completed after the file handle is closed.\n *\n * @param rs - The read stream.\n * @param file - Destination file path.\n */\nexport async function readStreamToLocalFile(\n rs: NodeJS.ReadableStream,\n file: string\n): Promise {\n return new Promise((resolve, reject) => {\n const ws = fs.createWriteStream(file);\n\n rs.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"close\", resolve);\n\n rs.pipe(ws);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Promisified version of fs.stat().\n */\nexport const fsStat = util.promisify(fs.stat);\n\nexport const fsCreateReadStream = fs.createReadStream;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n generateUuid,\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n TransferProgressEvent,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PollerLike, PollOperationState } from \"@azure/core-lro\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Readable } from \"stream\";\n\nimport { BlobDownloadResponse } from \"./BlobDownloadResponse\";\nimport { BlobQueryResponse } from \"./BlobQueryResponse\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from \"./generated/src/operations\";\nimport {\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobCreateResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobCreateSnapshotResponse,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobDownloadResponseModel,\n BlobGetPropertiesResponseModel,\n BlobGetTagsHeaders,\n BlobSetHTTPHeadersResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobSetTierResponse,\n BlobStartCopyFromURLResponse,\n BlobTags,\n BlobUndeleteResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobStageBlockResponse,\n BlockBlobUploadHeaders,\n BlockBlobUploadResponse,\n BlockListType,\n CpkInfo,\n DeleteSnapshotsOptionType,\n LeaseAccessConditions,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalResponse,\n PageBlobCreateResponse,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobUploadPagesResponse,\n RehydratePriority,\n SequenceNumberActionType,\n BlockBlobPutBlobFromUrlResponse,\n BlobHTTPHeaders,\n PageBlobGetPageRangesResponseModel,\n PageRangeInfo,\n PageBlobGetPageRangesDiffResponseModel,\n BlobCopySourceTags,\n} from \"./generatedModels\";\nimport {\n AppendBlobRequestConditions,\n BlobDownloadResponseParsed,\n BlobRequestConditions,\n BlockBlobTier,\n ensureCpkIfSpecified,\n Metadata,\n ObjectReplicationPolicy,\n PageBlobRequestConditions,\n PremiumPageBlobTier,\n Tags,\n toAccessTier,\n TagConditions,\n MatchConditions,\n ModificationConditions,\n ModifiedAccessConditions,\n BlobQueryArrowField,\n BlobImmutabilityPolicy,\n HttpAuthorization,\n} from \"./models\";\nimport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n rangeResponseFromModel,\n} from \"./PageBlobRangeResponse\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from 
\"./Pipeline\";\nimport {\n BlobBeginCopyFromUrlPoller,\n BlobBeginCopyFromUrlPollState,\n CopyPollerBlobClient,\n} from \"./pollers/BlobStartCopyFromUrlPoller\";\nimport { Range, rangeToString } from \"./Range\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { Batch } from \"./utils/Batch\";\nimport { BufferScheduler } from \"../../storage-common/src\";\nimport {\n BlobDoesNotUseCustomerSpecifiedEncryption,\n BlobUsesCustomerSpecifiedEncryptionMsg,\n BLOCK_BLOB_MAX_BLOCKS,\n BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES,\n BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES,\n DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES,\n DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS,\n ETagAny,\n URLConstants,\n} from \"./utils/constants\";\nimport { createSpan, convertTracingToRequestOptionsBase } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n ExtractPageRangeInfoItems,\n generateBlockID,\n getURLParameter,\n httpAuthorizationToString,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n setURLParameter,\n toBlobTags,\n toBlobTagsString,\n toQuerySerialization,\n toTags,\n} from \"./utils/utils.common\";\nimport {\n fsCreateReadStream,\n fsStat,\n readStreamToLocalFile,\n streamToBuffer,\n} from \"./utils/utils.node\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobSASPermissions } from \"./sas/BlobSASPermissions\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldResponse,\n} from \"./generatedModels\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions {\n /**\n * The amount of time in milliseconds the poller should wait between\n * calls to the service to determine the status of the Blob copy.\n * Defaults to 15 seconds.\n */\n intervalInMs?: number;\n /**\n * Callback to receive the state of the copy progress.\n */\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n /**\n * Serialized poller state that can be used to resume polling from.\n * This may be useful when starting a copy on one process or thread\n * and you wish to continue polling on another process or thread.\n *\n * To get serialized poller state, call `poller.toString()` on an existing\n * poller.\n */\n resumeFrom?: string;\n}\n\n/**\n * Contains response data for the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse {}\n\n/**\n * Options to configure the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve.\n */\n snapshot?: string;\n /**\n * When this is set to true and download range of blob, the service returns the MD5 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * 
rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentMD5?: boolean;\n /**\n * When this is set to true and download range of blob, the service returns the CRC64 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentCrc64?: boolean;\n /**\n * Conditions to meet when downloading blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Call back to receive events on the progress of download operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original body download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional `FileClient.download()` request will be made\n * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached.\n *\n * Default value is 5, please set a larger value when loading large files in poor network.\n */\n maxRetryRequests?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.exists} operation.\n */\nexport interface BlobExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Conditions to meet.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting blob properties.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.delete} operation.\n */\nexport interface BlobDeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies options to delete blobs that have associated snapshots.\n * - `include`: Delete the base blob and all of its snapshots.\n * - `only`: Delete only the blob's snapshots and not the blob itself.\n */\n deleteSnapshots?: DeleteSnapshotsOptionType;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.undelete} operation.\n */\nexport interface BlobUndeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n 
abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setHTTPHeaders} operation.\n */\nexport interface BlobSetHTTPHeadersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob HTTP headers.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setMetadata} operation.\n */\nexport interface BlobSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob metadata.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.setTags} operation.\n */\nexport interface BlobSetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getTags} operation.\n */\nexport interface BlobGetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getTags} operation.\n */\nexport type BlobGetTagsResponse = { tags: Tags } & BlobGetTagsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: BlobGetTagsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: BlobTags;\n };\n };\n\n/**\n * Options to configure Blob - Acquire Lease operation.\n */\nexport interface BlobAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to 
configure Blob - Release Lease operation.\n */\nexport interface BlobReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Renew Lease operation.\n */\nexport interface BlobRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Change Lease operation.\n */\nexport interface BlobChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Break Lease operation.\n */\nexport interface BlobBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.createSnapshot} operation.\n */\nexport interface BlobCreateSnapshotOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet when creating blob snapshots.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. 
For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobStartCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the blob that are being copied.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | PremiumPageBlobTier | string;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Overrides the sealed state of the destination blob. Default true.\n */\n sealBlob?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobClient.abortCopyFromURL} operation.\n */\nexport interface BlobAbortCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.syncCopyFromURL} operation.\n */\nexport interface BlobSyncCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Optional. 
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Default 'REPLACE'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Options to configure the {@link BlobClient.setAccessTier} operation.\n */\nexport interface BlobSetTierOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n}\n\n/**\n * Option interface for the {@link BlobClient.downloadToBuffer} operation.\n */\nexport interface BlobDownloadToBufferOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * blockSize is the data every request trying to download.\n * Must be greater than or equal to 0.\n * If set to 0 or undefined, blockSize will automatically calculated according to the blob size.\n */\n blockSize?: number;\n\n /**\n * Optional. 
ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original block download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional FileClient.download() request will be made\n * from the broken point, until the requested block has been successfully downloaded or\n * maxRetryRequestsPerBlock is reached.\n *\n * Default value is 5, please set a larger value when in poor network.\n */\n maxRetryRequestsPerBlock?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel download.\n */\n concurrency?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Contains response data for the {@link BlobClient.deleteIfExists} operation.\n */\nexport interface BlobDeleteIfExistsResponse extends BlobDeleteResponse {\n /**\n * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}.\n */\nexport interface CommonGenerateSasUrlOptions {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n}\n\n/**\n * Options to configure {@link BlobClient.generateSasUrl} operation.\n */\nexport interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. 
Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: BlobSASPermissions;\n}\n\n/**\n * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation.\n */\nexport interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation.\n */\nexport interface BlobSetImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n modifiedAccessCondition?: ModificationConditions;\n}\n\n/**\n * Options for setting legal hold {@link BlobClient.setLegalHold} operation.\n */\nexport interface BlobSetLegalHoldOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,\n * append blob, or page blob.\n */\nexport class BlobClient extends StorageClient {\n /**\n * blobContext provided by protocol layer.\n */\n private blobContext: StorageBlob;\n\n private _name: string;\n private _containerName: string;\n\n private _versionId?: string;\n private _snapshot?: string;\n\n /**\n * The name of the blob.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * The name of the storage container the blob is associated with.\n */\n public get containerName(): string {\n return this._containerName;\n }\n\n /**\n *\n * Creates an instance of BlobClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? 
or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n options = options || {};\n let pipeline: PipelineLike;\n let url: string;\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n\n super(url, pipeline);\n ({ blobName: this._name, containerName: this._containerName } =\n this.getBlobAndContainerNamesFromUrl());\n this.blobContext = new StorageBlob(this.storageClientContext);\n\n this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT) as string;\n this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID) as string;\n }\n\n /**\n * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * 
@param snapshot - The snapshot timestamp.\n * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp\n */\n public withSnapshot(snapshot: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a new BlobClient object pointing to a version of this blob.\n * Provide \"\" will remove the versionId and return a Client to the base blob.\n *\n * @param versionId - The versionId.\n * @returns A new BlobClient object pointing to the version of this blob.\n */\n public withVersion(versionId: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.VERSIONID,\n versionId.length === 0 ? undefined : versionId\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a AppendBlobClient object.\n *\n */\n public getAppendBlobClient(): AppendBlobClient {\n return new AppendBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a BlockBlobClient object.\n *\n */\n public getBlockBlobClient(): BlockBlobClient {\n return new BlockBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a PageBlobClient object.\n *\n */\n public getPageBlobClient(): PageBlobClient {\n return new PageBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Reads or downloads a blob from the system, including its metadata and properties.\n * You can also call Get Blob to read a snapshot.\n *\n * * In Node.js, data returns in a Readable stream readableStreamBody\n * * In browsers, data returns in a promise blobBody\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob\n *\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Optional options to Blob Download operation.\n *\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);\n * console.log(\"Downloaded blob content:\", downloaded.toString());\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * Example usage (browser):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);\n * console.log(\n * \"Downloaded blob content\",\n * downloaded\n * );\n *\n * async function blobToString(blob: Blob): Promise {\n * const fileReader = new FileReader();\n * return new Promise((resolve, reject) => {\n * fileReader.onloadend = (ev: any) => {\n * resolve(ev.target!.result);\n * };\n * fileReader.onerror = reject;\n * fileReader.readAsText(blob);\n * });\n * }\n * ```\n */\n public async download(\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlobClient-download\", options);\n\n try {\n const res = await this.blobContext.download({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream\n },\n range: offset === 0 && !count ? undefined : rangeToString({ offset, count }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedRes = {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n // Return browser response immediately\n if (!isNode) {\n return wrappedRes;\n }\n\n // We support retrying when download stream unexpected ends in Node.js runtime\n // Following code shouldn't be bundled into browser build, however some\n // bundlers may try to bundle following code and \"FileReadResponse.ts\".\n // In this case, \"FileDownloadResponse.browser.ts\" will be used as a shim of \"FileDownloadResponse.ts\"\n // The config is in package.json \"browser\" field\n if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {\n // TODO: Default value or make it a required parameter?\n options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;\n }\n\n if (res.contentLength === undefined) {\n throw new RangeError(`File download response doesn't contain valid content length header`);\n }\n\n if (!res.etag) {\n throw new RangeError(`File download response doesn't contain valid etag header`);\n }\n\n return new BlobDownloadResponse(\n wrappedRes,\n async (start: number): Promise => {\n const updatedDownloadOptions: BlobDownloadOptionalParams = {\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ifMatch: options.conditions!.ifMatch || res.etag,\n ifModifiedSince: options.conditions!.ifModifiedSince,\n ifNoneMatch: options.conditions!.ifNoneMatch,\n ifUnmodifiedSince: options.conditions!.ifUnmodifiedSince,\n 
ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({\n count: offset + res.contentLength! - start,\n offset: start,\n }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n };\n\n // Debug purpose only\n // console.log(\n // `Read from internal stream, range: ${\n // updatedOptions.range\n // }, options: ${JSON.stringify(updatedOptions)}`\n // );\n\n return (\n await this.blobContext.download({\n abortSignal: options.abortSignal,\n ...updatedDownloadOptions,\n })\n ).readableStreamBody!;\n },\n offset,\n res.contentLength!,\n {\n maxRetryRequests: options.maxRetryRequests,\n onProgress: options.onProgress,\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure blob resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing blob might be deleted by other clients or\n * applications. Vice versa new blobs might be added by other clients or applications after this\n * function completes.\n *\n * @param options - options to Exists operation.\n */\n public async exists(options: BlobExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-exists\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n await this.getProperties({\n abortSignal: options.abortSignal,\n customerProvidedKey: options.customerProvidedKey,\n conditions: options.conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n // Expected exception when checking blob existence\n return false;\n } else if (\n e.statusCode === 409 &&\n (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg ||\n e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)\n ) {\n // Expected exception when checking blob existence\n return true;\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns all user-defined metadata, standard HTTP properties, and system properties\n * for the blob. It does not return the content of the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. 
This differs from the metadata keys returned by\n * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Optional options to Get Properties operation.\n */\n public async getProperties(\n options: BlobGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getProperties\", options);\n try {\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const res = await this.blobContext.getProperties({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n return {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async delete(options: BlobDeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-delete\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.blobContext.delete({\n abortSignal: options.abortSignal,\n deleteSnapshots: options.deleteSnapshots,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async deleteIfExists(\n options: BlobDeleteOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteIfExists\", options);\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a blob or snapshot only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restores the contents and metadata of soft deleted blob and any associated\n * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29\n * or later.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob\n *\n * @param options - Optional options to Blob Undelete operation.\n */\n public async undelete(options: BlobUndeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-undelete\", options);\n try {\n return await this.blobContext.undelete({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets system properties on the blob.\n *\n * If no value provided, or no value provided for the specified blob HTTP headers,\n * these blob HTTP headers without a value will be cleared.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param blobHTTPHeaders - If no value provided, or no value provided for\n * the specified blob HTTP headers, these blob HTTP\n * headers without a value will be cleared.\n * A common header to set is `blobContentType`\n * enabling the browser to provide functionality\n * based on file type.\n * @param options - Optional options to Blob Set HTTP Headers operation.\n */\n public async setHTTPHeaders(\n blobHTTPHeaders?: BlobHTTPHeaders,\n options: BlobSetHTTPHeadersOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setHTTPHeaders\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setHttpHeaders({\n abortSignal: options.abortSignal,\n blobHttpHeaders: blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger.\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets user-defined metadata for the specified blob as one or more name-value pairs.\n *\n * If no option provided, or no metadata defined in the 
parameter, the blob\n * metadata will be removed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Optional options to Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: BlobSetMetadataOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setMetadata\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets tags on the underlying blob.\n * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.\n * Valid tag key and value characters include lower and upper case letters, digits (0-9),\n * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').\n *\n * @param tags -\n * @param options -\n */\n public async setTags(tags: Tags, options: BlobSetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setTags\", options);\n try {\n return await this.blobContext.setTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n tags: toBlobTags(tags),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the tags associated with the underlying blob.\n *\n * @param options -\n */\n public async getTags(options: BlobGetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getTags\", options);\n try {\n const response = await this.blobContext.getTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n const wrappedResponse: BlobGetTagsResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n tags: toTags({ blobTagSet: response.blobTagSet }) || {},\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the blob.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the blob.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a read-only 
snapshot of a blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob\n *\n * @param options - Optional options to the Blob Create Snapshot operation.\n */\n public async createSnapshot(\n options: BlobCreateSnapshotOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-createSnapshot\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.createSnapshot({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * This method returns a long running operation poller that allows you to wait\n * indefinitely until the copy is completed.\n * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.\n * Note that the onProgress callback will not be invoked if the operation completes in the first\n * request, and attempting to cancel a completed copy will result in an error being thrown.\n *\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * Example using automatic polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using manual polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * while (!poller.isDone()) {\n * await poller.poll();\n * }\n * const result = copyPoller.getResult();\n * ```\n *\n * Example using progress updates:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * onProgress(state) {\n * console.log(`Progress: ${state.copyProgress}`);\n * }\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using a changing polling interval (default 15 seconds):\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * intervalInMs: 1000 // poll blob every 1 second for copy progress\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using copy cancellation:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * // cancel operation after starting it.\n * try {\n * await copyPoller.cancelOperation();\n * // calls to get the result now throw PollerCancelledError\n * await copyPoller.getResult();\n * } catch (err) {\n * if (err.name === 'PollerCancelledError') {\n * console.log('The copy was cancelled.');\n * }\n * }\n * ```\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start 
Copy From URL operation.\n */\n public async beginCopyFromURL(\n copySource: string,\n options: BlobBeginCopyFromURLOptions = {}\n ): Promise<\n PollerLike, BlobBeginCopyFromURLResponse>\n > {\n const client: CopyPollerBlobClient = {\n abortCopyFromURL: (...args) => this.abortCopyFromURL(...args),\n getProperties: (...args) => this.getProperties(...args),\n startCopyFromURL: (...args) => this.startCopyFromURL(...args),\n };\n const poller = new BlobBeginCopyFromUrlPoller({\n blobClient: client,\n copySource,\n intervalInMs: options.intervalInMs,\n onProgress: options.onProgress,\n resumeFrom: options.resumeFrom,\n startCopyFromURLOptions: options,\n });\n\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n await poller.poll();\n\n return poller;\n }\n\n /**\n * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero\n * length and full metadata. Version 2012-02-12 and newer.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob\n *\n * @param copyId - Id of the Copy From URL operation.\n * @param options - Optional options to the Blob Abort Copy From URL operation.\n */\n public async abortCopyFromURL(\n copyId: string,\n options: BlobAbortCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-abortCopyFromURL\", options);\n try {\n return await this.blobContext.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. 
It will not\n * return a response until the copy is complete.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url\n *\n * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication\n * @param options -\n */\n public async syncCopyFromURL(\n copySource: string,\n options: BlobSyncCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-syncCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.copyFromURL(copySource, {\n abortSignal: options.abortSignal,\n metadata: options.metadata,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n sourceContentMD5: options.sourceContentMD5,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n blobTagsString: toBlobTagsString(options.tags),\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n encryptionScope: options.encryptionScope,\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier\n *\n * @param tier - The tier to be set on the blob. 
Valid values are Hot, Cool, or Archive.\n * @param options - Optional options to the Blob Set Tier operation.\n */\n public async setAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string,\n options: BlobSetTierOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setAccessTier\", options);\n try {\n return await this.blobContext.setTier(toAccessTier(tier)!, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n rehydratePriority: options.rehydratePriority,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level function\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param buffer - Buffer to be fill, must have length larger than count\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n buffer: Buffer,\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n public async downloadToBuffer(\n param1?: Buffer | number,\n param2?: number,\n param3?: BlobDownloadToBufferOptions | number,\n param4: BlobDownloadToBufferOptions = {}\n ): Promise {\n let buffer: Buffer | undefined;\n let offset = 0;\n let count = 0;\n let options = param4;\n if (param1 instanceof Buffer) {\n buffer = param1;\n offset = param2 || 0;\n count = typeof param3 === \"number\" ? param3 : 0;\n } else {\n offset = typeof param1 === \"number\" ? param1 : 0;\n count = typeof param2 === \"number\" ? 
param2 : 0;\n options = (param3 as BlobDownloadToBufferOptions) || {};\n }\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToBuffer\", options);\n\n try {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0) {\n throw new RangeError(\"blockSize option must be >= 0\");\n }\n if (options.blockSize === 0) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n\n if (offset < 0) {\n throw new RangeError(\"offset option must be >= 0\");\n }\n\n if (count && count <= 0) {\n throw new RangeError(\"count option must be greater than 0\");\n }\n\n if (!options.conditions) {\n options.conditions = {};\n }\n\n // Customer doesn't specify length, get it\n if (!count) {\n const response = await this.getProperties({\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n count = response.contentLength! - offset;\n if (count < 0) {\n throw new RangeError(\n `offset ${offset} shouldn't be larger than blob size ${response.contentLength!}`\n );\n }\n }\n\n // Allocate the buffer of size = count if the buffer is not provided\n if (!buffer) {\n try {\n buffer = Buffer.alloc(count);\n } catch (error: any) {\n throw new Error(\n `Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\\t ${error.message}`\n );\n }\n }\n\n if (buffer.length < count) {\n throw new RangeError(\n `The buffer's size should be equal to or larger than the request count of bytes: ${count}`\n );\n }\n\n let transferProgress: number = 0;\n const batch = new Batch(options.concurrency);\n for (let off = offset; off < offset + count; off = off + options.blockSize) {\n batch.addOperation(async () => {\n // Exclusive chunk end position\n let chunkEnd = offset + count!;\n if (off + options.blockSize! < chunkEnd) {\n chunkEnd = off + options.blockSize!;\n }\n const response = await this.download(off, chunkEnd - off, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n maxRetryRequests: options.maxRetryRequestsPerBlock,\n customerProvidedKey: options.customerProvidedKey,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n const stream = response.readableStreamBody!;\n await streamToBuffer(stream, buffer!, off - offset, chunkEnd - offset);\n // Update progress after block is downloaded, in case of block trying\n // Could provide finer grained progress updating inside HTTP requests,\n // only if convenience layer download try is enabled\n transferProgress += chunkEnd - off;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n });\n }\n await batch.do();\n return buffer;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob to a local file.\n * Fails if the the given file path already exits.\n * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.\n *\n * @param filePath -\n * @param offset - From which position of the block blob to download.\n * @param count - How much data to be downloaded. 
Will download to the end when passing undefined.\n * @param options - Options to Blob download options.\n * @returns The response data for blob download operation,\n * but with readableStreamBody set to undefined since its\n * content is already read and written into a local file\n * at the specified path.\n */\n public async downloadToFile(\n filePath: string,\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToFile\", options);\n try {\n const response = await this.download(offset, count, {\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n if (response.readableStreamBody) {\n await readStreamToLocalFile(response.readableStreamBody, filePath);\n }\n\n // The stream is no longer accessible so setting it to undefined.\n (response as any).blobDownloadStream = undefined;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n private getBlobAndContainerNamesFromUrl(): { blobName: string; containerName: string } {\n let containerName;\n let blobName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`\n // http://localhost:10001/devstoreaccount1/containername/blob\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob\n // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/containername/blob\n // .getPath() -> /devstoreaccount1/containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)/([^/]*)(/(.*))?\");\n containerName = pathComponents![2];\n blobName = pathComponents![4];\n } else {\n // \"https://customdomain.com/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n }\n\n // decode the encoded blobName, containerName - to get all the special characters that might be present in them\n containerName = decodeURIComponent(containerName);\n blobName = decodeURIComponent(blobName);\n\n // Azure Storage Server will replace \"\\\" with \"/\" in the blob names\n // doing the same in the SDK side so that the user doesn't have to replace \"\\\" instances in the blobName\n blobName = blobName.replace(/\\\\/g, \"/\");\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return { blobName, containerName };\n } catch (error: any) {\n throw new Error(\"Unable to extract blobName and containerName with provided information.\");\n }\n }\n\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n private async startCopyFromURL(\n copySource: string,\n options: BlobStartCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-startCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.startCopyFromURL(copySource, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions.tagConditions,\n },\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n rehydratePriority: options.rehydratePriority,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n sealBlob: options.sealBlob,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Only available for BlobClient constructed with a shared key credential.\n *\n * Generates a Blob Service Shared 
Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: BlobGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n blobName: this._name,\n snapshotTime: this._snapshot,\n versionId: this._versionId,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Delete the immutablility policy on the blob.\n *\n * @param options - Optional options to delete immutability policy on the blob.\n */\n public async deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteImmutabilityPolicy\", options);\n try {\n return await this.blobContext.deleteImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set immutablility policy on the blob.\n *\n * @param options - Optional options to set immutability policy on the blob.\n */\n public async setImmutabilityPolicy(\n immutabilityPolicy: BlobImmutabilityPolicy,\n options?: BlobSetImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setImmutabilityPolicy\", options);\n try {\n return await this.blobContext.setImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn,\n immutabilityPolicyMode: immutabilityPolicy.policyMode,\n modifiedAccessConditions: options?.modifiedAccessCondition,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set legal hold on the blob.\n *\n * @param options - Optional options to set legal hold on the blob.\n */\n public async setLegalHold(\n legalHoldEnabled: boolean,\n options?: BlobSetLegalHoldOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setLegalHold\", options);\n try {\n return await this.blobContext.setLegalHold(legalHoldEnabled, {\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link AppendBlobClient.create} operation.\n */\nexport interface AppendBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * 
Conditions to meet when creating append blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when creating append blobs. A common header\n * to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * HTTP headers to set when creating append blobs. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.seal} operation.\n */\nexport interface AppendBlobSealOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet.\n */\n conditions?: AppendBlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlock} operation.\n */\nexport interface AppendBlobAppendBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Callback to receive events on the progress of append block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. 
[node_modules vendor payload: escaped @azure/storage-blob client source (AppendBlobClient, BlockBlobClient, and related blob option interfaces) embedded as a single string in a bundled source map]
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.uploadPages} operation.\n */\nexport interface PageBlobUploadPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Callback to receive events on the progress of upload pages operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content. 
This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.clearPages} operation.\n */\nexport interface PageBlobClearPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when clearing pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure page blob - get page ranges segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesSegment}\n * - {@link PageBlobClient.listPageRangeItemSegments}\n * - {@link PageBlobClient.listPageRangeItems}\n */\ninterface PageBlobListPageRangesSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRanges} operation.\n */\nexport interface PageBlobListPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n /**\n * (unused)\n */\n range?: string;\n}\n\n/**\n * Options to configure page blob - get page ranges diff segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesDiffSegment}\n * - {@link PageBlobClient.listPageRangeDiffItemSegments}\n * - {@link PageBlobClient.listPageRangeDiffItems}\n */\ninterface PageBlobListPageRangesDiffSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation.\n */\nexport interface PageBlobListPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.resize} operation.\n */\nexport interface PageBlobResizeOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when resizing a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link PageBlobClient.updateSequenceNumber} operation.\n */\nexport interface PageBlobUpdateSequenceNumberOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.startCopyIncremental} operation.\n */\nexport interface PageBlobStartCopyIncrementalOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when starting a copy incremental operation.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation.\n */\nexport interface PageBlobUploadPagesFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. 
Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * PageBlobClient defines a set of operations applicable to page blobs.\n */\nexport class PageBlobClient extends BlobClient {\n /**\n * pageBlobsContext provided by protocol layer.\n */\n private pageBlobContext: PageBlob;\n\n /**\n *\n * Creates an instance of PageBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n *\n * @param url - A URL string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.pageBlobContext = new PageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new PageBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): PageBlobClient {\n return new PageBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a page blob of the specified length. 
Call uploadPages to upload data\n * data to a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options - Options to the Page Blob Create operation.\n * @returns Response data for the Page Blob Create operation.\n */\n public async create(\n size: number,\n options: PageBlobCreateOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-create\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.create(0, size, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n blobSequenceNumber: options.blobSequenceNumber,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a page blob of the specified length. Call uploadPages to upload data\n * data to a page blob. If the blob with the same name already exists, the content\n * of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options -\n */\n public async createIfNotExists(\n size: number,\n options: PageBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-createIfNotExists\", options);\n try {\n const conditions = { ifNoneMatch: ETagAny };\n const res = await this.create(size, {\n ...options,\n conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes 1 or more pages to the page blob. 
The start and end offsets must be a multiple of 512.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param body - Data to upload\n * @param offset - Offset of destination page blob\n * @param count - Content length of the body, also number of bytes to be uploaded\n * @param options - Options to the Page Blob Upload Pages operation.\n * @returns Response data for the Page Blob Upload Pages operation.\n */\n public async uploadPages(\n body: HttpRequestBody,\n offset: number,\n count: number,\n options: PageBlobUploadPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPages\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPages(count, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the\n * contents are read from a URL.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url\n *\n * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication\n * @param sourceOffset - The source offset to copy from. 
Pass 0 to copy from the beginning of source page blob\n * @param destOffset - Offset of destination page blob\n * @param count - Number of bytes to be uploaded from source page blob\n * @param options -\n */\n public async uploadPagesFromURL(\n sourceURL: string,\n sourceOffset: number,\n destOffset: number,\n count: number,\n options: PageBlobUploadPagesFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPagesFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPagesFromURL(\n sourceURL,\n rangeToString({ offset: sourceOffset, count }),\n 0,\n rangeToString({ offset: destOffset, count }),\n {\n abortSignal: options.abortSignal,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n sequenceNumberAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Frees the specified pages from the page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param offset - Starting byte position of the pages to clear.\n * @param count - Number of bytes to clear.\n * @param options - Options to the Page Blob Clear Pages operation.\n * @returns Response data for the Page Blob Clear Pages operation.\n */\n public async clearPages(\n offset: number = 0,\n count?: number,\n options: PageBlobClearPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-clearPages\", options);\n try {\n return await this.pageBlobContext.clearPages(0, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of valid page ranges for a page blob or snapshot of a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns Response data for the Page Blob Get Ranges operation.\n */\n 
public async getPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobGetPageRangesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRanges\", options);\n try {\n return await this.pageBlobContext\n .getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesSegment returns a single segment of page ranges starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to PageBlob Get Page Ranges Segment operation.\n */\n private async listPageRangesSegment(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesSegment\", options);\n try {\n return await this.pageBlobContext.getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n marker: marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. 
The marker value is opaque to the client.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItemSegments(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(\n offset,\n count,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItems(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeItemSegments(\n offset,\n count,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges for a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges for a page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRanges()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRanges();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * 
@param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeItems(offset, count, options);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeItemSegments(offset, count, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...options,\n });\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page Range Diff operation.\n */\n public async getPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesDiff\", options);\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshot,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesDiffSegment returns a single segment of page ranges starting from the\n * specified Marker for difference between previous snapshot and the target page blob.\n * Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesDiffSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async listPageRangesDiffSegment(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): Promise {\n const { span, updatedOptions } = 
createSpan(\"PageBlobClient-getPageRangesDiffSegment\", options);\n try {\n return await this.pageBlobContext.getPageRangesDiff({\n abortSignal: options?.abortSignal,\n leaseAccessConditions: options?.conditions,\n modifiedAccessConditions: {\n ...options?.conditions,\n ifTags: options?.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshotOrUrl,\n range: rangeToString({\n offset: offset,\n count: count,\n }),\n marker: marker,\n maxPageSize: options?.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel}\n *\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. The marker value is opaque to the client.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItemSegments(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItems(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from 
`blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRangesDiff();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobListPageRangesDiffOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, {\n ...options,\n });\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshot,\n settings.continuationToken,\n {\n maxPageSize: settings.maxPageSize,\n ...options,\n }\n );\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page 
Range Diff operation.\n */\n public async getPageRangesDiffForManagedDisks(\n offset: number,\n count: number,\n prevSnapshotUrl: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\n \"PageBlobClient-GetPageRangesDiffForManagedDisks\",\n options\n );\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevSnapshotUrl,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Resizes the page blob to the specified size (which must be a multiple of 512).\n * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties\n *\n * @param size - Target size\n * @param options - Options to the Page Blob Resize operation.\n * @returns Response data for the Page Blob Resize operation.\n */\n public async resize(\n size: number,\n options: PageBlobResizeOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-resize\", options);\n try {\n return await this.pageBlobContext.resize(size, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets a page blob's sequence number.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.\n * @param sequenceNumber - Required if sequenceNumberAction is max or update\n * @param options - Options to the Page Blob Update Sequence Number operation.\n * @returns Response data for the Page Blob Update Sequence Number operation.\n */\n public async updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n sequenceNumber?: number,\n options: PageBlobUpdateSequenceNumberOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-updateSequenceNumber\", options);\n try {\n return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {\n abortSignal: options.abortSignal,\n blobSequenceNumber: sequenceNumber,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.\n * The snapshot is copied such that only the differential changes between the previously\n * copied snapshot are transferred to 
the destination.\n * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.\n * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob\n * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots\n *\n * @param copySource - Specifies the name of the source page blob snapshot. For example,\n * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Options to the Page Blob Copy Incremental operation.\n * @returns Response data for the Page Blob Copy Incremental operation.\n */\n public async startCopyIncremental(\n copySource: string,\n options: PageBlobStartCopyIncrementalOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-startCopyIncremental\", options);\n try {\n return await this.pageBlobContext.copyIncremental(copySource, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { streamToBuffer2 } from \"./utils/utils.node\";\nimport { BATCH_MAX_PAYLOAD_IN_BYTES } from \"./utils/constants\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise {\n let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n\n const responseLength = await streamToBuffer2(\n batchResponse.readableStreamBody as NodeJS.ReadableStream,\n buffer\n );\n\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n\n return buffer.toString();\n}\n\nexport function utf8ByteLength(str: string): number {\n return Buffer.byteLength(str);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"@azure/core-http\";\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport {\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n HeaderConstants,\n HTTPURLConnection,\n} from \"./utils/constants\";\nimport { getBodyAsText } from \"./BatchUtils\";\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { BatchSubResponse, ParsedBatchResponse } from \"./BatchResponse\";\nimport { logger } from \"./log\";\n\nconst HTTP_HEADER_DELIMITER = \": \";\nconst SPACE_DELIMITER = \" \";\nconst NOT_FOUND = -1;\n\n/**\n * Util class for parsing batch response.\n */\nexport class BatchResponseParser {\n private readonly batchResponse: ServiceSubmitBatchResponseModel;\n private readonly responseBatchBoundary: string;\n private readonly perResponsePrefix: string;\n private readonly batchResponseEnding: string;\n private readonly subRequests: Map;\n\n constructor(\n batchResponse: ServiceSubmitBatchResponseModel,\n subRequests: Map\n ) {\n if (!batchResponse || !batchResponse.contentType) {\n // In special case(reported), server may return invalid content-type which could not be parsed.\n throw new RangeError(\"batchResponse is malformed or doesn't contain valid content-type.\");\n }\n\n if (!subRequests || subRequests.size === 0) {\n // This should be prevent during coding.\n throw new RangeError(\"Invalid state: subRequests is not provided or size is 0.\");\n 
}\n\n this.batchResponse = batchResponse;\n this.subRequests = subRequests;\n this.responseBatchBoundary = this.batchResponse.contentType!.split(\"=\")[1];\n this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`;\n this.batchResponseEnding = `--${this.responseBatchBoundary}--`;\n }\n\n // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response\n public async parseBatchResponse(): Promise {\n // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse\n // sub request's response.\n if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) {\n throw new Error(\n `Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`\n );\n }\n\n const responseBodyAsText = await getBodyAsText(this.batchResponse);\n\n const subResponses = responseBodyAsText\n .split(this.batchResponseEnding)[0] // string after ending is useless\n .split(this.perResponsePrefix)\n .slice(1); // string before first response boundary is useless\n const subResponseCount = subResponses.length;\n\n // Defensive coding in case of potential error parsing.\n // Note: subResponseCount == 1 is special case where sub request is invalid.\n // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.\n // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.\n if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) {\n throw new Error(\"Invalid state: sub responses' count is not equal to sub requests' count.\");\n }\n\n const deserializedSubResponses: Array = new Array(subResponseCount);\n let subResponsesSucceededCount: number = 0;\n let subResponsesFailedCount: number = 0;\n\n // Parse sub subResponses.\n for (let index = 0; index < subResponseCount; index++) {\n const subResponse = subResponses[index];\n const deserializedSubResponse = {} as BatchSubResponse;\n deserializedSubResponse.headers = new HttpHeaders();\n\n const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`);\n let subRespHeaderStartFound = false;\n let subRespHeaderEndFound = false;\n let subRespFailed = false;\n let contentId = NOT_FOUND;\n\n for (const responseLine of responseLines) {\n if (!subRespHeaderStartFound) {\n // Convention line to indicate content ID\n if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {\n contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);\n }\n\n // Http version line with status code indicates the start of sub request's response.\n // Example: HTTP/1.1 202 Accepted\n if (responseLine.startsWith(HTTP_VERSION_1_1)) {\n subRespHeaderStartFound = true;\n\n const tokens = responseLine.split(SPACE_DELIMITER);\n deserializedSubResponse.status = parseInt(tokens[1]);\n deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);\n }\n\n continue; // Skip convention headers not specifically for sub request i.e. 
Content-Type: application/http and Content-ID: *\n }\n\n if (responseLine.trim() === \"\") {\n // Sub response's header start line already found, and the first empty line indicates header end line found.\n if (!subRespHeaderEndFound) {\n subRespHeaderEndFound = true;\n }\n\n continue; // Skip empty line\n }\n\n // Note: when code reach here, it indicates subRespHeaderStartFound == true\n if (!subRespHeaderEndFound) {\n if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {\n // Defensive coding to prevent from missing valuable lines.\n throw new Error(\n `Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`\n );\n }\n\n // Parse headers of sub response.\n const tokens = responseLine.split(HTTP_HEADER_DELIMITER);\n deserializedSubResponse.headers.set(tokens[0], tokens[1]);\n if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {\n deserializedSubResponse.errorCode = tokens[1];\n subRespFailed = true;\n }\n } else {\n // Assemble body of sub response.\n if (!deserializedSubResponse.bodyAsText) {\n deserializedSubResponse.bodyAsText = \"\";\n }\n\n deserializedSubResponse.bodyAsText += responseLine;\n }\n } // Inner for end\n\n // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.\n // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it\n // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that\n // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose.\n if (\n contentId !== NOT_FOUND &&\n Number.isInteger(contentId) &&\n contentId >= 0 &&\n contentId < this.subRequests.size &&\n deserializedSubResponses[contentId] === undefined\n ) {\n deserializedSubResponse._request = this.subRequests.get(contentId)!;\n deserializedSubResponses[contentId] = deserializedSubResponse;\n } else {\n logger.error(\n `subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`\n );\n }\n\n if (subRespFailed) {\n subResponsesFailedCount++;\n } else {\n subResponsesSucceededCount++;\n }\n }\n\n return {\n subResponses: deserializedSubResponses,\n subResponsesSucceededCount: subResponsesSucceededCount,\n subResponsesFailedCount: subResponsesFailedCount,\n };\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nenum MutexLockStatus {\n LOCKED,\n UNLOCKED,\n}\n\ntype Callback = (...args: any[]) => any;\n\n/**\n * An async mutex lock.\n */\nexport class Mutex {\n /**\n * Lock for a specific key. 
If the lock has been acquired by another customer, then\n * will wait until getting the lock.\n *\n * @param key - lock key\n */\n public static async lock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n } else {\n this.onUnlockEvent(key, () => {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n });\n }\n });\n }\n\n /**\n * Unlock a key.\n *\n * @param key -\n */\n public static async unlock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === MutexLockStatus.LOCKED) {\n this.emitUnlockEvent(key);\n }\n delete this.keys[key];\n resolve();\n });\n }\n\n private static keys: { [key: string]: MutexLockStatus } = {};\n private static listeners: { [key: string]: Callback[] } = {};\n\n private static onUnlockEvent(key: string, handler: Callback) {\n if (this.listeners[key] === undefined) {\n this.listeners[key] = [handler];\n } else {\n this.listeners[key].push(handler);\n }\n }\n\n private static emitUnlockEvent(key: string) {\n if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {\n const handler = this.listeners[key].shift();\n setImmediate(() => {\n handler!.call(this);\n });\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n generateUuid,\n HttpHeaders,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n WebResource,\n TokenCredential,\n isTokenCredential,\n bearerTokenAuthenticationPolicy,\n isNode,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobClient, BlobDeleteOptions, BlobSetTierOptions } from \"./Clients\";\nimport { AccessTier } from \"./generatedModels\";\nimport { Mutex } from \"./utils/Mutex\";\nimport { Pipeline } from \"./Pipeline\";\nimport { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from \"./utils/utils.common\";\nimport {\n HeaderConstants,\n BATCH_MAX_REQUEST,\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n StorageOAuthScopes,\n} from \"./utils/constants\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { createSpan } from \"./utils/tracing\";\n\n/**\n * A request associated with a batch operation.\n */\nexport interface BatchSubRequest {\n /**\n * The URL of the resource to request operation.\n */\n url: string;\n\n /**\n * The credential used for sub request.\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service.\n * You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n}\n\n/**\n * A BlobBatch represents an aggregated set of operations on blobs.\n * Currently, only `delete` and `setAccessTier` are supported.\n */\nexport class BlobBatch {\n private batchRequest: InnerBatchRequest;\n private readonly batch: string = \"batch\";\n private batchType: \"delete\" | \"setAccessTier\" | undefined;\n\n constructor() {\n this.batchRequest = new InnerBatchRequest();\n }\n\n /**\n * Get the value of Content-Type for a batch request.\n * The value must be multipart/mixed with a batch boundary.\n * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252\n */\n public getMultiPartContentType(): string {\n return this.batchRequest.getMultipartContentType();\n }\n\n /**\n * Get assembled HTTP request body for sub requests.\n */\n public getHttpRequestBody(): string {\n return this.batchRequest.getHttpRequestBody();\n }\n\n /**\n * Get sub requests that are added into the batch request.\n */\n public getSubRequests(): Map {\n return this.batchRequest.getSubRequests();\n }\n\n private async addSubRequestInternal(\n subRequest: BatchSubRequest,\n assembleSubRequestFunc: () => Promise\n ): Promise {\n await Mutex.lock(this.batch);\n\n try {\n this.batchRequest.preAddSubRequest(subRequest);\n await assembleSubRequestFunc();\n this.batchRequest.postAddSubRequest(subRequest);\n } finally {\n await Mutex.unlock(this.batch);\n }\n }\n\n private setBatchType(batchType: \"delete\" | \"setAccessTier\"): void {\n if (!this.batchType) {\n this.batchType = batchType;\n }\n if (this.batchType !== batchType) {\n throw new RangeError(\n `BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`\n );\n }\n }\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlob(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. 
See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param options -\n */\n public async deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise;\n\n public async deleteBlob(\n urlOrBlobClient: string | BlobClient,\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n options?: BlobDeleteOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||\n credentialOrOptions instanceof AnonymousCredential ||\n isTokenCredential(credentialOrOptions))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrOptions;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n options = credentialOrOptions as BlobDeleteOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchDeleteRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"delete\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n blobClient: BlobClient,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobAccessTier(\n urlOrBlobClient: string | BlobClient,\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n options?: BlobSetTierOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n let tier: AccessTier;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||\n credentialOrTier instanceof AnonymousCredential ||\n isTokenCredential(credentialOrTier))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrTier as\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential;\n tier = tierOrOptions as AccessTier;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n tier = credentialOrTier as AccessTier;\n options = tierOrOptions as BlobSetTierOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. 
Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchSetTierRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"setAccessTier\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(\n tier,\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Inner batch request class which is responsible for assembling and serializing sub requests.\n * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.\n */\nclass InnerBatchRequest {\n private operationCount: number;\n private body: string;\n private subRequests: Map;\n private readonly boundary: string;\n private readonly subRequestPrefix: string;\n private readonly multipartContentType: string;\n private readonly batchRequestEnding: string;\n\n constructor() {\n this.operationCount = 0;\n this.body = \"\";\n\n const tempGuid = generateUuid();\n\n // batch_{batchid}\n this.boundary = `batch_${tempGuid}`;\n // --batch_{batchid}\n // Content-Type: application/http\n // Content-Transfer-Encoding: binary\n this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`;\n // multipart/mixed; boundary=batch_{batchid}\n this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`;\n // --batch_{batchid}--\n this.batchRequestEnding = `--${this.boundary}--`;\n\n this.subRequests = new Map();\n }\n\n /**\n * Create pipeline to assemble sub requests. The idea here is to use existing\n * credential and serialization/deserialization components, with additional policies to\n * filter unnecessary headers, assemble sub requests into request's body\n * and intercept request from going to wire.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n public createPipeline(\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential\n ): Pipeline {\n const isAnonymousCreds = credential instanceof AnonymousCredential;\n const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]\n const factories: RequestPolicyFactory[] = new Array(policyFactoryLength);\n\n factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer\n factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers\n if (!isAnonymousCreds) {\n factories[2] = isTokenCredential(credential)\n ? 
attachCredential(\n bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes),\n credential\n )\n : credential;\n }\n factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire\n\n return new Pipeline(factories, {});\n }\n\n public appendSubRequestToBody(request: WebResource) {\n // Start to assemble sub request\n this.body += [\n this.subRequestPrefix, // sub request constant prefix\n `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID\n \"\", // empty line after sub request's content ID\n `${request.method.toString()} ${getURLPathAndQuery(\n request.url\n )} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method\n ].join(HTTP_LINE_ENDING);\n\n for (const header of request.headers.headersArray()) {\n this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`;\n }\n\n this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line\n // No body to assemble for current batch request support\n // End to assemble sub request\n }\n\n public preAddSubRequest(subRequest: BatchSubRequest) {\n if (this.operationCount >= BATCH_MAX_REQUEST) {\n throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`);\n }\n\n // Fast fail if url for sub request is invalid\n const path = getURLPath(subRequest.url);\n if (!path || path === \"\") {\n throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`);\n }\n }\n\n public postAddSubRequest(subRequest: BatchSubRequest) {\n this.subRequests.set(this.operationCount, subRequest);\n this.operationCount++;\n }\n\n // Return the http request body with assembling the ending line to the sub request body.\n public getHttpRequestBody(): string {\n return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`;\n }\n\n public getMultipartContentType(): string {\n return this.multipartContentType;\n }\n\n public getSubRequests(): Map {\n return this.subRequests;\n }\n}\n\nclass BatchRequestAssemblePolicy extends BaseRequestPolicy {\n private batchRequest: InnerBatchRequest;\n private readonly dummyResponse: HttpOperationResponse = {\n request: new WebResource(),\n status: 200,\n headers: new HttpHeaders(),\n };\n\n constructor(\n batchRequest: InnerBatchRequest,\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ) {\n super(nextPolicy, options);\n\n this.batchRequest = batchRequest;\n }\n\n public async sendRequest(request: WebResource): Promise {\n await this.batchRequest.appendSubRequestToBody(request);\n\n return this.dummyResponse; // Intercept request from going to wire\n }\n}\n\nclass BatchRequestAssemblePolicyFactory implements RequestPolicyFactory {\n private batchRequest: InnerBatchRequest;\n\n constructor(batchRequest: InnerBatchRequest) {\n this.batchRequest = batchRequest;\n }\n\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): BatchRequestAssemblePolicy {\n return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);\n }\n}\n\nclass BatchHeaderFilterPolicy extends BaseRequestPolicy {\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(request: WebResource): Promise {\n let xMsHeaderName = \"\";\n\n for (const header of request.headers.headersArray()) {\n if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {\n xMsHeaderName = header.name;\n }\n }\n\n if (xMsHeaderName !== \"\") {\n request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.\n }\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n\nclass BatchHeaderFilterPolicyFactory implements RequestPolicyFactory {\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): BatchHeaderFilterPolicy {\n return new BatchHeaderFilterPolicy(nextPolicy, options);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AccessTier,\n ServiceSubmitBatchHeaders,\n ServiceSubmitBatchOptionalParamsModel,\n ServiceSubmitBatchResponseModel,\n} from \"./generatedModels\";\nimport { ParsedBatchResponse } from \"./BatchResponse\";\nimport { BatchResponseParser } from \"./BatchResponseParser\";\nimport { utf8ByteLength } from \"./BatchUtils\";\nimport { BlobBatch } from \"./BlobBatch\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { HttpResponse, TokenCredential } from \"@azure/core-http\";\nimport { Service, Container } from \"./generated/src/operations\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobDeleteOptions, BlobClient, BlobSetTierOptions } from \"./Clients\";\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike, StoragePipelineOptions, newPipeline, isPipelineLike } from \"./Pipeline\";\nimport { getURLPath } from \"./utils/utils.common\";\n\n/**\n * Options to configure the Service - Submit Batch Optional Params.\n */\nexport interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel {}\n\n/**\n * Contains response data for blob batch operations.\n */\nexport declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse &\n ServiceSubmitBatchHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceSubmitBatchHeaders;\n };\n };\n\n/**\n * Contains response data for the {@link deleteBlobs} operation.\n */\nexport declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * Contains response data for the {@link setBlobsAccessTier} operation.\n */\nexport declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n */\nexport class BlobBatchClient {\n private serviceOrContainerContext: Service | Container;\n\n /**\n * Creates an instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". 
You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n\n /**\n * Creates an instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (!credentialOrPipeline) {\n // no credential provided\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n\n const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());\n\n const path = getURLPath(url);\n if (path && path !== \"/\") {\n // Container scoped.\n this.serviceOrContainerContext = new Container(storageClientContext);\n } else {\n this.serviceOrContainerContext = new Service(storageClientContext);\n }\n }\n\n /**\n * Creates a {@link BlobBatch}.\n * A BlobBatch represents an aggregated set of operations on blobs.\n */\n public createBatch(): BlobBatch {\n return new BlobBatch();\n }\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operations will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resources to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlobs(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation(subrequest) will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs to delete.\n * @param options -\n */\n public async deleteBlobs(\n blobClients: BlobClient[],\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n public async deleteBlobs(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as TokenCredential, options);\n } else {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as BlobDeleteOptions);\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs which should have a new tier set.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n blobClients: BlobClient[],\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobsAccessTier(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as TokenCredential,\n tierOrOptions as AccessTier,\n options\n );\n } else {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as AccessTier,\n tierOrOptions as BlobSetTierOptions\n );\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Submit batch request which consists of multiple subrequests.\n *\n * Get `blobBatchClient` and other details before running the snippets.\n * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`\n *\n * Example usage:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.deleteBlob(urlInString0, credential0);\n * await batchRequest.deleteBlob(urlInString1, credential1, {\n * deleteSnapshots: \"include\"\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * Example using a lease:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.setBlobAccessTier(blockBlobClient0, \"Cool\");\n * await batchRequest.setBlobAccessTier(blockBlobClient1, \"Cool\", {\n * conditions: { leaseId: leaseId }\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @param batchRequest - A set of Delete or SetTier operations.\n * @param options -\n */\n public async submitBatch(\n batchRequest: BlobBatch,\n options: BlobBatchSubmitBatchOptionalParams = {}\n ): Promise {\n if (!batchRequest 
|| batchRequest.getSubRequests().size === 0) {\n throw new RangeError(\"Batch request should contain one or more sub requests.\");\n }\n\n const { span, updatedOptions } = createSpan(\"BlobBatchClient-submitBatch\", options);\n try {\n const batchRequestBody = batchRequest.getHttpRequestBody();\n\n // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now.\n const rawBatchResponse: ServiceSubmitBatchResponseModel =\n await this.serviceOrContainerContext.submitBatch(\n utf8ByteLength(batchRequestBody),\n batchRequest.getMultiPartContentType(),\n batchRequestBody,\n {\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202).\n const batchResponseParser = new BatchResponseParser(\n rawBatchResponse,\n batchRequest.getSubRequests()\n );\n const responseSummary = await batchResponseParser.parseBatchResponse();\n\n const res: BlobBatchSubmitBatchResponse = {\n _response: rawBatchResponse._response,\n contentType: rawBatchResponse.contentType,\n errorCode: rawBatchResponse.errorCode,\n requestId: rawBatchResponse.requestId,\n clientRequestId: rawBatchResponse.clientRequestId,\n version: rawBatchResponse.version,\n subResponses: responseSummary.subResponses,\n subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,\n subResponsesFailedCount: responseSummary.subResponsesFailedCount,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { Container } from \"./generated/src/operations\";\nimport {\n BlobDeleteResponse,\n BlobPrefix,\n BlobProperties,\n BlockBlobUploadResponse,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ContainerEncryptionScope,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesResponse,\n ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataResponse,\n FilterBlobItem,\n FilterBlobSegment,\n FilterBlobSegmentModel,\n LeaseAccessConditions,\n ListBlobsFlatSegmentResponseModel,\n ListBlobsHierarchySegmentResponseModel,\n PublicAccessType,\n SignedIdentifierModel,\n} from \"./generatedModels\";\nimport {\n Metadata,\n ObjectReplicationPolicy,\n Tags,\n ContainerRequestConditions,\n ModifiedAccessConditions,\n} from \"./models\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from \"./Pipeline\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n BlobNameToString,\n ConvertInternalResponseOfListBlobFlat,\n ConvertInternalResponseOfListBlobHierarchy,\n 
extractConnectionStringParts,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n ProcessBlobItems,\n ProcessBlobPrefixes,\n toTags,\n truncatedISO8061Date,\n} from \"./utils/utils.common\";\nimport { ContainerSASPermissions } from \"./sas/ContainerSASPermissions\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n AppendBlobClient,\n BlobClient,\n BlobDeleteOptions,\n BlockBlobClient,\n BlockBlobUploadOptions,\n CommonGenerateSasUrlOptions,\n PageBlobClient,\n} from \"./Clients\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { ListBlobsIncludeItem } from \"./generated/src\";\n\n/**\n * Options to configure {@link ContainerClient.create} operation.\n */\nexport interface ContainerCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the container.\n */\n metadata?: Metadata;\n /**\n * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include:\n * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account.\n * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request.\n */\n access?: PublicAccessType;\n /**\n * Container encryption scope info.\n */\n containerEncryptionScope?: ContainerEncryptionScope;\n}\n\n/**\n * Options to configure {@link ContainerClient.getProperties} operation.\n */\nexport interface ContainerGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.delete} operation.\n */\nexport interface ContainerDeleteMethodOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting the container.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.exists} operation.\n */\nexport interface ContainerExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure {@link ContainerClient.setMetadata} operation.\n */\nexport interface ContainerSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal 
the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.getAccessPolicy} operation.\n */\nexport interface ContainerGetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Signed identifier.\n */\nexport interface SignedIdentifier {\n /**\n * a unique id\n */\n id: string;\n /**\n * Access Policy\n */\n accessPolicy: {\n /**\n * Optional. The date-time the policy is active\n */\n startsOn?: Date;\n /**\n * Optional. The date-time the policy expires\n */\n expiresOn?: Date;\n /**\n * The permissions for the acl policy\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n */\n permissions?: string;\n };\n}\n\n/**\n * Contains response data for the {@link ContainerClient.getAccessPolicy} operation.\n */\nexport declare type ContainerGetAccessPolicyResponse = {\n signedIdentifiers: SignedIdentifier[];\n} & ContainerGetAccessPolicyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerGetAccessPolicyHeaders;\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: SignedIdentifierModel[];\n };\n };\n\n/**\n * Options to configure {@link ContainerClient.setAccessPolicy} operation.\n */\nexport interface ContainerSetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting the access policy.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure Container - Acquire Lease operation.\n */\nexport interface ContainerAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Release Lease operation.\n */\nexport interface ContainerReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Renew Lease operation.\n */\nexport interface 
ContainerRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Break Lease operation.\n */\nexport interface ContainerBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Change Lease operation.\n */\nexport interface ContainerChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.deleteBlob} operation.\n */\nexport interface ContainerDeleteBlobOptions extends BlobDeleteOptions {\n /**\n * An opaque DateTime value that, when present, specifies the version\n * of the blob to delete. It's for service version 2019-10-10 and newer.\n */\n versionId?: string;\n}\n\n/**\n * Options to configure Container - List Segment operations.\n *\n * See:\n * - {@link ContainerClient.listSegments}\n * - {@link ContainerClient.listBlobFlatSegment}\n * - {@link ContainerClient.listBlobHierarchySegment}\n * - {@link ContainerClient.listHierarchySegments}\n * - {@link ContainerClient.listItemsByHierarchy}\n */\ninterface ContainerListBlobsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid service.\n *\n * @param services -\n */\n public static parse(services: string): AccountSASServices {\n const accountSASServices = new AccountSASServices();\n\n for (const c of services) {\n switch (c) {\n case \"b\":\n accountSASServices.blob = true;\n break;\n case \"f\":\n accountSASServices.file = true;\n break;\n case \"q\":\n accountSASServices.queue = true;\n break;\n case \"t\":\n accountSASServices.table = true;\n break;\n default:\n throw new RangeError(`Invalid service character: ${c}`);\n }\n }\n\n return accountSASServices;\n }\n\n /**\n * Permission to access blob resources granted.\n */\n public blob: boolean = false;\n\n /**\n * Permission to access file resources granted.\n */\n public file: boolean = false;\n\n /**\n * Permission to access queue resources granted.\n */\n public queue: boolean = false;\n\n /**\n * Permission to access table resources granted.\n */\n public table: boolean = false;\n\n /**\n * Converts the given services to a string.\n *\n */\n public toString(): string {\n const services: string[] = [];\n if (this.blob) {\n services.push(\"b\");\n }\n if (this.table) {\n services.push(\"t\");\n }\n if (this.queue) {\n services.push(\"q\");\n }\n if (this.file) {\n services.push(\"f\");\n }\n return services.join(\"\");\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccountSASPermissions } from \"./AccountSASPermissions\";\nimport { AccountSASResourceTypes } from \"./AccountSASResourceTypes\";\nimport { AccountSASServices } from \"./AccountSASServices\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once\n * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation\n * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters}\n * exist because the former is mutable and a logical representation while the latter is immutable and used to generate\n * actual REST requests.\n *\n * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1\n * for more conceptual information on SAS\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n * for descriptions of the parameters, including which are required\n */\nexport interface AccountSASSignatureValues {\n /**\n * If not provided, this defaults to the service version targeted by this version of the library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * The time after which the SAS will no longer work.\n */\n expiresOn: Date;\n\n /**\n * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help\n * constructing the permissions string.\n */\n permissions: AccountSASPermissions;\n\n /**\n * Optional. 
IP range allowed.\n */\n ipRange?: SasIPRange;\n\n /**\n * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to\n * construct this value.\n */\n services: string;\n\n /**\n * The values that indicate the resource types accessible with this SAS. Please refer\n * to {@link AccountSASResourceTypes} to construct this value.\n */\n resourceTypes: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual\n * REST request.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @param accountSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateAccountSASQueryParameters(\n accountSASSignatureValues: AccountSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n const version = accountSASSignatureValues.version\n ? accountSASSignatureValues.version\n : SERVICE_VERSION;\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'x' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'y' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 't' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.filter &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 'f' permission.\");\n }\n\n if (accountSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n const parsedPermissions = AccountSASPermissions.parse(\n accountSASSignatureValues.permissions.toString()\n );\n const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();\n const parsedResourceTypes = AccountSASResourceTypes.parse(\n accountSASSignatureValues.resourceTypes\n ).toString();\n\n let stringToSign: string;\n\n if (version >= \"2020-12-06\") {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n accountSASSignatureValues.encryptionScope ? 
accountSASSignatureValues.encryptionScope : \"\",\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n } else {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n }\n\n const signature: string = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n version,\n signature,\n parsedPermissions.toString(),\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.protocol,\n accountSASSignatureValues.startsOn,\n accountSASSignatureValues.expiresOn,\n accountSASSignatureValues.ipRange,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n accountSASSignatureValues.encryptionScope\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport {\n TokenCredential,\n isTokenCredential,\n isNode,\n HttpResponse,\n getDefaultProxySettings,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n ServiceGetUserDelegationKeyHeaders,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ServiceGetPropertiesResponse,\n BlobServiceProperties,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentResponse,\n ContainerItem,\n UserDelegationKeyModel,\n ContainerUndeleteResponse,\n FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n ContainerRenameResponse,\n LeaseAccessConditions,\n FilterBlobSegment,\n FilterBlobItem,\n} from \"./generatedModels\";\nimport { Container, Service } from \"./generated/src/operations\";\nimport { newPipeline, StoragePipelineOptions, PipelineLike, isPipelineLike } from \"./Pipeline\";\nimport {\n ContainerClient,\n ContainerCreateOptions,\n ContainerDeleteMethodOptions,\n} from \"./ContainerClient\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n toTags,\n} from \"./utils/utils.common\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport \"@azure/core-paging\";\nimport { PageSettings, PagedAsyncIterableIterator } from \"@azure/core-paging\";\nimport { truncatedISO8061Date } from \"./utils/utils.common\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { AccountSASPermissions } from \"./sas/AccountSASPermissions\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateAccountSASQueryParameters } from \"./sas/AccountSASSignatureValues\";\nimport { AccountSASServices } from \"./sas/AccountSASServices\";\nimport { ListContainersIncludeType } from \"./generated/src\";\n\n/**\n * Options to configure the {@link 
BlobServiceClient.getProperties} operation.\n */\nexport interface ServiceGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.setProperties} operation.\n */\nexport interface ServiceSetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getAccountInfo} operation.\n */\nexport interface ServiceGetAccountInfoOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getStatistics} operation.\n */\nexport interface ServiceGetStatisticsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the Service - Get User Delegation Key.\n */\nexport interface ServiceGetUserDelegationKeyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainerSegment} operation.\n */\ninterface ServiceListContainersSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify that the container's metadata be returned as part of the response\n * body. 
Possible values include: 'metadata'\n */\n include?: ListContainersIncludeType | ListContainersIncludeType[];\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainers} operation.\n */\nexport interface ServiceListContainersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies whether the container's metadata\n * should be returned as part of the response body.\n */\n includeMetadata?: boolean;\n\n /**\n * Specifies whether soft deleted containers should be included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether system containers should be included in the response.\n */\n includeSystem?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTagsSegment} operation.\n */\ninterface ServiceFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ServiceFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ServiceFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A user delegation key.\n */\nexport interface UserDelegationKey {\n /**\n * The Azure Active Directory object ID in GUID format.\n */\n signedObjectId: string;\n /**\n * The Azure Active Directory tenant ID in GUID format.\n */\n signedTenantId: string;\n /**\n * The date-time the key is active.\n */\n signedStartsOn: Date;\n /**\n * The date-time the key expires.\n */\n signedExpiresOn: Date;\n /**\n * Abbreviation of the Azure Storage service that accepts the key.\n */\n signedService: string;\n /**\n * The service version that created the key.\n */\n signedVersion: string;\n /**\n * The key as a base64 string.\n */\n value: string;\n}\n\n/**\n * 
Contains response data for the {@link getUserDelegationKey} operation.\n */\nexport declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey &\n ServiceGetUserDelegationKeyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceGetUserDelegationKeyHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: UserDelegationKeyModel;\n };\n };\n\n/**\n * Options to configure {@link BlobServiceClient.undeleteContainer} operation.\n */\nexport interface ServiceUndeleteContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies the new name of the restored container.\n * Will use its original name if this is not specified.\n * @deprecated Restore container to a different name is not supported by service anymore.\n */\n destinationContainerName?: string;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.renameContainer} operation.\n */\nexport interface ServiceRenameContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Condition to meet for the source container.\n */\n sourceCondition?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation.\n */\nexport interface ServiceGenerateAccountSasUrlOptions {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n /**\n * Optional. IP range allowed.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you\n * to manipulate blob containers.\n */\nexport class BlobServiceClient extends StorageClient {\n /**\n * serviceContext provided by protocol layer.\n */\n private serviceContext: Service;\n\n /**\n *\n * Creates an instance of BlobServiceClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n public static fromConnectionString(\n connectionString: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ): BlobServiceClient {\n options = options || {};\n const extractedCreds = extractConnectionStringParts(connectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n const pipeline = newPipeline(sharedKeyCredential, options);\n return new BlobServiceClient(extractedCreds.url, pipeline);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n const pipeline = newPipeline(new AnonymousCredential(), options);\n return new BlobServiceClient(extractedCreds.url + \"?\" + extractedCreds.accountSas, pipeline);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n }\n\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n *\n * Example using DefaultAzureCredential from `@azure/identity`:\n *\n * ```js\n * const account = \"\";\n *\n * const defaultAzureCredential = new DefaultAzureCredential();\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * defaultAzureCredential\n * );\n * ```\n *\n * Example using an account name/key:\n *\n * ```js\n * const account = \"\"\n * const sharedKeyCredential = new StorageSharedKeyCredential(account, \"\");\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * sharedKeyCredential\n * );\n * ```\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". 
You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (\n (isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||\n credentialOrPipeline instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipeline)\n ) {\n pipeline = newPipeline(credentialOrPipeline, options);\n } else {\n // The second parameter is undefined. Use anonymous credential\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n super(url, pipeline);\n this.serviceContext = new Service(this.storageClientContext);\n }\n\n /**\n * Creates a {@link ContainerClient} object\n *\n * @param containerName - A container name\n * @returns A new ContainerClient object for the given container name.\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * ```\n */\n public getContainerClient(containerName: string): ContainerClient {\n return new ContainerClient(\n appendToURLPath(this.url, encodeURIComponent(containerName)),\n this.pipeline\n );\n }\n\n /**\n * Create a Blob container.\n *\n * @param containerName - Name of the container to create.\n * @param options - Options to configure Container Create operation.\n * @returns Container creation response and the corresponding container client.\n */\n public async createContainer(\n containerName: string,\n options: ContainerCreateOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerCreateResponse: ContainerCreateResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-createContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n const containerCreateResponse = await containerClient.create(updatedOptions);\n return {\n containerClient,\n containerCreateResponse,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Deletes a Blob container.\n *\n * @param containerName - Name of the container to delete.\n * @param options - Options to configure Container Delete operation.\n * @returns Container deletion response.\n */\n public async deleteContainer(\n containerName: string,\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-deleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n return await containerClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restore a previously deleted Blob container.\n * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container.\n *\n * @param deletedContainerName - Name 
of the previously deleted container.\n * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container.\n * @param options - Options to configure Container Restore operation.\n * @returns Container deletion response.\n */\n public async undeleteContainer(\n deletedContainerName: string,\n deletedContainerVersion: string,\n options: ServiceUndeleteContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerUndeleteResponse: ContainerUndeleteResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-undeleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(\n options.destinationContainerName || deletedContainerName\n );\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerUndeleteResponse = await containerContext.restore({\n deletedContainerName,\n deletedContainerVersion,\n ...updatedOptions,\n });\n return { containerClient, containerUndeleteResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Rename an existing Blob Container.\n *\n * @param sourceContainerName - The name of the source container.\n * @param destinationContainerName - The new name of the container.\n * @param options - Options to configure Container Rename operation.\n */\n /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */\n // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready.\n private async renameContainer(\n sourceContainerName: string,\n destinationContainerName: string,\n options: ServiceRenameContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerRenameResponse: ContainerRenameResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-renameContainer\", options);\n try {\n const containerClient = this.getContainerClient(destinationContainerName);\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerRenameResponse = await containerContext.rename(sourceContainerName, {\n ...updatedOptions,\n sourceLeaseId: options.sourceCondition?.leaseId,\n });\n return { containerClient, containerRenameResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the properties of a storage account’s Blob service, including properties\n * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * @param options - Options to the Service Get Properties operation.\n * @returns Response data for the Service Get Properties operation.\n */\n public async getProperties(\n options: ServiceGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getProperties\", options);\n try {\n return await this.serviceContext.getProperties({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets properties for a 
storage account’s Blob service endpoint, including properties\n * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties\n *\n * @param properties -\n * @param options - Options to the Service Set Properties operation.\n * @returns Response data for the Service Set Properties operation.\n */\n public async setProperties(\n properties: BlobServiceProperties,\n options: ServiceSetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-setProperties\", options);\n try {\n return await this.serviceContext.setProperties(properties, {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only\n * available on the secondary location endpoint when read-access geo-redundant\n * replication is enabled for the storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats\n *\n * @param options - Options to the Service Get Statistics operation.\n * @returns Response data for the Service Get Statistics operation.\n */\n public async getStatistics(\n options: ServiceGetStatisticsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getStatistics\", options);\n try {\n return await this.serviceContext.getStatistics({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Get Account Information operation returns the sku name and account kind\n * for the specified account.\n * The Get Account Information operation is available on service versions beginning\n * with version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information\n *\n * @param options - Options to the Service Get Account Info operation.\n * @returns Response data for the Service Get Account Info operation.\n */\n public async getAccountInfo(\n options: ServiceGetAccountInfoOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getAccountInfo\", options);\n try {\n return await this.serviceContext.getAccountInfo({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns a list of the containers under the specified account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to the Service List Container Segment operation.\n * @returns Response data for the Service List Container Segment operation.\n */\n private async listContainersSegment(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-listContainersSegment\", options);\n\n try {\n return await this.serviceContext.listContainersSegment({\n abortSignal: options.abortSignal,\n marker,\n ...options,\n include: typeof options.include === \"string\" ? [options.include] : options.include,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags\n * match a given search expression. Filter blobs searches across all containers within a\n * storage account but can be scoped within the expression to a single container.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"BlobServiceClient-findBlobsByTagsSegment\",\n options\n );\n\n try {\n const response = await this.serviceContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ServiceFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter 
enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ServiceFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list containers operation.\n */\n private async *listSegments(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let listContainersSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listContainersSegmentResponse = await this.listContainersSegment(marker, options);\n listContainersSegmentResponse.containerItems =\n listContainersSegmentResponse.containerItems || [];\n marker = listContainersSegmentResponse.continuationToken;\n yield await listContainersSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for Container Items\n *\n * @param options - Options to list containers operation.\n */\n private async *listItems(\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.listSegments(marker, options)) {\n yield* segment.containerItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the containers\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the containers in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const container of blobServiceClient.listContainers()) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.listContainers();\n * let containerItem = await iter.next();\n * while (!containerItem.done) {\n * console.log(`Container ${i++}: ${containerItem.value.name}`);\n * containerItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .listContainers()\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * ```\n *\n * @param options - Options to list containers.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listContainers(\n options: ServiceListContainersOptions = {}\n ): PagedAsyncIterableIterator {\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const include: ListContainersIncludeType[] = [];\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSystem) {\n 
include.push(\"system\");\n }\n\n // AsyncIterableIterator to iterate over containers\n const listSegmentOptions: ServiceListContainersSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include } : {}),\n };\n\n const iter = this.listItems(listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).\n *\n * Retrieves a user delegation key for the Blob service. This is only a valid operation when using\n * bearer token authentication.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key\n *\n * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time\n * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time\n */\n public async getUserDelegationKey(\n startsOn: Date,\n expiresOn: Date,\n options: ServiceGetUserDelegationKeyOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getUserDelegationKey\", options);\n try {\n const response = await this.serviceContext.getUserDelegationKey(\n {\n startsOn: truncatedISO8061Date(startsOn, false),\n expiresOn: truncatedISO8061Date(expiresOn, false),\n },\n {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n const userDelegationKey = {\n signedObjectId: response.signedObjectId,\n signedTenantId: response.signedTenantId,\n signedStartsOn: new Date(response.signedStartsOn),\n signedExpiresOn: new Date(response.signedExpiresOn),\n signedService: response.signedService,\n signedVersion: response.signedVersion,\n value: response.value,\n };\n\n const res: ServiceGetUserDelegationKeyResponse = {\n _response: response._response,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n version: response.version,\n date: response.date,\n errorCode: response.errorCode,\n ...userDelegationKey,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this service.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n\n /**\n * Only available for BlobServiceClient constructed with a shared key credential.\n *\n * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas\n *\n * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. 
Default to an hour later if not provided.\n * @param permissions - Specifies the list of permissions to be associated with the SAS.\n * @param resourceTypes - Specifies the resource types associated with the shared access signature.\n * @param options - Optional parameters.\n * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateAccountSasUrl(\n expiresOn?: Date,\n permissions: AccountSASPermissions = AccountSASPermissions.parse(\"r\"),\n resourceTypes: string = \"sco\",\n options: ServiceGenerateAccountSasUrlOptions = {}\n ): string {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw RangeError(\n \"Can only generate the account SAS when the client is initialized with a shared key credential\"\n );\n }\n\n if (expiresOn === undefined) {\n const now = new Date();\n expiresOn = new Date(now.getTime() + 3600 * 1000);\n }\n\n const sas = generateAccountSASQueryParameters(\n {\n permissions,\n expiresOn,\n resourceTypes,\n services: AccountSASServices.parse(\"b\").toString(),\n ...options,\n },\n this.credential\n ).toString();\n\n return appendToURLQuery(this.url, sas);\n }\n}\n"],"names":["BlobServicePropertiesMapper","QueryCollectionFormat","KeyInfoMapper","QueryRequestMapper","BlobTagsMapper","BlockLookupListMapper","coreHttp","getPropertiesOperationSpec","getAccountInfoOperationSpec","submitBatchOperationSpec","filterBlobsOperationSpec","xmlSerializer","Mappers.ServiceSetPropertiesHeaders","Mappers.StorageError","Mappers.ServiceSetPropertiesExceptionHeaders","Parameters.blobServiceProperties","Parameters.restype","Parameters.comp","Parameters.timeoutInSeconds","Parameters.url","Parameters.contentType","Parameters.accept","Parameters.version","Parameters.requestId","Mappers.BlobServiceProperties","Mappers.ServiceGetPropertiesHeaders","Mappers.ServiceGetPropertiesExceptionHeaders","Parameters.accept1","Mappers.BlobServiceStatistics","Mappers.ServiceGetStatisticsHeaders","Mappers.ServiceGetStatisticsExceptionHeaders","Parameters.comp1","Mappers.ListContainersSegmentResponse","Mappers.ServiceListContainersSegmentHeaders","Mappers.ServiceListContainersSegmentExceptionHeaders","Parameters.comp2","Parameters.prefix","Parameters.marker","Parameters.maxPageSize","Parameters.include","Mappers.UserDelegationKey","Mappers.ServiceGetUserDelegationKeyHeaders","Mappers.ServiceGetUserDelegationKeyExceptionHeaders","Parameters.keyInfo","Parameters.comp3","Mappers.ServiceGetAccountInfoHeaders","Mappers.ServiceGetAccountInfoExceptionHeaders","Parameters.restype1","Mappers.ServiceSubmitBatchHeaders","Mappers.ServiceSubmitBatchExceptionHeaders","Parameters.body","Parameters.comp4","Parameters.contentLength","Parameters.multipartContentType","Mappers.FilterBlobSegment","Mappers.ServiceFilterBlobsHeaders","Mappers.ServiceFilterBlobsExceptionHeaders","Parameters.comp5","Parameters.where","createOperationSpec","deleteOperationSpec","setMetadataOperationSpec","acquireLeaseOperationSpec","releaseLeaseOperationSpec","renewLeaseOperationSpec","breakLeaseOperationSpec","changeLeaseOperationSpec","Mappers.ContainerCreateHeaders","Mappers.ContainerCreateExceptionHeaders","Parameters.restype2","Parameters.metadata","Parameters.access","Parameters.defaultEncryptionScope","Parameters.preventEncryptionScopeOverride","Mappers.ContainerGetPropertiesHeaders","Mappers.ContainerGetPropertiesExceptionHeaders","Parameters.leaseId","Mappers.ContainerDeleteHeaders","Mappers.ContainerDeleteExceptionHeaders","Par
ameters.ifModifiedSince","Parameters.ifUnmodifiedSince","Mappers.ContainerSetMetadataHeaders","Mappers.ContainerSetMetadataExceptionHeaders","Parameters.comp6","Mappers.ContainerGetAccessPolicyHeaders","Mappers.ContainerGetAccessPolicyExceptionHeaders","Parameters.comp7","Mappers.ContainerSetAccessPolicyHeaders","Mappers.ContainerSetAccessPolicyExceptionHeaders","Parameters.containerAcl","Mappers.ContainerRestoreHeaders","Mappers.ContainerRestoreExceptionHeaders","Parameters.comp8","Parameters.deletedContainerName","Parameters.deletedContainerVersion","Mappers.ContainerRenameHeaders","Mappers.ContainerRenameExceptionHeaders","Parameters.comp9","Parameters.sourceContainerName","Parameters.sourceLeaseId","Mappers.ContainerSubmitBatchHeaders","Mappers.ContainerSubmitBatchExceptionHeaders","Mappers.ContainerFilterBlobsHeaders","Mappers.ContainerFilterBlobsExceptionHeaders","Mappers.ContainerAcquireLeaseHeaders","Mappers.ContainerAcquireLeaseExceptionHeaders","Parameters.comp10","Parameters.action","Parameters.duration","Parameters.proposedLeaseId","Mappers.ContainerReleaseLeaseHeaders","Mappers.ContainerReleaseLeaseExceptionHeaders","Parameters.action1","Parameters.leaseId1","Mappers.ContainerRenewLeaseHeaders","Mappers.ContainerRenewLeaseExceptionHeaders","Parameters.action2","Mappers.ContainerBreakLeaseHeaders","Mappers.ContainerBreakLeaseExceptionHeaders","Parameters.action3","Parameters.breakPeriod","Mappers.ContainerChangeLeaseHeaders","Mappers.ContainerChangeLeaseExceptionHeaders","Parameters.action4","Parameters.proposedLeaseId1","Mappers.ListBlobsFlatSegmentResponse","Mappers.ContainerListBlobFlatSegmentHeaders","Mappers.ContainerListBlobFlatSegmentExceptionHeaders","Parameters.include1","Mappers.ListBlobsHierarchySegmentResponse","Mappers.ContainerListBlobHierarchySegmentHeaders","Mappers.ContainerListBlobHierarchySegmentExceptionHeaders","Parameters.delimiter","Mappers.ContainerGetAccountInfoHeaders","Mappers.ContainerGetAccountInfoExceptionHeaders","Blob","Mappers.BlobDownloadHeaders","Mappers.BlobDownloadExceptionHeaders","Parameters.snapshot","Parameters.versionId","Parameters.range","Parameters.rangeGetContentMD5","Parameters.rangeGetContentCRC64","Parameters.encryptionKey","Parameters.encryptionKeySha256","Parameters.encryptionAlgorithm","Parameters.ifMatch","Parameters.ifNoneMatch","Parameters.ifTags","Mappers.BlobGetPropertiesHeaders","Mappers.BlobGetPropertiesExceptionHeaders","Mappers.BlobDeleteHeaders","Mappers.BlobDeleteExceptionHeaders","Parameters.blobDeleteType","Parameters.deleteSnapshots","Mappers.BlobUndeleteHeaders","Mappers.BlobUndeleteExceptionHeaders","Mappers.BlobSetExpiryHeaders","Mappers.BlobSetExpiryExceptionHeaders","Parameters.comp11","Parameters.expiryOptions","Parameters.expiresOn","Mappers.BlobSetHttpHeadersHeaders","Mappers.BlobSetHttpHeadersExceptionHeaders","Parameters.blobCacheControl","Parameters.blobContentType","Parameters.blobContentMD5","Parameters.blobContentEncoding","Parameters.blobContentLanguage","Parameters.blobContentDisposition","Mappers.BlobSetImmutabilityPolicyHeaders","Mappers.BlobSetImmutabilityPolicyExceptionHeaders","Parameters.comp12","Parameters.immutabilityPolicyExpiry","Parameters.immutabilityPolicyMode","Mappers.BlobDeleteImmutabilityPolicyHeaders","Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders","Mappers.BlobSetLegalHoldHeaders","Mappers.BlobSetLegalHoldExceptionHeaders","Parameters.comp13","Parameters.legalHold","Mappers.BlobSetMetadataHeaders","Mappers.BlobSetMetadataExceptionHeaders","Parameters.encryptionScope","Mapp
ers.BlobAcquireLeaseHeaders","Mappers.BlobAcquireLeaseExceptionHeaders","Mappers.BlobReleaseLeaseHeaders","Mappers.BlobReleaseLeaseExceptionHeaders","Mappers.BlobRenewLeaseHeaders","Mappers.BlobRenewLeaseExceptionHeaders","Mappers.BlobChangeLeaseHeaders","Mappers.BlobChangeLeaseExceptionHeaders","Mappers.BlobBreakLeaseHeaders","Mappers.BlobBreakLeaseExceptionHeaders","Mappers.BlobCreateSnapshotHeaders","Mappers.BlobCreateSnapshotExceptionHeaders","Parameters.comp14","Mappers.BlobStartCopyFromURLHeaders","Mappers.BlobStartCopyFromURLExceptionHeaders","Parameters.tier","Parameters.rehydratePriority","Parameters.sourceIfModifiedSince","Parameters.sourceIfUnmodifiedSince","Parameters.sourceIfMatch","Parameters.sourceIfNoneMatch","Parameters.sourceIfTags","Parameters.copySource","Parameters.blobTagsString","Parameters.sealBlob","Parameters.legalHold1","Mappers.BlobCopyFromURLHeaders","Mappers.BlobCopyFromURLExceptionHeaders","Parameters.xMsRequiresSync","Parameters.sourceContentMD5","Parameters.copySourceAuthorization","Parameters.copySourceTags","Mappers.BlobAbortCopyFromURLHeaders","Mappers.BlobAbortCopyFromURLExceptionHeaders","Parameters.comp15","Parameters.copyId","Parameters.copyActionAbortConstant","Mappers.BlobSetTierHeaders","Mappers.BlobSetTierExceptionHeaders","Parameters.comp16","Parameters.tier1","Mappers.BlobGetAccountInfoHeaders","Mappers.BlobGetAccountInfoExceptionHeaders","Mappers.BlobQueryHeaders","Mappers.BlobQueryExceptionHeaders","Parameters.queryRequest","Parameters.comp17","Mappers.BlobTags","Mappers.BlobGetTagsHeaders","Mappers.BlobGetTagsExceptionHeaders","Parameters.comp18","Mappers.BlobSetTagsHeaders","Mappers.BlobSetTagsExceptionHeaders","Parameters.tags","Parameters.transactionalContentMD5","Parameters.transactionalContentCrc64","serializer","Mappers.PageBlobCreateHeaders","Mappers.PageBlobCreateExceptionHeaders","Parameters.blobType","Parameters.blobContentLength","Parameters.blobSequenceNumber","Mappers.PageBlobUploadPagesHeaders","Mappers.PageBlobUploadPagesExceptionHeaders","Parameters.body1","Parameters.comp19","Parameters.contentType1","Parameters.accept2","Parameters.pageWrite","Parameters.ifSequenceNumberLessThanOrEqualTo","Parameters.ifSequenceNumberLessThan","Parameters.ifSequenceNumberEqualTo","Mappers.PageBlobClearPagesHeaders","Mappers.PageBlobClearPagesExceptionHeaders","Parameters.pageWrite1","Mappers.PageBlobUploadPagesFromURLHeaders","Mappers.PageBlobUploadPagesFromURLExceptionHeaders","Parameters.sourceUrl","Parameters.sourceRange","Parameters.sourceContentCrc64","Parameters.range1","Mappers.PageList","Mappers.PageBlobGetPageRangesHeaders","Mappers.PageBlobGetPageRangesExceptionHeaders","Parameters.comp20","Mappers.PageBlobGetPageRangesDiffHeaders","Mappers.PageBlobGetPageRangesDiffExceptionHeaders","Parameters.prevsnapshot","Parameters.prevSnapshotUrl","Mappers.PageBlobResizeHeaders","Mappers.PageBlobResizeExceptionHeaders","Mappers.PageBlobUpdateSequenceNumberHeaders","Mappers.PageBlobUpdateSequenceNumberExceptionHeaders","Parameters.sequenceNumberAction","Mappers.PageBlobCopyIncrementalHeaders","Mappers.PageBlobCopyIncrementalExceptionHeaders","Parameters.comp21","Mappers.AppendBlobCreateHeaders","Mappers.AppendBlobCreateExceptionHeaders","Parameters.blobType1","Mappers.AppendBlobAppendBlockHeaders","Mappers.AppendBlobAppendBlockExceptionHeaders","Parameters.comp22","Parameters.maxSize","Parameters.appendPosition","Mappers.AppendBlobAppendBlockFromUrlHeaders","Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders","Parameters.sourceRange1","Mapper
s.AppendBlobSealHeaders","Mappers.AppendBlobSealExceptionHeaders","Parameters.comp23","Mappers.BlockBlobUploadHeaders","Mappers.BlockBlobUploadExceptionHeaders","Parameters.blobType2","Mappers.BlockBlobPutBlobFromUrlHeaders","Mappers.BlockBlobPutBlobFromUrlExceptionHeaders","Parameters.copySourceBlobProperties","Mappers.BlockBlobStageBlockHeaders","Mappers.BlockBlobStageBlockExceptionHeaders","Parameters.comp24","Parameters.blockId","Mappers.BlockBlobStageBlockFromURLHeaders","Mappers.BlockBlobStageBlockFromURLExceptionHeaders","Mappers.BlockBlobCommitBlockListHeaders","Mappers.BlockBlobCommitBlockListExceptionHeaders","Parameters.blocks","Parameters.comp25","Mappers.BlockList","Mappers.BlockBlobGetBlockListHeaders","Mappers.BlockBlobGetBlockListExceptionHeaders","Parameters.listType","createClientLogger","URLBuilder","isNode","BaseRequestPolicy","StorageRetryPolicyType","AbortError","HttpHeaders","os","DefaultHttpClient","delay","tracingPolicy","keepAlivePolicy","generateClientRequestIdPolicy","deserializationPolicy","logPolicy","proxyPolicy","disableResponseDecompressionPolicy","isTokenCredential","createHmac","createSpanFunction","SASProtocol","StorageBlob","generateUuid","SpanStatusCode","Readable","__await","BlockBlobTier","PremiumPageBlobTier","StorageBlobAudience","Poller","EventEmitter","fs","util","getDefaultProxySettings","__asyncValues","__asyncDelegator","bearerTokenAuthenticationPolicy","WebResource"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;;;AAMG;AAII,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,OAAO,EAAE,0BAA0B;AACnC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,oBAAoB,EAAE;AACpB,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,SAAS;AACrB,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,SAAS;AACrB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,SAAS;AACrB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,UAAU;AACtB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,iBAAiB;AAC7B,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,eAAe;AAC3B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MA
AM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,iBAAiB;AAC7B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAA6B;AACvD,IAAA,cAAc,EAAE,iBAAiB;AACjC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iBAAiB;AAC5B,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,WAAW,EAAE;AACX,oBAAA,gBAAgB,EAAE,CAAC;AACpB,iBAAA;AACD,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,iBAAiB;AAC7B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,WAAW,EAAE;AACX,oBAAA,gBAAgB,EAAE,CAAC;AACpB,iBAAA;AACD,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAA6B;AACrD,IAAA,cAAc,EAAE,eAAe;AAC/B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,eAAe;AAC1B,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,oBAAoB,EAAE;AACpB,gBAAA,cAAc,EAAE,sBAAsB;AACtC,gBAAA,OAAO,EAAE,sBAAsB;AAC/B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,wBAAwB,EA
AE;AACxB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA6B;AACpD,IAAA,cAAc,EAAE,cAAc;AAC9B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,cAAc;AACzB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,OAAO,EAAE,qBAAqB;AAC9B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,gBAAgB;AAC5B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAA6B;AACtD,IAAA,cAAc,EAAE,gBAAgB;AAChC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gBAAgB;AAC3B,QAAA,eAAe,EAAE;AACf,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,MAAM,EAAE,WAAW,EAAE,aAAa,CAAC;AACpD,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,6BAA6B,GAA6B;AACrE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,+BAA+B;AAC1C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,eAAe;AAC3B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAA6B;AACrD,IAAA,cAAc,EAAE,eAAe;AAC/B,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,eAAe;AAC1B,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA
,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,qBAAqB;AACjC,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,8BAA8B,EAAE;AAC9B,gBAAA,cAAc,EAAE,6BAA6B;AAC7C,gBAAA,OAAO,EAAE,6BAA6B;AACtC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uCAAuC,EAAE;AACvC,gBAAA,cAAc,EAAE,uCAAuC;AACvD,gBAAA,OAAO,EAAE,uCAAuC;AAChD,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAA6B;AACzD,IAAA,cAAc,EAAE,mBAAmB;AACnC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mBAAmB;AAC9B,QAAA,eAAe,EAAE;AACf,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAA
E,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAA6B;AACzD,IAAA,cAAc,EAAE,mBAAmB;AACnC,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mBAAmB;AAC9B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,gBAAgB;AAC5B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAA6B;AACtD,IAAA,cAAc,EAAE,gBAAgB;AAChC,IAAA,OAAO,EAAE,MAAM;AACf,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gBAAgB;AAC3B,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,OAAO,EAAE,MAAM;AACf,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,SAAS;AACrB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,OAAO,EAAE,KAAK;AACd,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,GAAG,EAAE;AACH,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE
,KAAK;AACd,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA6B;AACxD,IAAA,cAAc,EAAE,kBAAkB;AAClC,IAAA,OAAO,EAAE,kBAAkB;AAC3B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kBAAkB;AAC7B,QAAA,eAAe,EAAE;AACf,YAAA,EAAE,EAAE;AACF,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,cAAc;AAC1B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA6B;AACpD,IAAA,cAAc,EAAE,cAAc;AAC9B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,cAAc;AACzB,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,qBAAqB;AACjC,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,OAAO,EAAE,OAAO;AAChB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,kBAAkB;AAC9B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA6B;AACxD,IAAA,cAAc,EAAE,kBAAkB;AAClC,IAAA,OAAO,EAAE,MAAM;AACf,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kBAAkB;AAC7B,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,W
AAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,wBAAwB;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,OAAO,EAAE,YAAY;AACrB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;
AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,IAAI;wBACJ,IAAI;wBACJ,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,MAAM;wBACN,SAAS;AACV,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,0BAA0B;wBAC1B,2BAA2B;AAC5B,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,
2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,sBAAsB;AACtC,gBAAA,OAAO,EAAE,sBAAsB;AAC/B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,6BAA6B;AAC7C,gBAAA,OAAO,EAAE,6BAA6B;AACtC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,0BAA0B;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wBAAwB,GAA6B;AAChE,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,OAAO,EAAE,OAAO;AAChB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0BAA0B;AACrC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;
AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,YAAY;AACxB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,kBAAkB;AAC9B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAA6B;AAClD,IAAA,cAAc,EAAE,YAAY;AAC5B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,YAAY;AACvB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAA6B;AACvD,IAAA,cAAc,EAAE,iBAAiB;AACjC,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iBAAiB;AAC5B,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,QAAQ;AACf,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,QAAQ;AACf,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,QAAQ;AACf,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAA6B;AACjD,IAAA,cAAc,EAAE,WAAW;AAC3B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,WAAW;AACtB,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,OAAO;AACnB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,OAAO;AACnB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA6B;AAC7C,IAAA,cAAc,EAAE,OAAO;AACvB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,OAAO;AAClB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,WAAW;AACvB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5
B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,YAAY;AACxB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAA6B;AACjD,IAAA,cAAc,EAAE,WAAW;AAC3B,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,WAAW;AACtB,QAAA,eAAe,EAAE;AACf,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,GAAG,EAAE;AACH,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAA6B;AAClD,IAAA,cAAc,EAAE,YAAY;AAC5B,IAAA,OAAO,EAAE,YAAY;AACrB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,YAAY;AACvB,QAAA,eAAe,EAAE;AACf,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,GAAG,EAAE;AACH,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA6B;AACpD,IAAA,cAAc,EAAE,cAAc;AAC9B,IAAA,OAAO,EAAE,cAAc;AACvB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,cAAc;AACzB,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,oBAAoB;AAChC,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,oBAAoB;AAChC,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,oBAAoB;AACpC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,aAAa;AACzB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,WAAW,GAA6B;AACnD,IAAA,cAAc,EAAE,aAAa;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,aAAa;AACxB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,EAAE,SAAS,CAAC;AACzD,iBAAA;AACF,aAAA;AACD,YAAA,0BAA0B,EAAE;AAC1B,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,4BAA4B;AACxC,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,uBAAuB;AACnC,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI
,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,oBAAoB;AAChC,iBAAA;AACF,aAAA;AACD,YAAA,wBAAwB,EAAE;AACxB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,KAAK;AACZ,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,OAAO,EAAE,4BAA4B;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,OAAO,EAAE,uBAAuB;AAChC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,oBAAoB;AACpC,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,YAAY;AACxB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAA6B;AAClD,IAAA,cAAc,EAAE,YAAY;AAC5B,IAAA,OAAO,EAAE,OAAO;AAChB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,YAAY;AACvB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;A
ACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AA
CzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2CAA2C,GAA6B;AACnF,IAAA,cAAc,EAAE,8CAA8C;AAC9D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6CAA6C;AACxD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;AACd,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;AACnB,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,8BAA8B,EAAE;AAC9B,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,
SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AA
C1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,6BAA6B,GAA6B;AACrE,IAAA,cAAc,EAAE,gCAAgC;AAChD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,+BAA+B;AAC1C,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,YAAY;AACrC,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,uCAAuC,EAAE;AACvC,gBAAA,cAAc,EAAE,gDAAgD;AAChE,gBAAA,OAAO,EAAE,gDAAgD;AACzD,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sCAAsC,GAA6B;AAC9E,IAAA,cAAc,EAAE,yCAAyC;AACzD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wCAAwC;AACnD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA
,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OA
AO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACj
B,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAA
A;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,E
AAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,WA
AW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iDAAiD,GAA6B;AACzF,IAAA,cAAc,EAAE,oDAAoD;AACpE,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mDAAmD;AAC9D,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;AACd,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAA
A,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;AACnB,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uCAAuC,GAA6B;AAC/E,IAAA,cAAc,EAAE,0CAA0C;AAC1D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yCAAyC;AACpD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,sBAAsB;AACtC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,YAAY;AACrC,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,UAAU;AACnC,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,a
AAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;A
ACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wBAAwB,GAA6B;AAChE,IAAA,cAAc,EAAE,2BAA2B;AAC3C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0BAA0B;AACrC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,YAAY;AACrC,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,UAAU;AACnC,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,gCAAgC;AAChD,gBAAA,OAAO,EAAE,gCAAgC;AACzC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,
aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,mB
AAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,oCAAoC;AACpD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAA6B;AACzD,IAAA,cAAc,EAAE,oBAAoB;AACpC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mBAAmB;AAC9B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,sBAAsB;AACtC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA
,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oBAAoB,GAA6B;AAC5D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sBAAsB;AACjC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,6BAA6B,GAA6B;AACrE,IAAA,cAAc,EAAE,gCAAgC;AAChD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,+BAA+B;AAC1C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,
EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,wBAAwB,EAAE;AACxB,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yCAAyC,GAA6B;AACjF,IAAA,cAAc,EAAE,4CAA4C;AAC5D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2CAA2C;AACtD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBA
AA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;
AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,
OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAA
A;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAA
A,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,YAAY,EAAE,SAAS;AACvB,gBAAA,UAAU,EAAE,IAAI;AAChB,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AA
CF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;AACd,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;AACnB,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA6B;AACxD,IAAA,cAAc,EAAE,mBAAmB;AACnC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kBAAkB;AAC7B,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,
EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;A
ACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAA
E;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBA
AiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,oCAAoC;AACpD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;
AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0CAA0C,GAA6B;AAClF,IAAA,cAAc,EAAE,6CAA6C;AAC7D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4CAA4C;AACvD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,o
BAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yCAAyC,GAA6B;AACjF,IAAA,cAAc,EAAE,4CAA4C;AAC5D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2CAA2C;AACtD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ
,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uCAAuC,GAA6B;AAC/E,IAAA,cAAc,EAAE,0CAA0C;AAC1D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yCAAyC;AACpD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,E
AAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,i
CAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,E
AAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QA
AA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAU,OAAkB;AAClB,QAAAC,gBAA2B;AAC5B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjH,eAAa;CAC1B,CAAC;AACF,MAAM,gCAAgC,GAA2B;AAC/D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEkH,4BAAoC;YAChD,aAAa,EAAEC,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEjH,YAAoB;YAChC,aAAa,EAAEkH,4CAAoD;AACpE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA7G,gBAA2B;AAC3B,QAAAiB,KAAgB;AAChB,QAAAC,MAAiB;AACjB,QAAAC,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAA+B,QAAmB;AACnB,QAAA2D,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC7G,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;AACpE,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEsH,iCAAyC;YACrD,aAAa,EAAEC,wCAAgD;AAChE,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErH,YAAoB;YAChC,aAAa,EAAEsH,iDAAyD;AACzE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAjH,gBAA2B;AAC3B,QAAAiB,KAAgB;AAChB,QAAAC,MAAiB;AACjB,QAAAC,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAA+B,QAAmB;AACnB,QAAA2D,QAAmB;AACnB,QAAAI,SAAoB;AACrB,KAAA;AACD,IAAA,aAAa,EAAE,CAACjH,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAMH,6BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE6H,8BAAsC;AACtD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExH,YAAoB;YAChC,aAAa,EAAEyH,uCAA+C;AAC/D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACrH,IAAe,EAAE8B,QAAmB,CAAC;AACvD,IAAA,aAAa,EAAE,CAAC5B,GAAc,CAAC;IAC/B,gBAAgB,EAAE,CAACG,OAAkB,EAAEK,OAAkB,CAAC;AAC1D,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B;;AC/5BD;;;;;;AAMG;AA0DH;MACa4H,MAAI,CAAA;AAGf;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;AAIG;AACH,IAAA,QAAQ,CACN,OAAoC,EAAA;AAEpC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEjI,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;KACpC;AAED;;;;AAIG;AACH,IAAA,aAAa,CACX,OAAyC,EAAA;AAEzC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACW,CAAC;KACzC;AAED;;;;;;;;;;;;;;AAcG;AACH,IAAA,MAAM,CAAC,OAAkC,EAAA;AACvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACW,CAAC;KAClC;AAED;;;AAGG;AACH,IAAA,QAAQ,CACN,OAAoC,EAAA;AAEpC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;KACpC;AAED;;;;AAIG;IACH,SAAS,CACP,aAAgC,EAChC,OAAqC,EAAA;AAErC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,sBAAsB,CACW,CAAC;KACrC;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;KAC1C;AAED;;;AAGG;AACH,IAAA,qBAAqB,CACnB,OAAiD,EAAA;AAEjD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oC
AAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACW,CAAC;KACjD;AAED;;;AAGG;AACH,IAAA,wBAAwB,CACtB,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACW,CAAC;KACpD;AAED;;;;AAIG;IACH,YAAY,CACV,SAAkB,EAClB,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;KACxC;AAED;;;;AAIG;AACH,IAAA,WAAW,CACT,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,YAAY,CACV,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;KACxC;AAED;;;;;AAKG;IACH,YAAY,CACV,OAAe,EACf,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;KACxC;AAED;;;;;AAKG;IACH,UAAU,CACR,OAAe,EACf,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;KACtC;AAED;;;;;;;;AAQG;AACH,IAAA,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,UAAU,CACR,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;KACtC;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;KAC1C;AAED;;;;;;;AAOG;IACH,gBAAgB,CACd,UAAkB,EAClB,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;KAC5C;AAED;;;;;;;;AAQG;IACH,WAAW,CACT,UAAkB,EAClB,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;KACvC;AAED;;;;;;AAMG;IACH,gBAAgB,CACd,MAAc,EACd,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;KAC5C;AAED;;;;;;;;AAQG;IACH,OAAO,CACL,IAAgB,EAChB,OAAmC,EAAA;AAEnC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;KACnC;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAAmC,EAAA;AAEnC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;KAC1C;AAED;;;;AAIG;AACH,IAAA,KAAK,CAAC,OAAiC,EAAA;AACrC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SA
CtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kBAAkB,CACW,CAAC;KACjC;AAED;;;AAGG;AACH,IAAA,OAAO,CAAC,OAAmC,EAAA;AACzC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;KACnC;AAED;;;AAGG;AACH,IAAA,OAAO,CAAC,OAAmC,EAAA;AACzC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;KACnC;AACF,CAAA;AACD;AACA,MAAMK,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AAEzE,MAAM,qBAAqB,GAA2B;AACpD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEkI,mBAA2B;AAC3C,SAAA;AACD,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEA,mBAA2B;AAC3C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3H,YAAoB;YAChC,aAAa,EAAE4H,4BAAoC;AACpD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvH,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,aAAa,EAAE,CAACxH,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAC,kBAA6B;AAC7B,QAAAC,oBAA+B;AAC/B,QAAAC,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0I,wBAAgC;AAChD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExI,YAAoB;YAChC,aAAa,EAAEyI,iCAAyC;AACzD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAApI,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,aAAa,EAAE,CAACxH,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,QAAQ;AACpB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE4I,iBAAyB;AACzC,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1I,YAAoB;YAChC,aAAa,EAAE2I,0BAAkC;AAClD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAtI,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACpB,QAAAc,cAAyB;AAC1B,KAAA;AACD,IAAA,aAAa,EAAE,CAACtI,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAM,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE/I,eAAa;CAC1B,CAAC;AACF,MAAM,qBAAqB,GAA2B;AACpD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgJ,mBAA2B;AAC3C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE9I,YAAoB;YAChC,aAAa,EAAE+I,4BAAoC;AACpD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC1I,gBAA2B,EAAE0E,KAAgB,CAAC;AAChE,IAAA,aAAa,EAAE,CAACzE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,sBAAsB,GAA2B;AACrD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT
,QAAA,GAAG,EAAE;YACH,aAAa,EAAEkJ,oBAA4B;AAC5C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhJ,YAAoB;YAChC,aAAa,EAAEiJ,6BAAqC;AACrD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC5I,gBAA2B,EAAE6I,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAC5I,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAqI,aAAwB;AACxB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEtJ,eAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuJ,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErJ,YAAoB;YAChC,aAAa,EAAEsJ,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAClJ,IAAe,EAAEC,gBAA2B,CAAC;AAC/D,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AAClC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE9J,eAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;AACjE,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+J,gCAAwC;AACxD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7J,YAAoB;YAChC,aAAa,EAAE8J,yCAAiD;AACjE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACzJ,gBAA2B,EAAE0J,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACzJ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAqD,iBAA4B;AAC5B,QAAA6F,wBAAmC;AACnC,QAAAC,sBAAiC;AAClC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEnK,eAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;AACpE,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,QAAQ;AACpB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEoK,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAElK,YAAoB;YAChC,aAAa,EAAEmK,4CAAoD;AACpE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC9J,gBAA2B,EAAE0J,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACzJ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsK,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpK,YAAoB;YAChC,aAAa,EAAEqK,gCAAwC;AACxD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAChK,gBAA2B,EAAEiK,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAChK,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyJ,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzK,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0K,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExK,YAAoB;YAChC,aAAa,EAAEyK,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACpK,gBAA2B,EAAEiE,KAAgB,CAAC;AAChE,IAAA,aAAa,EAAE,CAAChE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE5K,eAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE6K,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3K,YAAoB;YAChC,aAAa,EAAE4K,gCAAwC;AACxD,SAAA;AACF,KAAA;IACD,eA
Ae,EAAE,CAACvK,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA2B,MAAiB;AACjB,QAAAC,QAAmB;AACnB,QAAAC,eAA0B;AAC1B,QAAAqC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+K,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7K,YAAoB;YAChC,aAAa,EAAE8K,gCAAwC;AACxD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACzK,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAgC,OAAkB;AAClB,QAAAC,QAAmB;AACnB,QAAAiC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiL,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE/K,YAAoB;YAChC,aAAa,EAAEgL,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC3K,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAG,OAAkB;AAClB,QAAA8B,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEmL,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEjL,YAAoB;YAChC,aAAa,EAAEkL,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC7K,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAU,OAAkB;AAClB,QAAAC,gBAA2B;AAC3B,QAAAsB,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEqL,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEnL,YAAoB;YAChC,aAAa,EAAEoL,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC/K,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAuC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAA0B,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuL,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErL,YAAoB;YAChC,aAAa,EAAEsL,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACjL,gBAA2B,EAAEkL,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACjL,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE5K,eAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;AAC5D,IAAA,IAAI,EAAE,y
BAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0L,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExL,YAAoB;YAChC,aAAa,EAAEyL,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACpL,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAyB,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAyB,IAAe;AACf,QAAAC,iBAA4B;AAC5B,QAAAC,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAC,YAAuB;AACvB,QAAAC,UAAqB;AACrB,QAAAC,cAAyB;AACzB,QAAAC,QAAmB;AACnB,QAAAC,UAAqB;AACtB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEtM,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuM,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErM,YAAoB;YAChC,aAAa,EAAEsM,+BAAuC;AACvD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACjM,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAyB,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAE,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAE,UAAqB;AACrB,QAAAC,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAAG,eAA0B;AAC1B,QAAAC,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAC,cAAyB;AAC1B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE5M,eAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;AAC5D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE6M,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3M,YAAoB;YAChC,aAAa,EAAE4M,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvM,gBAA2B;AAC3B,QAAAwM,MAAiB;AACjB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACxM,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAgJ,uBAAkC;AACnC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjN,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEkN,kBAA0B;AAC1C,SAAA;AACD,QAAA,GAAG,EAAE;YACH,aAAa,EAAEA,kBAA0B;AAC1C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhN,YAAoB;YAChC,aAAa,EAAEiN,2BAAmC;AACnD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA5M,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACpB,QAAAoF,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC5M,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAwE,MAAiB;AACjB,QAAAoD,iBAA4B;AAC5B,QAAAwB,KAAgB;AACjB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErN,eAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsN,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpN,YAAoB;YAChC,aAAa,EAAEqN,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACjN,IAAe,EAAE8B,QAAmB,CAAC;AACvD,IAAA,aAAa,EAAE,CAAC5B,GAAc,CAAC;IAC/B,gBAAgB,EAAE,CAACG,OAAkB,EAAEK,OAAkB,CAAC;AAC1D,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,kBAAkB,GAA2B;AACjD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD
,aAAa,EAAEwN,gBAAwB;AACxC,SAAA;AACD,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEA,gBAAwB;AACxC,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtN,YAAoB;YAChC,aAAa,EAAEuN,yBAAiC;AACjD,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,YAAuB;AACpC,IAAA,eAAe,EAAE;AACf,QAAAnN,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAA4F,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACnN,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAqD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE4N,QAAgB;YAC5B,aAAa,EAAEC,kBAA0B;AAC1C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3N,YAAoB;YAChC,aAAa,EAAE4N,2BAAmC;AACnD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvN,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACpB,QAAA+F,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvN,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAwE,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgO,kBAA0B;AAC1C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE9N,YAAoB;YAChC,aAAa,EAAE+N,2BAAmC;AACnD,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,IAAe;AAC5B,IAAA,eAAe,EAAE;AACf,QAAA3N,gBAA2B;AAC3B,QAAAyH,SAAoB;AACpB,QAAA+F,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvN,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAqD,OAAkB;AAClB,QAAAwE,MAAiB;AACjB,QAAA0F,uBAAkC;AAClC,QAAAC,yBAAoC;AACrC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAEpO,eAAa;CAC1B;;AChwCD;;;;;;AAMG;AA4BH;MACa,QAAQ,CAAA;AAGnB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;;;AAMG;AACH,IAAA,MAAM,CACJ,aAAqB,EACrB,iBAAyB,EACzB,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,iBAAiB;YACjB,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBqD,qBAAmB,CACe,CAAC;KACtC;AAED;;;;;AAKG;AACH,IAAA,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAErD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACe,CAAC;KAC3C;AAED;;;;AAIG;IACH,UAAU,CACR,aAAqB,EACrB,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACe,CAAC;KAC1C;AAED;;;;;;;;;;AAUG;IACH,kBAAkB,CAChB,SAAiB,EACjB,WAAmB,EACnB,aAAqB,EACrB,KAAa,EACb,OAAkD,EAAA;AAElD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,WAAW;YACX,aAAa;YACb,KAAK;YACL,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACe,CAAC;KAClD;AAED;;;;AAIG;AACH,IAAA,aAAa,CACX,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACe,CAAC;KAC7C;AAED;;
;;AAIG;AACH,IAAA,iBAAiB,CACf,OAAiD,EAAA;AAEjD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACe,CAAC;KACjD;AAED;;;;;AAKG;IACH,MAAM,CACJ,iBAAyB,EACzB,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,iBAAiB;YACjB,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACe,CAAC;KACtC;AAED;;;;;;AAMG;IACH,oBAAoB,CAClB,oBAA8C,EAC9C,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,oBAAoB;YACpB,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACe,CAAC;KACpD;AAED;;;;;;;;;;;AAWG;IACH,eAAe,CACb,UAAkB,EAClB,OAA+C,EAAA;AAE/C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACe,CAAC;KAC/C;AACF,CAAA;AACD;AACA,MAAMK,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AACzE,MAAM0O,YAAU,GAAG,IAAI1O,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,KAAK,CAAC,CAAC;AAEvE,MAAMqD,qBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsL,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpO,YAAoB;YAChC,aAAa,EAAEqO,8BAAsC;AACtD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAAChO,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAQ,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAAkC,QAAmB;AACnB,QAAAC,iBAA4B;AAC5B,QAAAC,kBAA6B;AAC9B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE1O,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE2O,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEzO,YAAoB;YAChC,aAAa,EAAE0O,mCAA2C;AAC3D,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,KAAgB;IAC7B,eAAe,EAAE,CAACtO,gBAA2B,EAAEuO,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACtO,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAG,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAuD,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAC,iCAA4C;AAC5C,QAAAC,wBAAmC;AACnC,QAAAC,uBAAkC;AACnC,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;gBACnBf,YAAU;CACX,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgB,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEnP,YAAoB;YAChC,aAAa,EAAEoP,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC/O,gBAA2B,EAAEuO,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACtO,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAG,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QA
AAsE,iCAA4C;AAC5C,QAAAC,wBAAmC;AACnC,QAAAC,uBAAkC;AAClC,QAAAG,UAAqB;AACtB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEvP,eAAa;CAC1B,CAAC;AACF,MAAM,+BAA+B,GAA2B;AAC9D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEwP,iCAAyC;AACzD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtP,YAAoB;YAChC,aAAa,EAAEuP,0CAAkD;AAClE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAClP,gBAA2B,EAAEuO,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACtO,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAkB,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAS,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAsC,SAAoB;AACpB,QAAAC,iCAA4C;AAC5C,QAAAC,wBAAmC;AACnC,QAAAC,uBAAkC;AAClC,QAAAM,SAAoB;AACpB,QAAAC,WAAsB;AACtB,QAAAC,kBAA6B;AAC7B,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE7P,eAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE8P,QAAgB;YAC5B,aAAa,EAAEC,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7P,YAAoB;YAChC,aAAa,EAAE8P,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAzP,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAoG,QAAmB;AACnB,QAAAkI,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACzP,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAM,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,8BAA8B,GAA2B;AAC7D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE8P,QAAgB;YAC5B,aAAa,EAAEI,gCAAwC;AACxD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhQ,YAAoB;YAChC,aAAa,EAAEiQ,yCAAiD;AACjE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA5P,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAoG,QAAmB;AACnB,QAAAkI,MAAiB;AACjB,QAAAG,YAAuB;AACxB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC5P,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAM,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAA4H,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErQ,eAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsQ,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpQ,YAAoB;YAChC,aAAa,EAAEqQ,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACjQ,IAAe,EAAEC,gBAA2B,CAAC;AAC/D,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAA6D,iBAA4B;AAC7B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzO,eAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;AAChE,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEwQ,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtQ,YAAoB;YAChC,aAAa,EAAEuQ,4CAAoD;AACpE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACnQ,IAAe,EAAEC,gBAA2B,CAAC;AAC/D,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,
gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAiG,kBAA6B;AAC7B,QAAAgC,oBAA+B;AAChC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE1Q,eAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE2Q,8BAAsC;AACtD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEzQ,YAAoB;YAChC,aAAa,EAAE0Q,uCAA+C;AAC/D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACrQ,gBAA2B,EAAEsQ,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACrQ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAA0D,UAAqB;AACtB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEnM,eAAa;CAC1B;;ACzkBD;;;;;;AAMG;AAiBH;MACa,UAAU,CAAA;AAGrB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;AAIG;IACH,MAAM,CACJ,aAAqB,EACrB,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACiB,CAAC;KACxC;AAED;;;;;;;AAOG;AACH,IAAA,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACiB,CAAC;KAC7C;AAED;;;;;;;;AAQG;AACH,IAAA,kBAAkB,CAChB,SAAiB,EACjB,aAAqB,EACrB,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,aAAa;YACb,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACiB,CAAC;KACpD;AAED;;;;AAIG;AACH,IAAA,IAAI,CACF,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iBAAiB,CACiB,CAAC;KACtC;AACF,CAAA;AACD;AACA,MAAMK,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AACzE,MAAM0O,YAAU,GAAG,IAAI1O,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEmR,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE5Q,YAAoB;YAChC,aAAa,EAAE6Q,gCAAwC;AACxD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACxQ,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAwB,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAA0E,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhR,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiR,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE/Q,YAAoB;YAChC,aAAa,EAAEgR,qCAA6C;AAC7D,SAAA;AACF,KAAA;IACD,WAAW,EAAErC,KAAgB;IAC7B,eAAe,EAAE,CAACtO,gBAA2B,EAAE4Q,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAC3Q,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAA
AC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAuD,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AAClB,QAAAoC,OAAkB;AAClB,QAAAC,cAAyB;AAC1B,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;gBACnBhD,YAAU;CACX,CAAC;AACF,MAAM,+BAA+B,GAA2B;AAC9D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiD,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpR,YAAoB;YAChC,aAAa,EAAEqR,4CAAoD;AACpE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAChR,gBAA2B,EAAE4Q,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAC3Q,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAkB,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAS,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAwB,uBAAkC;AAClC,QAAAuB,SAAoB;AACpB,QAAAE,kBAA6B;AAC7B,QAAAwB,OAAkB;AAClB,QAAAC,cAAyB;AACzB,QAAAG,YAAuB;AACxB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAExR,eAAa;CAC1B,CAAC;AACF,MAAM,iBAAiB,GAA2B;AAChD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEyR,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEvR,YAAoB;YAChC,aAAa,EAAEwR,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACnR,gBAA2B,EAAEoR,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACnR,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAA6I,cAAyB;AAC1B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErR,eAAa;CAC1B;;ACxRD;;;;;;AAMG;AAuBH;MACa,SAAS,CAAA;AAGpB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;;;;;AAQG;AACH,IAAA,MAAM,CACJ,aAAqB,EACrB,IAA8B,EAC9B,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;KACvC;AAED;;;;;;;;;;;;AAYG;AACH,IAAA,cAAc,CACZ,aAAqB,EACrB,UAAkB,EAClB,OAA+C,EAAA;AAE/C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACgB,CAAC;KAC/C;AAED;;;;;;;;AAQG;AACH,IAAA,UAAU,CACR,OAAe,EACf,aAAqB,EACrB,IAA8B,EAC9B,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;KAC3C;AAED;;;;;;;;;AASG;AACH,IAAA,iBAAiB,CACf,OAAe,EACf,aAAqB,EACrB,SAAiB,EACjB,OAAkD,EAAA;AAElD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,SAAS;YACT,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACgB,CAAC;KAClD;AAED;;;;;;;;;;AAUG;IACH,eAAe,CACb,MAAuB,EACvB,OAAgD,EAAA;AAEhD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;KAChD;AAED;;;;;;AAMG;IACH,YAAY,CACV,QAAuB,EACvB,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,QAAQ;YACR,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;
KAC7C;AACF,CAAA;AACD;AACA,MAAM,aAAa,GAAG,IAAIA,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAIA,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiS,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1R,YAAoB;YAChC,aAAa,EAAE2R,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,WAAW,EAAEhD,KAAgB;AAC7B,IAAA,eAAe,EAAE,CAACtO,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAQ,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAA6B,uBAAkC;AAClC,QAAAY,YAAuB;AACvB,QAAAC,OAAkB;AAClB,QAAA8C,SAAoB;AACrB,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEC,8BAAsC;AACtD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7R,YAAoB;YAChC,aAAa,EAAE8R,uCAA+C;AAC/D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACzR,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAc,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAE,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAC,YAAuB;AACvB,QAAAC,UAAqB;AACrB,QAAAC,cAAyB;AACzB,QAAAM,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAC,cAAyB;AACzB,QAAAuB,uBAAkC;AAClC,QAAA2D,SAAoB;AACpB,QAAAG,wBAAmC;AACpC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEC,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhS,YAAoB;YAChC,aAAa,EAAEiS,mCAA2C;AAC3D,SAAA;AACF,KAAA;IACD,WAAW,EAAEtD,KAAgB;AAC7B,IAAA,eAAe,EAAE;AACf,QAAAtO,gBAA2B;AAC3B,QAAA6R,MAAiB;AACjB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC7R,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAmE,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAsC,eAA0B;AAC1B,QAAAuD,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,8BAA8B,GAA2B;AAC7D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsD,iCAAyC;AACzD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpS,YAAoB;YAChC,aAAa,EAAEqS,0CAAkD;AAClE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAhS,gBAA2B;AAC3B,QAAA6R,MAAiB;AACjB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC7R,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAmE,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAsC,eAA0B;AAC1B,QAAAkB,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAS,gBAA2B;AAC3B,QAAAC,uBAAkC;AAC
lC,QAAA+C,SAAoB;AACpB,QAAAE,kBAA6B;AAC7B,QAAA4B,YAAuB;AACxB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgB,+BAAuC;AACvD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtS,YAAoB;YAChC,aAAa,EAAEuS,wCAAgD;AAChE,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,MAAiB;IAC9B,eAAe,EAAE,CAACnS,gBAA2B,EAAEoS,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACnS,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA+C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAQ,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAA6B,uBAAkC;AAClC,QAAAC,yBAAoC;AACrC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEwE,SAAiB;YAC7B,aAAa,EAAEC,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3S,YAAoB;YAChC,aAAa,EAAE4S,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvS,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAA4K,MAAiB;AACjB,QAAAI,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvS,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAwE,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE,aAAa;CAC1B;;AC3cD;AAKA;;AAEG;MACU,MAAM,GAAGuK,2BAAkB,CAAC,cAAc;;ACRvD;AACA;AAEO,MAAM,WAAW,GAAW,SAAS,CAAC;AACtC,MAAM,eAAe,GAAW,YAAY,CAAC;AAE7C,MAAM,gCAAgC,GAAW,GAAG,GAAG,IAAI,GAAG,IAAI,CAAC;AACnE,MAAM,gCAAgC,GAAW,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC;AACpE,MAAM,qBAAqB,GAAW,KAAK,CAAC;AAC5C,MAAM,+BAA+B,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAChE,MAAM,iCAAiC,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAClE,MAAM,mCAAmC,GAAW,CAAC,CAAC;AAC7D;;AAEG;AACI,MAAM,kBAAkB,GAAsB,qCAAqC;AAEnF,MAAM,YAAY,GAAG;AAC1B,IAAA,UAAU,EAAE;AACV,QAAA,sBAAsB,EAAE,GAAG;AAC3B,QAAA,SAAS,EAAE,KAAK;AAChB,QAAA,QAAQ,EAAE,UAAU;AACpB,QAAA,SAAS,EAAE,WAAW;AACtB,QAAA,OAAO,EAAE,SAAS;AACnB,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAAG;AAC/B,IAAA,aAAa,EAAE,GAAG;AAClB,IAAA,aAAa,EAAE,GAAG;AAClB,IAAA,cAAc,EAAE,GAAG;AACnB,IAAA,kBAAkB,EAAE,GAAG;AACvB,IAAA,0BAA0B,EAAE,GAAG;CAChC,CAAC;AAEK,MAAM,eAAe,GAAG;AAC7B,IAAA,aAAa,EAAE,eAAe;AAC9B,IAAA,oBAAoB,EAAE,QAAQ;AAC9B,IAAA,gBAAgB,EAAE,kBAAkB;AACpC,IAAA,UAAU,EAAE,YAAY;AACxB,IAAA,gBAAgB,EAAE,kBAAkB;AACpC,IAAA,cAAc,EAAE,gBAAgB;AAChC,IAAA,WAAW,EAAE,aAAa;AAC1B,IAAA,yBAAyB,EAAE,2BAA2B;AACtD,IAAA,YAAY,EAAE,cAAc;AAC5B,IAAA,MAAM,EAAE,QAAQ;AAChB,IAAA,IAAI,EAAE,MAAM;AACZ,IAAA,QAAQ,EAAE,UAAU;AACpB,IAAA,iBAAiB,EAAE,mBAAmB;AACtC,IAAA,aAAa,EAAE,eAAe;AAC9B,IAAA,mBAAmB,EAAE,qBAAqB;AAC1C,IAAA,kBAAkB,EAAE,OAAO;AAC3B,IAAA,KAAK,EAAE,OAAO;AACd,IAAA,UAAU,EAAE,YAAY;AACxB,IAAA,sBAAsB,EAAE,wBAAwB;AAChD,IAAA,gBAAgB,EAAE,kBAAkB;AACpC,IAAA,SAAS,EAAE,WAAW;AACtB,IAAA,eAAe,EAAE,iBAAiB;AAClC,IAAA,YAAY,EAAE,cAAc;CAC7B,CAAC;AAEK,MAAM,QAAQ,GAAG,EAAE,CAAC;AACpB,MAAM,OAAO,GAAG,GAAG,CAAC;AAEpB,MAAM,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAClC,MAAM,iBAAiB,GAAG,GAAG,CAAC;AAC9B,MAAM,0BAA0B,GAAG,CAAC,GAAG,SAAS,CAAC;AACjD,MAAM,gBAAgB,GAAG,MAAM,CAAC;AAChC,MAAM,gBAAgB,GAAG,UAAU,CAAC;AAEpC,MAAM,wBAAwB,GAAG,QAAQ,CAAC;AAE1C,MAAM,2BAA2B,GAAG,sNAAsN,CAAC;AAE3P,MAAM,oCAAoC,GAAG;IAClD,6B
AA6B;IAC7B,eAAe;IACf,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,YAAY;IACZ,aAAa;IACb,mBAAmB;IACnB,YAAY;IACZ,wBAAwB;IACxB,WAAW;IACX,iBAAiB;IACjB,iBAAiB;IACjB,+BAA+B;IAC/B,cAAc;IACd,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,kBAAkB;IAClB,aAAa;IACb,eAAe;IACf,MAAM;IACN,eAAe;IACf,QAAQ;IACR,MAAM;IACN,oBAAoB;IACpB,kBAAkB;IAClB,2BAA2B;IAC3B,cAAc;IACd,oBAAoB;IACpB,kBAAkB;IAClB,8BAA8B;IAC9B,qBAAqB;IACrB,kBAAkB;IAClB,mBAAmB;IACnB,YAAY;IACZ,+BAA+B;IAC/B,uBAAuB;IACvB,eAAe;IACf,mBAAmB;IACnB,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,8BAA8B;IAC9B,2BAA2B;IAC3B,mBAAmB;IACnB,qBAAqB;IACrB,yBAAyB;IACzB,yBAAyB;IACzB,iCAAiC;IACjC,+BAA+B;IAC/B,6BAA6B;IAC7B,+BAA+B;IAC/B,4BAA4B;IAC5B,4BAA4B;IAC5B,0BAA0B;IAC1B,uBAAuB;IACvB,wBAAwB;IACxB,yBAAyB;IACzB,2BAA2B;IAC3B,gBAAgB;IAChB,gCAAgC;IAChC,oBAAoB;IACpB,+BAA+B;IAC/B,uBAAuB;IACvB,4BAA4B;IAC5B,qCAAqC;IACrC,2BAA2B;IAC3B,4BAA4B;IAC5B,4BAA4B;IAC5B,4BAA4B;IAC5B,uBAAuB;IACvB,mBAAmB;IACnB,yBAAyB;IACzB,qBAAqB;IACrB,eAAe;IACf,iBAAiB;IACjB,iBAAiB;IACjB,wBAAwB;IACxB,4BAA4B;IAC5B,yBAAyB;IACzB,6BAA6B;IAC7B,eAAe;IACf,yBAAyB;IACzB,sBAAsB;IACtB,+BAA+B;IAC/B,2BAA2B;IAC3B,iCAAiC;IACjC,gBAAgB;IAChB,4BAA4B;IAC5B,cAAc;IACd,qBAAqB;CACtB,CAAC;AAEK,MAAM,wCAAwC,GAAG;IACtD,MAAM;IACN,YAAY;IACZ,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,IAAI;IACJ,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,IAAI;IACJ,IAAI;IACJ,SAAS;IACT,QAAQ;IACR,QAAQ;IACR,QAAQ;IACR,SAAS;IACT,SAAS;IACT,eAAe;IACf,WAAW;IACX,cAAc;IACd,KAAK;IACL,OAAO;IACP,KAAK;IACL,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;CACX,CAAC;AAEK,MAAM,sCAAsC,GAAG,qCAAqC,CAAC;AACrF,MAAM,yCAAyC,GACpD,2CAA2C;;ACjN7C;AAmDA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmDG;AACG,SAAU,aAAa,CAAC,GAAW,EAAA;IACvC,MAAM,SAAS,GAAGC,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAExC,IAAA,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;AAC/B,IAAA,IAAI,GAAG,IAAI,IAAI,GAAG,CAAC;AAEnB,IAAA,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC;AACpB,IAAA,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAExB,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAWD,SAAS,4BAA4B,CAAC,gBAAwB,EAAA;;;IAG5D,IAAI,QAAQ,GAAG,EAAE,CAAC;IAClB,IAAI,gBAAgB,CAAC,MAAM,CAAC,6BAA6B,CAAC,KAAK,CAAC,CAAC,EAAE;;QAEjE,MAAM,gBAAgB,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACrD,QAAA,KAAK,MAAM,OAAO,IAAI,gBAAgB,EAAE;YACtC,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,6BAA6B,CAAC,EAAE;AAC5D,gBAAA,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,iCAAiC,CAAE,CAAC,CAAC,CAAC,CAAC;AACxE,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC;AAClB,CAAC;AAEe,SAAA,oBAAoB,CAClC,gBAAwB,EACxB,QAM2B,EAAA;IAE3B,MAAM,QAAQ,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7C,IAAA,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;AACvC,YAAA,OAAO,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAE,CAAC,CAAC,CAAC,CAAC;AACrD,SAAA;AACF,KAAA;AACD,IAAA,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;;AAKG;AACG,SAAU,4BAA4B,CAAC,gBAAwB,EAAA;IACnE,IAAI,QAAQ,GAAG,EAAE,CAAC;AAElB,IAAA,IAAI,gBAAgB,CAAC,UAAU,CAAC,4BAA4B,CAAC,EAAE;;AAE7D,QAAA,QAAQ,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;QAC1D,gBAAgB,GAAG,2BAA2B,CAAC;AAChD,KAAA;;IAGD,IAAI,YAAY,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,cAAc,CAAC,CAAC;;;IAG1E,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC;IAErF,IACE,gBAAgB,CAAC,MAAM,CAAC,2BAA2B,CAAC,KAAK,CAAC,CAAC;QAC3D,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,EAC7C;;QAGA,IAAI,wBAAwB,GAAG,EAAE,CAAC;QAClC,IAAI,WAAW,GAAG,EAAE,CAAC;QACrB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QACrD,IAAI,cAAc,GAAG,EAAE,CAAC;;AAGxB,QAAA,WAAW,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,aAAa,CAAC,CAAC;AACpE,QAAA,UAAU,GAAG,MAAM,CA
AC,IAAI,CAAC,oBAAoB,CAAC,gBAAgB,EAAE,YAAY,CAAC,EAAE,QAAQ,CAAC,CAAC;QAEzF,IAAI,CAAC,YAAY,EAAE;;;AAIjB,YAAA,wBAAwB,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,0BAA0B,CAAC,CAAC;AAC9F,YAAA,MAAM,QAAQ,GAAG,wBAAyB,CAAC,WAAW,EAAE,CAAC;AACzD,YAAA,IAAI,QAAQ,KAAK,OAAO,IAAI,QAAQ,KAAK,MAAM,EAAE;AAC/C,gBAAA,MAAM,IAAI,KAAK,CACb,iGAAiG,CAClG,CAAC;AACH,aAAA;AAED,YAAA,cAAc,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;YAC1E,IAAI,CAAC,cAAc,EAAE;AACnB,gBAAA,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;AAC7E,aAAA;YACD,YAAY,GAAG,GAAG,wBAAwB,CAAA,GAAA,EAAM,WAAW,CAAS,MAAA,EAAA,cAAc,EAAE,CAAC;AACtF,SAAA;QAED,IAAI,CAAC,WAAW,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;AAC1E,SAAA;AAAM,aAAA,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;AAClC,YAAA,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;AACzE,SAAA;QAED,OAAO;AACL,YAAA,IAAI,EAAE,mBAAmB;AACzB,YAAA,GAAG,EAAE,YAAY;YACjB,WAAW;YACX,UAAU;YACV,QAAQ;SACT,CAAC;AACH,KAAA;AAAM,SAAA;;QAGL,MAAM,UAAU,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,uBAAuB,CAAC,CAAC;AACnF,QAAA,MAAM,WAAW,GAAG,qBAAqB,CAAC,YAAY,CAAC,CAAC;QACxD,IAAI,CAAC,YAAY,EAAE;AACjB,YAAA,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;AAC/E,SAAA;aAAM,IAAI,CAAC,UAAU,EAAE;AACtB,YAAA,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;AACxF,SAAA;AAED,QAAA,OAAO,EAAE,IAAI,EAAE,eAAe,EAAE,GAAG,EAAE,YAAY,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC;AAC9E,KAAA;AACH,CAAC;AAED;;;;AAIG;AACH,SAAS,MAAM,CAAC,IAAY,EAAA;IAC1B,OAAO,kBAAkB,CAAC,IAAI,CAAC;AAC5B,SAAA,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC;AACpB,SAAA,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;AACpB,SAAA,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;AACrB,SAAA,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAC1B,CAAC;AAED;;;;;;;AAOG;AACa,SAAA,eAAe,CAAC,GAAW,EAAE,IAAY,EAAA;IACvD,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAExC,IAAA,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;AAC/B,IAAA,IAAI,GAAG,IAAI,IAAI,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAA,EAAG,IAAI,CAAA,EAAG,IAAI,CAAA,CAAE,GAAG,CAAG,EAAA,IAAI,CAAI,CAAA,EAAA,IAAI,EAAE,IAAI,IAAI,CAAC;AACjF,IAAA,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAExB,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;;AAQG;SACa,eAAe,CAAC,GAAW,EAAE,IAAY,EAAE,KAAc,EAAA;IACvE,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,SAAS,CAAC,iBAAiB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;AACzC,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;AAKG;AACa,SAAA,eAAe,CAAC,GAAW,EAAE,IAAY,EAAA;IACvD,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,OAAO,SAAS,CAAC,sBAAsB,CAAC,IAAI,CAAC,CAAC;AAChD,CAAC;AAED;;;;;;AAMG;AACa,SAAA,UAAU,CAAC,GAAW,EAAE,IAAY,EAAA;IAClD,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AACxB,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;AAIG;AACG,SAAU,UAAU,CAAC,GAAW,EAAA;IACpC,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,OAAO,SAAS,CAAC,OAAO,EAAE,CAAC;AAC7B,CAAC;AAED;;;;AAIG;AACG,SAAU,YAAY,CAAC,GAAW,EAAA;IACtC,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC;AAC/B,CAAC;AAED;;;;AAIG;AACG,SAAU,kBAAkB,CAAC,GAAW,EAAA;IAC5C,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,MAAM,UAAU,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IACvC,IAAI,CAAC,UAAU,EAAE;AACf,QAAA,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,CAAC;AACzD,KAAA;IAED,IAAI,WAAW,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC;AAC7C,IAAA,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,IAAI,WAAW,KAAK,EAAE,EAAE;AACtB,QAAA,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,WAAW,GAAG,CAAA,CAAA,EAAI,WAAW,CAAE,CAAA,CAAC;AAC7E,KAAA;AAED,IAAA,OAAO,CAAG,EAAA,UAAU,CAAG,EAAA,WAAW,EAAE,CAAC;AACvC,CAAC;AAED;;;;AAIG;AACG,SAAU,aAAa,CAAC,GAAW,EAAA;IACvC,IAAI,WAAW,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;IACnD,IAAI,CAAC,WA
AW,EAAE;AAChB,QAAA,OAAO,EAAE,CAAC;AACX,KAAA;AAED,IAAA,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,WAAW,CAAC;IAEhF,IAAI,eAAe,GAAa,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACvD,eAAe,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC,KAAa,KAAI;QACzD,MAAM,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACxC,MAAM,gBAAgB,GAAG,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAChD,QAAA,QACE,YAAY,GAAG,CAAC,IAAI,YAAY,KAAK,gBAAgB,IAAI,gBAAgB,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAC5F;AACJ,KAAC,CAAC,CAAC;IAEH,MAAM,OAAO,GAA8B,EAAE,CAAC;AAC9C,IAAA,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;QAC5C,MAAM,YAAY,GAAG,cAAc,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC/C,QAAA,MAAM,GAAG,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;AACpC,QAAA,MAAM,KAAK,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;AACtC,QAAA,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;AACtB,KAAA;AAED,IAAA,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;;;;;AAMG;AACa,SAAA,gBAAgB,CAAC,GAAW,EAAE,UAAkB,EAAA;IAC9D,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAExC,IAAA,IAAI,KAAK,GAAG,SAAS,CAAC,QAAQ,EAAE,CAAC;AACjC,IAAA,IAAI,KAAK,EAAE;AACT,QAAA,KAAK,IAAI,GAAG,GAAG,UAAU,CAAC;AAC3B,KAAA;AAAM,SAAA;QACL,KAAK,GAAG,UAAU,CAAC;AACpB,KAAA;AAED,IAAA,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AAC1B,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;AAOG;SACa,oBAAoB,CAAC,IAAU,EAAE,mBAA4B,IAAI,EAAA;;AAE/E,IAAA,MAAM,UAAU,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;AAEtC,IAAA,OAAO,gBAAgB;AACrB,UAAE,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,MAAM,GAAG,GAAG;AAC/D,UAAE,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;AAC3D,CAAC;AAED;;;;AAIG;AACG,SAAU,YAAY,CAAC,OAAe,EAAA;IAC1C,OAAO,CAACC,eAAM,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC3E,CAAC;AAWD;;;;AAIG;AACa,SAAA,eAAe,CAAC,aAAqB,EAAE,UAAkB,EAAA;;IAEvE,MAAM,qBAAqB,GAAG,EAAE,CAAC;;IAGjC,MAAM,mBAAmB,GAAG,CAAC,CAAC;AAE9B,IAAA,MAAM,6BAA6B,GAAG,qBAAqB,GAAG,mBAAmB,CAAC;AAElF,IAAA,IAAI,aAAa,CAAC,MAAM,GAAG,6BAA6B,EAAE;QACxD,aAAa,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC,EAAE,6BAA6B,CAAC,CAAC;AACvE,KAAA;IACD,MAAM,GAAG,GACP,aAAa;AACb,QAAA,QAAQ,CAAC,UAAU,CAAC,QAAQ,EAAE,EAAE,qBAAqB,GAAG,aAAa,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AACrF,IAAA,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;AAMG;AACI,eAAe,KAAK,CACzB,QAAgB,EAChB,OAAyB,EACzB,UAAkB,EAAA;IAElB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;;AAE3C,QAAA,IAAI,OAAY,CAAC;QAEjB,MAAM,YAAY,GAAG,MAAK;YACxB,IAAI,OAAO,KAAK,SAAS,EAAE;gBACzB,YAAY,CAAC,OAAO,CAAC,CAAC;AACvB,aAAA;YACD,MAAM,CAAC,UAAU,CAAC,CAAC;AACrB,SAAC,CAAC;QAEF,MAAM,cAAc,GAAG,MAAK;YAC1B,IAAI,OAAO,KAAK,SAAS,EAAE;AACzB,gBAAA,OAAO,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AACpD,aAAA;AACD,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC;AAEF,QAAA,OAAO,GAAG,UAAU,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;QAE/C,IAAI,OAAO,KAAK,SAAS,EAAE;AACzB,YAAA,OAAO,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AACjD,SAAA;AACH,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;AAMG;AACG,SAAU,QAAQ,CACtB,aAAqB,EACrB,YAAoB,EACpB,YAAoB,GAAG,EAAA;;AAGvB,IAAA,IAAI,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE;QAC7B,OAAO,aAAa,CAAC,QAAQ,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;AACxD,KAAA;AAED,IAAA,SAAS,GAAG,SAAS,IAAI,GAAG,CAAC;AAC7B,IAAA,IAAI,aAAa,CAAC,MAAM,GAAG,YAAY,EAAE;AACvC,QAAA,OAAO,aAAa,CAAC;AACtB,KAAA;AAAM,SAAA;AACL,QAAA,YAAY,GAAG,YAAY,GAAG,aAAa,CAAC,MAAM,CAAC;AACnD,QAAA,IAAI,YAAY,GAAG,SAAS,CAAC,MAAM,EAAE;YACnC,SAAS,IAAI,SAAS,CAAC,MAAM,CAAC,YAAY,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;AAChE,SAAA;QACD,OAAO,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,GAAG,aAAa,CAAC;AACzD,KAAA;AACH,CAAC;AAyBD;;;;;AAKG;AACa,SAAA,MAAM,CAAC,IAAY,EAAE,IAAY,EAAA;IAC/C,OAAO,IAAI,CAAC,iBAAiB,EAAE,KAAK,IAAI,CAAC,iBAAiB,EAAE,CAAC;AAC/D,CAAC
;AAED;;;;AAIG;AACG,SAAU,qBAAqB,CAAC,GAAW,EAAA;IAC/C,MAAM,SAAS,GAAeD,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACpD,IAAA,IAAI,WAAW,CAAC;IAChB,IAAI;AACF,QAAA,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;;AAEjD,YAAA,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,SAAA;AAAM,aAAA,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;;;;AAIvC,YAAA,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,SAAA;AAAM,aAAA;;YAEL,WAAW,GAAG,EAAE,CAAC;AAClB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC;AACpB,KAAA;AAAC,IAAA,OAAO,KAAU,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;AAC7E,KAAA;AACH,CAAC;AAEK,SAAU,iBAAiB,CAAC,SAAqB,EAAA;AACrD,IAAA,IAAI,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,EAAE;AACrC,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;AAED,IAAA,MAAM,IAAI,GACR,SAAS,CAAC,OAAO,EAAG,IAAI,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,GAAG,EAAE,GAAG,GAAG,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC;;;;;AAM9F,IAAA,OAAO,4HAA4H,CAAC,IAAI,CACtI,IAAI,CACL,CAAC;AACJ,CAAC;AAED;;;;AAIG;AACG,SAAU,gBAAgB,CAAC,IAAW,EAAA;IAC1C,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,QAAQ,GAAG,EAAE,CAAC;AACpB,IAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;AACtB,QAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;AACnD,YAAA,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;AACxB,YAAA,QAAQ,CAAC,IAAI,CAAC,CAAA,EAAG,kBAAkB,CAAC,GAAG,CAAC,CAAA,CAAA,EAAI,kBAAkB,CAAC,KAAK,CAAC,CAAA,CAAE,CAAC,CAAC;AAC1E,SAAA;AACF,KAAA;AAED,IAAA,OAAO,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED;;;;AAIG;AACG,SAAU,UAAU,CAAC,IAAW,EAAA;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AAED,IAAA,MAAM,GAAG,GAAa;AACpB,QAAA,UAAU,EAAE,EAAE;KACf,CAAC;AAEF,IAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;AACtB,QAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;AACnD,YAAA,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;AACxB,YAAA,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC;gBAClB,GAAG;gBACH,KAAK;AACN,aAAA,CAAC,CAAC;AACJ,SAAA;AACF,KAAA;AACD,IAAA,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;AAIG;AACG,SAAU,MAAM,CAAC,IAAe,EAAA;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,GAAG,GAAS,EAAE,CAAC;AACrB,IAAA,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,UAAU,EAAE;QACrC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC;AAClC,KAAA;AACD,IAAA,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;AAIG;AACG,SAAU,oBAAoB,CAClC,iBAIiC,EAAA;IAEjC,IAAI,iBAAiB,KAAK,SAAS,EAAE;AACnC,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,QAAQ,iBAAiB,CAAC,IAAI;AAC5B,QAAA,KAAK,KAAK;YACR,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,0BAA0B,EAAE;AAC1B,wBAAA,eAAe,EAAE,iBAAiB,CAAC,eAAe,IAAI,GAAG;AACzD,wBAAA,UAAU,EAAE,iBAAiB,CAAC,UAAU,IAAI,EAAE;wBAC9C,eAAe,EAAE,iBAAiB,CAAC,eAAe;AAClD,wBAAA,UAAU,EAAE,iBAAiB,CAAC,eAAe,IAAI,EAAE;AACnD,wBAAA,cAAc,EAAE,iBAAiB,CAAC,UAAU,IAAI,KAAK;AACtD,qBAAA;AACF,iBAAA;aACF,CAAC;AACJ,QAAA,KAAK,MAAM;YACT,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,qBAAqB,EAAE;wBACrB,eAAe,EAAE,iBAAiB,CAAC,eAAe;AACnD,qBAAA;AACF,iBAAA;aACF,CAAC;AACJ,QAAA,KAAK,OAAO;YACV,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,OAAO;AACb,oBAAA,kBAAkB,EAAE;wBAClB,MAAM,EAAE,iBAAiB,CAAC,MAAM;AACjC,qBAAA;AACF,iBAAA;aACF,CAAC;AACJ,QAAA,KAAK,SAAS;YACZ,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;aACF,CAAC;AAEJ,QAAA;AACE,YAAA,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACtD,KAAA;AACH,CAAC;AAEK,SAAU,4BAA4B,CAC1C,uBAAgD,EAAA;IAEhD,IAAI,CAAC,uBAAuB,EAAE;AAC5B,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,IAAI,WAAW,IAAI,uBAAuB,EAAE;;;AAG1C,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,YAAY,GAA8B,EAAE,CAAC;AACnD,IAAA,KAAK,MAAM,GAAG,IAAI,uBAAuB,EAAE;QACzC,MAAM,GAAG,GAAG,GA
AG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC3B,MAAM,YAAY,GAAG,KAAK,CAAC;QAC3B,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;AACnC,YAAA,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;AAChD,SAAA;AACD,QAAA,MAAM,IAAI,GAA0B;AAClC,YAAA,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC;AACd,YAAA,iBAAiB,EAAE,uBAAuB,CAAC,GAAG,CAA4B;SAC3E,CAAC;QACF,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC,MAAM,KAAK,MAAM,CAAC,QAAQ,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACnF,QAAA,IAAI,WAAW,GAAG,CAAC,CAAC,EAAE;YACpB,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC5C,SAAA;AAAM,aAAA;YACL,YAAY,CAAC,IAAI,CAAC;AAChB,gBAAA,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;gBAChB,KAAK,EAAE,CAAC,IAAI,CAAC;AACd,aAAA,CAAC,CAAC;AACJ,SAAA;AACF,KAAA;AACD,IAAA,OAAO,YAAY,CAAC;AACtB,CAAC;AAED;;;;;AAKG;AACa,SAAA,gBAAgB,CAAI,KAAQ,EAAE,UAA2B,EAAA;AACtE,IAAA,KAAa,CAAC,UAAU,GAAG,UAAU,CAAC;AACvC,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAEK,SAAU,yBAAyB,CACvC,iBAAqC,EAAA;AAErC,IAAA,OAAO,iBAAiB,GAAG,iBAAiB,CAAC,MAAM,GAAG,GAAG,GAAG,iBAAiB,CAAC,KAAK,GAAG,SAAS,CAAC;AAClG,CAAC;AAEK,SAAU,gBAAgB,CAAC,IAAc,EAAA;IAC7C,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,QAAA,OAAO,kBAAkB,CAAC,IAAI,CAAC,OAAQ,CAAC,CAAC;AAC1C,KAAA;AAAM,SAAA;QACL,OAAO,IAAI,CAAC,OAAQ,CAAC;AACtB,KAAA;AACH,CAAC;AAEK,SAAU,qCAAqC,CACnD,gBAA8C,EAAA;IAE9C,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,gBAAgB,CACnB,EAAA,EAAA,OAAO,EAAE;AACP,YAAA,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AACpE,gBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;AACF,gBAAA,OAAO,QAAQ,CAAC;AAClB,aAAC,CAAC;SACH,EACD,CAAA,CAAA;AACJ,CAAC;AAEK,SAAU,0CAA0C,CACxD,gBAAmD,EAAA;;IAEnD,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,gBAAgB,CACnB,EAAA,EAAA,OAAO,EAAE;AACP,YAAA,YAAY,EAAE,CAAA,EAAA,GAAA,gBAAgB,CAAC,OAAO,CAAC,YAAY,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,GAAG,CAAC,CAAC,kBAAkB,KAAI;AAC9E,gBAAA,MAAM,UAAU,GAAoB;AAClC,oBAAA,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC;iBAChD,CAAC;AACF,gBAAA,OAAO,UAAU,CAAC;AACpB,aAAC,CAAC;AACF,YAAA,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AACpE,gBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;AACF,gBAAA,OAAO,QAAQ,CAAC;AAClB,aAAC,CAAC;SACH,EACD,CAAA,CAAA;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,KAAa,EAAA;AACvC,IAAA,IAAIC,eAAM,EAAE;QACV,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACrC,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;QAC/B,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;AAC9C,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AACnC,SAAA;AACD,QAAA,OAAO,GAAG,CAAC;AACZ,KAAA;AACH,CAAC;AAED,SAAS,YAAY,CAAC,OAAY,EAAA;IAChC,IAAI,OAAO,KAAK,SAAS;AAAE,QAAA,OAAO,SAAS,CAAC;IAC5C,IAAI,OAAO,KAAK,MAAM;AAAE,QAAA,OAAO,IAAI,CAAC;IACpC,IAAI,OAAO,KAAK,OAAO;AAAE,QAAA,OAAO,KAAK,CAAC;AACtC,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,aAAa,CAAC,aAAkB,EAAA;AACvC,IAAA,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;QACxE,OAAO;YACL,OAAO,EAAE,YAAY,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC;AACpD,YAAA,OAAO,EAAE,aAAa,CAAC,GAAG,CAAW;SACtC,CAAC;AACH,KAAA;AAAM,SAAA;QACL,OAAO;AACL,YAAA,OAAO,EAAE,KAAK;AACd,YAAA,OAAO,EAAE,aAAuB;SACjC,CAAC;AACH,KAAA;AACH,CAAC;AAED,SAAS,mBAAmB,CAAC,mBAAwB,EAAA;IACnD,MAAM,cAAc,GAAG,mBAAmB,CAAC;AAC3C,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC
,mBAAmB,CAAC,eAAe,CAAW,CAAC,CAAC;AACpF,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,YAAY,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,eAAe,CAAW,CAAC,CAAC;AACvF,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,MAAM,CAAC,EAAE;AAC/B,QAAA,cAAc,CAAC,IAAI,GAAG,mBAAmB,CAAC,MAAM,CAAW,CAAC;AAC5D,QAAA,OAAO,cAAc,CAAC,MAAM,CAAC,CAAC;AAC/B,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,gBAAgB,CAAC,EAAE;QACzC,cAAc,CAAC,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,gBAAgB,CAAW,CAAC,CAAC;AAC3F,QAAA,OAAO,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACzC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,cAAc,CAAC,EAAE;AACvC,QAAA,cAAc,CAAC,WAAW,GAAG,mBAAmB,CAAC,cAAc,CAAW,CAAC;AAC3E,QAAA,OAAO,cAAc,CAAC,cAAc,CAAC,CAAC;AACvC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,kBAAkB,CAAC,EAAE;AAC3C,QAAA,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,kBAAkB,CAAW,CAAC;AACnF,QAAA,OAAO,cAAc,CAAC,kBAAkB,CAAC,CAAC;AAC3C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,kBAAkB,CAAC,EAAE;AAC3C,QAAA,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,kBAAkB,CAAW,CAAC;AACnF,QAAA,OAAO,cAAc,CAAC,kBAAkB,CAAC,CAAC;AAC3C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,UAAU,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;AAC7F,QAAA,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;AACtC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,qBAAqB,CAAC,EAAE;AAC9C,QAAA,cAAc,CAAC,kBAAkB,GAAG,mBAAmB,CAAC,qBAAqB,CAAW,CAAC;AACzF,QAAA,OAAO,cAAc,CAAC,qBAAqB,CAAC,CAAC;AAC9C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;AACxC,QAAA,cAAc,CAAC,YAAY,GAAG,mBAAmB,CAAC,eAAe,CAAW,CAAC;AAC7E,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,2BAA2B,CAAC,EAAE;QACpD,cAAc,CAAC,kBAAkB,GAAG,UAAU,CAC5C,mBAAmB,CAAC,2BAA2B,CAAW,CAC3D,CAAC;AACF,QAAA,OAAO,cAAc,CAAC,2BAA2B,CAAC,CAAC;AACpD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,UAAU,CAAC,EAAE;AACnC,QAAA,cAAc,CAAC,QAAQ,GAAG,mBAAmB,CAAC,UAAU,CAAa,CAAC;AACtE,QAAA,OAAO,cAAc,CAAC,UAAU,CAAC,CAAC;AACnC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;AACtC,QAAA,cAAc,CAAC,WAAW,GAAG,mBAAmB,CAAC,aAAa,CAAoB,CAAC;AACnF,QAAA,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;AACtC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;AACrC,QAAA,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAmB,CAAC;AAChF,QAAA,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;AACxC,QAAA,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,eAAe,CAAsB,CAAC;AACzF,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,QAAQ,CAAC,EAAE;AACjC,QAAA,cAAc,CAAC,MAAM,GAAG,mBAAmB,CAAC,QAAQ,CAAW,CAAC;AAChE,QAAA,OAAO,cAAc,CAAC,QAAQ,CAAC,CAAC;AACjC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;AACrC,QAAA,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAmB,CAAC;AAChF,QAAA,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;AACrC,QAAA,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAW,CAAC;AACxE,QAAA,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,cAAc,CAAC,EAAE;AACvC,QAAA,cAAc,CAAC,YAAY,GAAG,mBAAmB,CAAC,cAAc,CAAW,CAAC;AAC5E,QAAA,OAAO,cAAc,CAAC,cAAc,CAAC,CAAC;AACvC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,oBAAoB,CAAC,EAAE;QAC7C,cAAc,CAAC,eAAe,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,oBAAoB,CAAW,CAAC,CAAC;AAC/F,QAAA,OAAO,cAAc,CAAC,oBAAoB,CAAC,CAAC;AAC7C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,uBAAuB,CAAC,EAAE;AAChD,QAAA,cAAc,CAAC,qBAAqB,GAAG,mBAAmB,CAAC,uBAAuB,CAAW,CAAC;AAC9F,QAAA,OAAO,cAAc,CAAC,uBAAuB,CAAC,CAAC;AAChD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,YAAY,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,CAAC,CAAC;AACtF,QAAA,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAC1C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,YAAY,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,CAAC,CAAC;AACtF,QAAA,OAAO,cAAc,CAAC,iBAAiB,
CAAC,CAAC;AAC1C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,qBAAqB,CAAC,EAAE;AAC9C,QAAA,cAAc,CAAC,mBAAmB,GAAG,mBAAmB,CAAC,qBAAqB,CAAW,CAAC;AAC1F,QAAA,OAAO,cAAc,CAAC,qBAAqB,CAAC,CAAC;AAC9C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;AAClF,QAAA,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;AACtC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,wBAAwB,CAAC,EAAE;QACjD,cAAc,CAAC,sBAAsB,GAAG,UAAU,CAChD,mBAAmB,CAAC,wBAAwB,CAAW,CACxD,CAAC;AACF,QAAA,OAAO,cAAc,CAAC,wBAAwB,CAAC,CAAC;AACjD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;AACrC,QAAA,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAe,CAAC;AAC5E,QAAA,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,oBAAoB,CAAC,EAAE;QAC7C,cAAc,CAAC,kBAAkB,GAAG,YAAY,CAAC,mBAAmB,CAAC,oBAAoB,CAAC,CAAC,CAAC;AAC5F,QAAA,OAAO,cAAc,CAAC,oBAAoB,CAAC,CAAC;AAC7C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;AACxC,QAAA,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,eAAe,CAAkB,CAAC;AACrF,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,2BAA2B,CAAC,EAAE;AACpD,QAAA,cAAc,CAAC,yBAAyB,GAAG,mBAAmB,CAC5D,2BAA2B,CAClB,CAAC;AACZ,QAAA,OAAO,cAAc,CAAC,2BAA2B,CAAC,CAAC;AACpD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;AAC1C,QAAA,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,iBAAiB,CAAW,CAAC;AAClF,QAAA,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAC1C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,sBAAsB,CAAC,EAAE;QAC/C,cAAc,CAAC,mBAAmB,GAAG,IAAI,IAAI,CAC3C,mBAAmB,CAAC,sBAAsB,CAAW,CACtD,CAAC;AACF,QAAA,OAAO,cAAc,CAAC,sBAAsB,CAAC,CAAC;AAC/C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,UAAU,CAAC,EAAE;QACnC,cAAc,CAAC,QAAQ,GAAG,UAAU,CAAC,mBAAmB,CAAC,UAAU,CAAW,CAAC,CAAC;AAChF,QAAA,OAAO,cAAc,CAAC,UAAU,CAAC,CAAC;AACnC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;AAClF,QAAA,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;AACtC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,QAAQ,CAAC,EAAE;QACjC,cAAc,CAAC,QAAQ,GAAG,YAAY,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC,CAAC;AACtE,QAAA,OAAO,cAAc,CAAC,QAAQ,CAAC,CAAC;AACjC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,mBAAmB,CAAC,EAAE;AAC5C,QAAA,cAAc,CAAC,iBAAiB,GAAG,mBAAmB,CACpD,mBAAmB,CACC,CAAC;AACvB,QAAA,OAAO,cAAc,CAAC,mBAAmB,CAAC,CAAC;AAC5C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,gBAAgB,CAAC,EAAE;QACzC,cAAc,CAAC,cAAc,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAW,CAAC,CAAC;AAC1F,QAAA,OAAO,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACzC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,6BAA6B,CAAC,EAAE;QACtD,cAAc,CAAC,2BAA2B,GAAG,IAAI,IAAI,CACnD,mBAAmB,CAAC,6BAA6B,CAAW,CAC7D,CAAC;AACF,QAAA,OAAO,cAAc,CAAC,6BAA6B,CAAC,CAAC;AACtD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,wBAAwB,CAAC,EAAE;AACjD,QAAA,cAAc,CAAC,sBAAsB,GAAG,mBAAmB,CACzD,wBAAwB,CACK,CAAC;AAChC,QAAA,OAAO,cAAc,CAAC,wBAAwB,CAAC,CAAC;AACjD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,WAAW,CAAC,EAAE;QACpC,cAAc,CAAC,SAAS,GAAG,YAAY,CAAC,mBAAmB,CAAC,WAAW,CAAC,CAAC,CAAC;AAC1E,QAAA,OAAO,cAAc,CAAC,WAAW,CAAC,CAAC;AACpC,KAAA;AAED,IAAA,OAAO,cAAc,CAAC;AACxB,CAAC;AAED,SAAS,aAAa,CAAC,SAAc,EAAA;IACnC,MAAM,QAAQ,GAAG,SAAS,CAAC;IAC3B,QAAQ,CAAC,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC,CAAC;AACnE,IAAA,OAAO,QAAQ,CAAC,YAAY,CAAC,CAAC;IAE9B,QAAQ,CAAC,IAAI,GAAG,aAAa,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;AACjD,IAAA,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;IACxB,QAAQ,CAAC,OAAO,GAAG,YAAY,CAAC,SAAS,CAAC,SAAS,CAAC,CAAE,CAAC;AACvD,IAAA,OAAO,QAAQ,CAAC,SAAS,CAAC,CAAC;AAE3B,IAAA,IAAI,SAAS,CAAC,UAAU,CAAC,EAAE;AACzB,QAAA,QAAQ,CAAC,QAAQ,GAAG,SAAS,CAAC,UAAU,CAAW,CAAC;AACpD,QAAA,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC;AAC7B,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,WAAW,CAAC,EAAE;AAC1B,QAAA,QAAQ,CAAC,SAAS,GAAG,SAAS,CAAC,WAAW,CAAW,CAAC;AACtD,QAAA,OAAO,QAAQ,CAAC,WAAW,CAAC,CAAC;AAC9B,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,
kBAAkB,CAAC,EAAE;QACjC,QAAQ,CAAC,gBAAgB,GAAG,YAAY,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC,CAAC;AACxE,QAAA,OAAO,QAAQ,CAAC,kBAAkB,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,UAAU,CAAC,EAAE;AACzB,QAAA,QAAQ,CAAC,QAAQ,GAAG,SAAS,CAAC,UAAU,CAAC,CAAC;AAC1C,QAAA,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC;AAC7B,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,MAAM,CAAC,EAAE;QACrB,QAAQ,CAAC,QAAQ,GAAG,aAAa,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;AACrD,QAAA,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;AACzB,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;AAC3B,QAAA,QAAQ,CAAC,yBAAyB,GAAG,SAAS,CAAC,YAAY,CAAC,CAAC;AAC7D,QAAA,OAAO,QAAQ,CAAC,YAAY,CAAC,CAAC;AAC/B,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,iBAAiB,CAAC,EAAE;QAChC,QAAQ,CAAC,eAAe,GAAG,YAAY,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC,CAAC;AACtE,QAAA,OAAO,QAAQ,CAAC,iBAAiB,CAAC,CAAC;AACpC,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,eAAe,CAAC,eAAoB,EAAA;IAC3C,OAAO;AACL,QAAA,IAAI,EAAE,aAAa,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;KAC7C,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,YAAiB,EAAA;IACrC,OAAO;AACL,QAAA,GAAG,EAAE,YAAY,CAAC,KAAK,CAAC;AACxB,QAAA,KAAK,EAAE,YAAY,CAAC,OAAO,CAAC;KAC7B,CAAC;AACJ,CAAC;AAED,SAAS,aAAa,CAAC,aAAkB,EAAA;IACvC,IACE,aAAa,KAAK,SAAS;AAC3B,QAAA,aAAa,CAAC,QAAQ,CAAC,KAAK,SAAS;QACrC,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,KAAK,SAAS,EAC5C;AACA,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,UAAU,GAAG,EAAE,CAAC;IACtB,IAAI,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,YAAY,KAAK,EAAE;AACnD,QAAA,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,YAAiB,KAAI;YAC3D,UAAU,CAAC,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,CAAC,CAAC;AAC9C,SAAC,CAAC,CAAC;AACJ,KAAA;AAAM,SAAA;AACL,QAAA,UAAU,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAC/D,KAAA;AAED,IAAA,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,CAAC;AACpC,CAAC;AAEK,SAAU,gBAAgB,CAAC,cAAqB,EAAA;IACpD,MAAM,SAAS,GAAG,EAAE,CAAC;IAErB,IAAI,cAAc,YAAY,KAAK,EAAE;AACnC,QAAA,cAAc,CAAC,OAAO,CAAC,CAAC,SAAc,KAAI;YACxC,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;AACJ,KAAA;AAAM,SAAA;QACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC,CAAC;AAC/C,KAAA;AAED,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAEK,SAAU,mBAAmB,CAAC,iBAAwB,EAAA;IAC1D,MAAM,YAAY,GAAG,EAAE,CAAC;IAExB,IAAI,iBAAiB,YAAY,KAAK,EAAE;AACtC,QAAA,iBAAiB,CAAC,OAAO,CAAC,CAAC,eAAoB,KAAI;YACjD,YAAY,CAAC,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,CAAC,CAAC;AACtD,SAAC,CAAC,CAAC;AACJ,KAAA;AAAM,SAAA;QACL,YAAY,CAAC,IAAI,CAAC,eAAe,CAAC,iBAAiB,CAAC,CAAC,CAAC;AACvD,KAAA;AAED,IAAA,OAAO,YAAY,CAAC;AACtB,CAAC;AAEc,UAAE,yBAAyB,CACxC,oBAA4D,EAAA;IAE5D,IAAI,SAAS,GAAgB,EAAE,CAAC;IAChC,IAAI,UAAU,GAAiB,EAAE,CAAC;IAElC,IAAI,oBAAoB,CAAC,SAAS;AAAE,QAAA,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;IAC/E,IAAI,oBAAoB,CAAC,UAAU;AAAE,QAAA,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;IAElF,IAAI,cAAc,GAAG,CAAC,CAAC;IACvB,IAAI,eAAe,GAAG,CAAC,CAAC;IAExB,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,IAAI,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE;AAC/E,QAAA,IAAI,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK,EAAE;YACvE,MAAM;AACJ,gBAAA,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;AACtC,gBAAA,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;AAClC,gBAAA,OAAO,EAAE,KAAK;aACf,CAAC;AACF,YAAA,EAAE,cAAc,CAAC;AAClB,SAAA;AAAM,aAAA;YACL,MAAM;AACJ,gBAAA,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;AACxC,gBAAA,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;AACpC,gBAAA,OAAO,EAAE,IAAI;aACd,CAAC;AACF,YAAA,EAAE,eAAe,CAAC;AACnB,SAAA;AACF,KAAA;IAED,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,cAAc,EAAE;QAC1D,MAAM;AACJ,YAAA,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;AACtC,YAAA,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;AAClC,YAAA,OAAO,EAAE,KAAK;SACf,CAAC;AACH,KAAA;IAED,OAAO,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,eAAe,EAAE;QAC7D,MAAM;AACJ,YAAA,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;AACxC,YAAA,GAAG
,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;AACpC,YAAA,OAAO,EAAE,IAAI;SACd,CAAC;AACH,KAAA;AACH;;AC3tCA;AAeA;;;;;;;;;;AAUG;AACG,MAAO,oBAAqB,SAAQC,0BAAiB,CAAA;AACzD;;;;AAIG;;;IAGH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AAED;;;;AAIG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;AAC3C,QAAA,IAAID,eAAM,EAAE;YACV,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC9C,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,MAAM,EAAE;YACrF,OAAO,CAAC,GAAG,GAAG,eAAe,CAC3B,OAAO,CAAC,GAAG,EACX,YAAY,CAAC,UAAU,CAAC,sBAAsB,EAC9C,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAChC,CAAC;AACH,SAAA;QAED,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;;QAG/C,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,cAAc,CAAC,CAAC;QAEvD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;AC/DD;AAOA;;AAEG;MACU,2BAA2B,CAAA;AACtC;;;;;AAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;AACpE,QAAA,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KACtD;AACF;;ACpBD;AAkCA;;AAEG;AACSE,wCASX;AATD,CAAA,UAAY,sBAAsB,EAAA;AAChC;;AAEG;AACH,IAAA,sBAAA,CAAA,sBAAA,CAAA,aAAA,CAAA,GAAA,CAAA,CAAA,GAAA,aAAW,CAAA;AACX;;AAEG;AACH,IAAA,sBAAA,CAAA,sBAAA,CAAA,OAAA,CAAA,GAAA,CAAA,CAAA,GAAA,OAAK,CAAA;AACP,CAAC,EATWA,8BAAsB,KAAtBA,8BAAsB,GASjC,EAAA,CAAA,CAAA,CAAA;AAED;AACA,MAAM,qBAAqB,GAAwB;IACjD,iBAAiB,EAAE,GAAG,GAAG,IAAI;AAC7B,IAAA,QAAQ,EAAE,CAAC;IACX,cAAc,EAAE,CAAC,GAAG,IAAI;IACxB,eAAe,EAAEA,8BAAsB,CAAC,WAAW;AACnD,IAAA,aAAa,EAAE,EAAE;IACjB,cAAc,EAAE,SAAS;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAIC,0BAAU,CAAC,4BAA4B,CAAC,CAAC;AAEvE;;AAEG;AACG,MAAO,kBAAmB,SAAQF,0BAAiB,CAAA;AAMvD;;;;;;AAMG;AACH,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,eAAoC,qBAAqB,EAAA;AAEzD,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;;QAG3B,IAAI,CAAC,YAAY,GAAG;YAClB,eAAe,EAAE,YAAY,CAAC,eAAe;kBACzC,YAAY,CAAC,eAAe;kBAC5B,qBAAqB,CAAC,eAAe;YAEzC,QAAQ,EACN,YAAY,CAAC,QAAQ,IAAI,YAAY,CAAC,QAAQ,IAAI,CAAC;kBAC/C,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,QAAQ,CAAC;kBACjC,qBAAqB,CAAC,QAAQ;YAEpC,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;kBAC3D,YAAY,CAAC,cAAc;kBAC3B,qBAAqB,CAAC,cAAc;YAE1C,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;kBAC3D,IAAI,CAAC,GAAG,CACN,YAAY,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB;sBAC1B,YAAY,CAAC,iBAAiB;AAChC,sBAAE,qBAAqB,CAAC,iBAAkB,CAC7C;kBACD,qBAAqB,CAAC,cAAc;YAE1C,iBAAiB,EACf,YAAY,CAAC,iBAAiB,IAAI,YAAY,CAAC,iBAAiB,IAAI,CAAC;kBACjE,YAAY,CAAC,iBAAiB;kBAC9B,qBAAqB,CAAC,iBAAiB;YAE7C,aAAa,EAAE,YAAY,CAAC,aAAa;kBACrC,YAAY,CAAC,aAAa;kBAC1B,qBAAqB,CAAC,aAAa;SACxC,CAAC;KACH;AAED;;;;AAIG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;QAC3C,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;KACnD;AAED;;;;;;;;;AASG;AACO,IAAA,MAAM,kBAAkB,CAChC,OAAoB,EACpB,eAAwB,EACxB,OAAe,EAAA;AAEf,QAAA,MAAM,UAAU,GAAgB,OAAO,CAAC,KAAK,EAAE,CAAC;QAEhD,MAAM,cAAc,GAClB,eAAe;AACf,YAAA,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa;AAChC,YAAA,EAAE,OAAO,CAAC,MAAM,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,IAAI,OAAO,CAAC,MAAM,KAAK,SAAS,CAAC;AACxF,YAAA,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC;QAEpB,IAAI,CAAC,cAAc,EAAE;AACnB,YAAA,UAAU,CAAC,GAAG,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,aAAc,CAAC,CAAC;AAC/E,SAAA;;AAGD,QAAA,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE;AACpC,YAAA,UAAU,CAAC,GAAG,GAAG,eAAe,CAC9B,UAAU,CAAC,GAAG,EACd,YAAY,CAAC,UAAU,CAAC,OAAO,EAC/B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,cAAe,GAAG,IAAI,CAAC,CAAC,QAAQ,EAAE,CAChE,CAAC;AACH,SAAA;AAED,QAAA,IAAI,QAA2C,CAAC;QAChD,IAAI;AACF,YAAA,MAAM,CAAC,IAAI,CAAC,CAA2B,wBAAA,EAAA,OAAO,IAAI,cAAc,GAAG,SAAS,GAAG,WAAW,CAAA,CAAE,CAAC,CAAC;YAC9F,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC1D,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC
,cAAc,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE;AACxD,gBAAA,OAAO,QAAQ,CAAC;AACjB,aAAA;AAED,YAAA,eAAe,GAAG,eAAe,KAAK,CAAC,cAAc,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,CAAC,CAAC;AACnF,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,MAAM,CAAC,KAAK,CAAC,CAAA,oCAAA,EAAuC,GAAG,CAAC,OAAO,CAAA,QAAA,EAAW,GAAG,CAAC,IAAI,CAAA,CAAE,CAAC,CAAC;AACtF,YAAA,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,CAAC,EAAE;AAC7D,gBAAA,MAAM,GAAG,CAAC;AACX,aAAA;AACF,SAAA;AAED,QAAA,MAAM,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;QAC/D,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,eAAe,EAAE,EAAE,OAAO,CAAC,CAAC;KACrE;AAED;;;;;;;AAOG;AACO,IAAA,WAAW,CACnB,cAAuB,EACvB,OAAe,EACf,QAAgC,EAChC,GAAe,EAAA;AAEf,QAAA,IAAI,OAAO,IAAI,IAAI,CAAC,YAAY,CAAC,QAAS,EAAE;YAC1C,MAAM,CAAC,IAAI,CACT,CAAA,wBAAA,EAA2B,OAAO,CAAgB,aAAA,EAAA,IAAI,CAAC,YAAY;iBAChE,QAAS,CAAA,iBAAA,CAAmB,CAChC,CAAC;AACF,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;;;AAID,QAAA,MAAM,eAAe,GAAG;YACtB,WAAW;YACX,iBAAiB;YACjB,cAAc;YACd,YAAY;YACZ,QAAQ;YACR,WAAW;YACX,SAAS;YACT,OAAO;AACP,YAAA,oBAAoB;SACrB,CAAC;AACF,QAAA,IAAI,GAAG,EAAE;AACP,YAAA,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IACE,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;oBAC/C,GAAG,CAAC,OAAO,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;AAClD,qBAAC,GAAG,CAAC,IAAI,IAAI,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,WAAW,EAAE,KAAK,cAAc,CAAC,EAClE;AACA,oBAAA,MAAM,CAAC,IAAI,CAAC,8BAA8B,cAAc,CAAA,mBAAA,CAAqB,CAAC,CAAC;AAC/E,oBAAA,OAAO,IAAI,CAAC;AACb,iBAAA;AACF,aAAA;AACF,SAAA;;;;QAKD,IAAI,QAAQ,IAAI,GAAG,EAAE;YACnB,MAAM,UAAU,GAAG,QAAQ,GAAG,QAAQ,CAAC,MAAM,GAAG,GAAG,GAAG,GAAG,CAAC,UAAU,GAAG,CAAC,CAAC;AACzE,YAAA,IAAI,CAAC,cAAc,IAAI,UAAU,KAAK,GAAG,EAAE;AACzC,gBAAA,MAAM,CAAC,IAAI,CAAC,CAAA,mDAAA,CAAqD,CAAC,CAAC;AACnE,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;;AAGD,YAAA,IAAI,UAAU,KAAK,GAAG,IAAI,UAAU,KAAK,GAAG,EAAE;AAC5C,gBAAA,MAAM,CAAC,IAAI,CAAC,2CAA2C,UAAU,CAAA,CAAA,CAAG,CAAC,CAAC;AACtE,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;AACF,SAAA;QAED,IAAI,CAAA,GAAG,KAAA,IAAA,IAAH,GAAG,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAH,GAAG,CAAE,IAAI,MAAK,aAAa,KAAI,GAAG,aAAH,GAAG,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAH,GAAG,CAAE,OAAO,CAAC,UAAU,CAAC,CAAA,+BAAA,CAAiC,CAAC,CAAA,EAAE;AAC7F,YAAA,MAAM,CAAC,IAAI,CACT,iFAAiF,CAClF,CAAC;AACF,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAED,QAAA,OAAO,KAAK,CAAC;KACd;AAED;;;;;;AAMG;AACK,IAAA,MAAM,KAAK,CAAC,cAAuB,EAAE,OAAe,EAAE,WAA6B,EAAA;QACzF,IAAI,aAAa,GAAW,CAAC,CAAC;AAE9B,QAAA,IAAI,cAAc,EAAE;AAClB,YAAA,QAAQ,IAAI,CAAC,YAAY,CAAC,eAAe;gBACvC,KAAKC,8BAAsB,CAAC,WAAW;AACrC,oBAAA,aAAa,GAAG,IAAI,CAAC,GAAG,CACtB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,YAAY,CAAC,cAAe,EAClE,IAAI,CAAC,YAAY,CAAC,iBAAkB,CACrC,CAAC;oBACF,MAAM;gBACR,KAAKA,8BAAsB,CAAC,KAAK;AAC/B,oBAAA,aAAa,GAAG,IAAI,CAAC,YAAY,CAAC,cAAe,CAAC;oBAClD,MAAM;AACT,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,aAAa,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC;AACtC,SAAA;AAED,QAAA,MAAM,CAAC,IAAI,CAAC,0BAA0B,aAAa,CAAA,EAAA,CAAI,CAAC,CAAC;QACzD,OAAO,KAAK,CAAC,aAAa,EAAE,WAAW,EAAE,iBAAiB,CAAC,CAAC;KAC7D;AACF;;ACjSD;AAyDA;;AAEG;MACU,yBAAyB,CAAA;AAGpC;;;AAGG;AACH,IAAA,WAAA,CAAY,YAAkC,EAAA;AAC5C,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;AAED;;;;;AAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;QACpE,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;KACvE;AACF;;AChFD;AAKA;;;AAGG;AACG,MAAgB,gBAAiB,SAAQD,0BAAiB,CAAA;AAC9D;;;;AAIG;AACI,IAAA,WAAW,CAAC,OAAoB,EAAA;AACrC,QAAA,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;KAChE;AAED;;;;;AAKG;AACO,IAAA,WAAW,CAAC,OAAoB,EAAA;;;AAGxC,QAAA,OAAO,OAAO,CAAC;KAChB;AACF;;AC9BD;AAOA;;;AAGG;AACG,MAAO,yBAA0B,SAAQ,gBAAgB,CAAA;AAC7D;;;;AAIG;;;IAGH,WAAY,CA
AA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AACF;;ACtBD;AACA;AAKA;;;AAGG;MACmB,UAAU,CAAA;AAC9B;;;;;AAKG;IACI,MAAM,CAAC,WAA0B,EAAE,QAA8B,EAAA;AACtE,QAAA,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;KACtE;AACF;;ACpBD;AAQA;;;;;AAKG;AACG,MAAO,mBAAoB,SAAQ,UAAU,CAAA;AACjD;;;;;AAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B,EAAA;AAE7B,QAAA,OAAO,IAAI,yBAAyB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC3D;AACF;;AC3BD;AAeA;;AAEG;AACG,MAAO,eAAgB,SAAQA,0BAAiB,CAAA;AAMpD;;;;;AAKG;AACH,IAAA,WAAA,CAAY,UAAyB,EAAE,OAA6B,EAAE,SAAiB,EAAA;AACrF,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;KAC5B;AAED;;;;AAIG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;AAC3C,QAAA,IAAID,eAAM,EAAE;AACV,YAAA,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;AACpB,gBAAA,OAAO,CAAC,OAAO,GAAG,IAAII,oBAAW,EAAE,CAAC;AACrC,aAAA;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;AACpD,gBAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;AACjE,aAAA;AACF,SAAA;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACpDD;AAeA;;AAEG;MACU,sBAAsB,CAAA;AAMjC;;;AAGG;AACH,IAAA,WAAA,CAAY,SAA4B,EAAA;QACtC,MAAM,aAAa,GAAa,EAAE,CAAC;AAEnC,QAAA,IAAIJ,eAAM,EAAE;AACV,YAAA,IAAI,SAAS,EAAE;AACb,gBAAA,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,IAAI,EAAE,CAAC;AACxD,gBAAA,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,IAAI,aAAa,CAAC,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE;AAC/E,oBAAA,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;AACrC,iBAAA;AACF,aAAA;;AAGD,YAAA,MAAM,OAAO,GAAG,CAAwB,qBAAA,EAAA,WAAW,EAAE,CAAC;YACtD,IAAI,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;AACzC,gBAAA,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AAC7B,aAAA;;AAGD,YAAA,IAAI,WAAW,GAAG,CAAA,cAAA,EAAiB,OAAO,CAAC,OAAO,GAAG,CAAC;AACtD,YAAA,IAAIK,aAAE,EAAE;AACN,gBAAA,WAAW,GAAG,CAAiB,cAAA,EAAA,OAAO,CAAC,OAAO,KAAKA,aAAE,CAAC,IAAI,EAAE,IAAIA,aAAE,CAAC,OAAO,EAAE,GAAG,CAAC;AACjF,aAAA;YACD,IAAI,aAAa,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;AAC7C,gBAAA,aAAa,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AACjC,aAAA;AACF,SAAA;QAED,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAChD;AAED;;;;;AAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;QACpE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;KACvE;AACF;;ACnED;AAMA,MAAM,kBAAkB,GAAG,IAAIC,0BAAiB,EAAE,CAAC;SAEnC,0BAA0B,GAAA;AACxC,IAAA,OAAO,kBAAkB,CAAC;AAC5B;;ACVA;AAcA;;AAEG;AACH,MAAM,SAAS,GAAG;AAChB,IAAA,YAAY,EAAE,WAAW;AACzB;;AAEG;AACH,IAAA,eAAe,EAAE;AACf;;AAEG;AACH,QAAA,aAAa,EAAE,eAAe;AAC/B,KAAA;CACF,CAAC;AAiCF;AACA,MAAM,sBAAsB,GAAuB;AACjD,IAAA,uBAAuB,EAAE,IAAI;AAC7B,IAAA,iBAAiB,EAAE,IAAI;AACvB,IAAA,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC;CACjC,CAAC;AAEF;;;;;;;;;;;;AAYG;AACH,eAAe,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB,EAAA;;;AAInB,IAAA,eAAe,iBAAiB,GAAA;AAC9B,QAAA,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;AAC/B,aAAA;YAAC,OAAM,EAAA,EAAA;AACN,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;;YAG1C,IAAI,UAAU,KAAK,IAAI,EAAE;AACvB,gBAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,OAAO,UAAU,CAAC;AACnB,SAAA;KACF;AAED,IAAA,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;AACrB,QAAA,MAAMC,cAAK,CAAC,iBAAiB,CAAC,CAAC;AAE/B,QAAA,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;AACnC,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;AAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD,EAAA;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;AAErC,IAAA,MAAM,OAAO,GACR,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,sBAAsB,CACtB,EAAA,kBAAkB,CACtB,CAAC;AAEF;;;AAGG;AACH,IAAA,MAAM,MAAM,GAAG;AACb;;AAEG;AACH,QAAA,IAAI,YAAY,GAA
A;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;SAC/B;AACD;;;AAGG;AACH,QAAA,IAAI,aAAa,GAAA;;AACf,YAAA,QACE,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,CAAA,EAAA,GAAA,KAAK,KAAL,IAAA,IAAA,KAAK,uBAAL,KAAK,CAAE,kBAAkB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,CAAC,IAAI,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzE;SACH;AACD;;;AAGG;AACH,QAAA,IAAI,WAAW,GAAA;AACb,YAAA,QACE,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzF;SACH;KACF,CAAC;AAEF;;;AAGG;IACH,SAAS,OAAO,CAAC,eAAgC,EAAA;;AAC/C,QAAA,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;AAExB,YAAA,MAAM,iBAAiB,GAAG,MACxB,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;;;AAI/C,YAAA,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;;AAEzB,YAAA,CAAA,EAAA,GAAA,KAAK,KAAA,IAAA,IAAL,KAAK,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAL,KAAK,CAAE,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,IAAI,CAAC,GAAG,EAAE,CACxC;AACE,iBAAA,IAAI,CAAC,CAAC,MAAM,KAAI;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;AACf,gBAAA,OAAO,KAAK,CAAC;AACf,aAAC,CAAC;AACD,iBAAA,KAAK,CAAC,CAAC,MAAM,KAAI;;;;gBAIhB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;AACb,gBAAA,MAAM,MAAM,CAAC;AACf,aAAC,CAAC,CAAC;AACN,SAAA;AAED,QAAA,OAAO,aAAqC,CAAC;KAC9C;AAED,IAAA,OAAO,OAAO,YAA6B,KAA0B;;;;;;;;;;QAWnE,IAAI,MAAM,CAAC,WAAW;AAAE,YAAA,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;AACvB,SAAA;AAED,QAAA,OAAO,KAAoB,CAAC;AAC9B,KAAC,CAAC;AACJ,CAAC;AACD;;;AAGG;AACH,SAAS,YAAY,CAAC,QAA+B,EAAA;IACnD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC;AAC3D,IAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE;AACxC,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IACD,OAAO;AACT,CAAC;AAUD;;;;;AAKG;AACH,SAAS,cAAc,CAAC,SAAiB,EAAA;IACvC,MAAM,eAAe,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC1D,MAAM,cAAc,GAAG,CAAG,EAAA,eAAe,CAAC,IAAI,EAAE,CAAG,CAAA,CAAA,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAChF,IAAA,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,QAAQ,KAChD,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,GAAG,KAAK,EAAE,CAAC,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CACnE,CAAC;;AAEF,IAAA,OAAO,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAM,CAAC,CAAK,EAAA,CAAC,EAAG,EAAE,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED;AAEA;;;;;;AAMG;AAEa,SAAA,+CAA+C,CAC7D,UAA2B,EAC3B,MAAyB,EAAA;;IAGzB,IAAI,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAErD,MAAM,+CAAgD,SAAQN,0BAAiB,CAAA;QAC7E,WAAmB,CAAA,UAAyB,EAAE,OAA6B,EAAA;AACzE,YAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SAC5B;QAEM,MAAM,WAAW,CAAC,WAA4B,EAAA;AACnD,YAAA,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;AACH,aAAA;YAED,MAAM,gBAAgB,GAAG,QAAQ,CAAC;AAClC,YAAA,MAAM,KAAK,GAAG,CACZ,MAAM,gBAAgB,CAAC;gBACrB,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,gBAAA,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;AAC3C,iBAAA;aACF,CAAC,EACF,KAAK,CAAC;AACR,YAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,CAAA,OAAA,EAAU,KAAK,CAAA,CAAE,CAAC,CAAC;YAEpF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;YAEjE,IAAI,CAAA,QAAQ,KAAA,IAAA,IAAR,QAAQ,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAR,QAAQ,CAAE,MAAM,MAAK,GAAG,EAAE;AAC5B,gBAAA,MAAM,SAAS,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;AACzC,gBAAA,IAAI,SAAS,EAAE;AACb,oBAAA,MAAM,aAAa,GAAc,cAAc,CAAC,SAAS,CAAC,CAAC;oBAC3D,MAAM,eAAe,GAAG,aAAa,CAAC,WAAW,GAAG,SAAS,CAAC,YAAY,CAAC;oBAC3E,MAAM,aAAa,GAAGF,mBAAU,CAAC,KAAK,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAC;oBACxE,MAAM,YAAY,GAAG,aAAa,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACzD,oBAAA,MAAM,QAAQ,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;oBACjC
,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,UAAU,EAAE,eAAe,CAAC,CAAC;AAE5E,oBAAA,MAAM,iBAAiB,GAAG,CACxB,MAAM,oBAAoB,CAAC;wBACzB,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,wBAAA,cAAc,EAAE;4BACd,cAAc,EAAE,WAAW,CAAC,cAAc;AAC3C,yBAAA;AACD,wBAAA,QAAQ,EAAE,QAAQ;qBACnB,CAAC,EACF,KAAK,CAAC;oBAER,QAAQ,GAAG,oBAAoB,CAAC;AAChC,oBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,SAAS,CAAC,eAAe,CAAC,aAAa,EACvC,CAAA,OAAA,EAAU,iBAAiB,CAAA,CAAE,CAC9B,CAAC;oBACF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAClD,iBAAA;AACF,aAAA;AAED,YAAA,OAAO,QAAQ,CAAC;SACjB;AACF,KAAA;IAED,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,+CAA+C,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACjF;KACF,CAAC;AACJ;;AC9VA;AAgGA;;;;AAIG;AACG,SAAU,cAAc,CAAC,QAAiB,EAAA;AAC9C,IAAA,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAC7C,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;IAED,MAAM,YAAY,GAAG,QAAwB,CAAC;IAE9C,QACE,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,SAAS,CAAC;AACrC,QAAA,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ;AACxC,QAAA,OAAO,YAAY,CAAC,sBAAsB,KAAK,UAAU,EACzD;AACJ,CAAC;AAED;;;;;;;AAOG;MACU,QAAQ,CAAA;AAUnB;;;;;AAKG;IACH,WAAY,CAAA,SAAiC,EAAE,OAAA,GAA2B,EAAE,EAAA;AAC1E,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;;;AAG3B,QAAA,IAAI,CAAC,OAAO,GACP,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,KACV,UAAU,EAAE,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,GAC/D,CAAC;KACH;AAED;;;;;AAKG;IACI,sBAAsB,GAAA;QAC3B,OAAO;AACL,YAAA,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU;YACnC,sBAAsB,EAAE,IAAI,CAAC,SAAS;SACvC,CAAC;KACH;AACF,CAAA;AAgCD;;;;;;AAMG;SACa,WAAW,CACzB,UAA+E,EAC/E,kBAA0C,EAAE,EAAA;;IAE5C,IAAI,UAAU,KAAK,SAAS,EAAE;AAC5B,QAAA,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;AACxC,KAAA;;;;IAMD,MAAM,eAAe,GAAG,IAAI,sBAAsB,CAAC,eAAe,CAAC,gBAAgB,CAAC,CAAC;AACrF,IAAA,MAAM,SAAS,GAA2B;QACxCS,sBAAa,CAAC,EAAE,SAAS,EAAE,eAAe,CAAC,eAAe,EAAE,CAAC;AAC7D,QAAAC,wBAAe,CAAC,eAAe,CAAC,gBAAgB,CAAC;QACjD,eAAe;AACf,QAAAC,sCAA6B,EAAE;AAC/B,QAAA,IAAI,2BAA2B,EAAE;AACjC,QAAA,IAAI,yBAAyB,CAAC,eAAe,CAAC,YAAY,CAAC;;;;QAI3DC,8BAAqB,CAAC,SAAS,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC;AACrD,QAAAC,kBAAS,CAAC;YACR,MAAM,EAAE,MAAM,CAAC,IAAI;AACnB,YAAA,kBAAkB,EAAE,oCAAoC;AACxD,YAAA,sBAAsB,EAAE,wCAAwC;SACjE,CAAC;KACH,CAAC;AAEF,IAAA,IAAIZ,eAAM,EAAE;;QAEV,SAAS,CAAC,IAAI,CAACa,oBAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;AAC1D,QAAA,SAAS,CAAC,IAAI,CAACC,2CAAkC,EAAE,CAAC,CAAC;AACtD,KAAA;AACD,IAAA,SAAS,CAAC,IAAI,CACZC,0BAAiB,CAAC,UAAU,CAAC;AAC3B,UAAE,gBAAgB,CACd,+CAA+C,CAC7C,UAAU,EACV,CAAA,EAAA,GAAA,eAAe,CAAC,QAAQ,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,kBAAkB,CAC/C,EACD,UAAU,CACX;UACD,UAAU,CACf,CAAC;AAEF,IAAA,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;AAClD;;ACzPA;AASA;;AAEG;AACG,MAAO,gCAAiC,SAAQ,gBAAgB,CAAA;AAMpE;;;;;AAKG;AACH,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,OAAmC,EAAA;AAEnC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;KACxB;AAED;;;;AAIG;AACO,IAAA,WAAW,CAAC,OAAoB,EAAA;AACxC,QAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;QAEzE,IACE,OAAO,CAAC,IAAI;AACZ,aAAC,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAK,OAAO,CAAC,IAAe,KAAK,SAAS,CAAC;AAC5E,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EACvB;AACA,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,cAAc,EAAE,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;AACtF,SAAA;AAED,QAAA,MAAM,YAAY,GAChB;AACE,YAAA,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE;YAC5B,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,cAAc,CAAC;YAClE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,WAAW,CAAC;YAC/D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,YAAY,CAAC;YAChE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,IA
AI,CAAC;YACxD,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,iBAAiB,CAAC;YACrE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,QAAQ,CAAC;YAC5D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,aAAa,CAAC;YACjE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,mBAAmB,CAAC;YACvE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC;SAC1D,CAAC,IAAI,CAAC,IAAI,CAAC;YACZ,IAAI;AACJ,YAAA,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC;AAC3C,YAAA,IAAI,CAAC,8BAA8B,CAAC,OAAO,CAAC,CAAC;QAE/C,MAAM,SAAS,GAAW,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AACvE,QAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CACjB,eAAe,CAAC,aAAa,EAC7B,aAAa,IAAI,CAAC,OAAO,CAAC,WAAW,IAAI,SAAS,CAAA,CAAE,CACrD,CAAC;;;;;AAMF,QAAA,OAAO,OAAO,CAAC;KAChB;AAED;;;;;;AAMG;IACK,oBAAoB,CAAC,OAAoB,EAAE,UAAkB,EAAA;QACnE,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;QAE9C,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,OAAO,EAAE,CAAC;AACX,SAAA;;;;QAKD,IAAI,UAAU,KAAK,eAAe,CAAC,cAAc,IAAI,KAAK,KAAK,GAAG,EAAE;AAClE,YAAA,OAAO,EAAE,CAAC;AACX,SAAA;AAED,QAAA,OAAO,KAAK,CAAC;KACd;AAED;;;;;;;;;;;;AAYG;AACK,IAAA,6BAA6B,CAAC,OAAoB,EAAA;AACxD,QAAA,IAAI,YAAY,GAAG,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,KAAI;AACjE,YAAA,OAAO,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,eAAe,CAAC,kBAAkB,CAAC,CAAC;AACjF,SAAC,CAAC,CAAC;QAEH,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAY;AACjC,YAAA,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;AAClE,SAAC,CAAC,CAAC;;AAGH,QAAA,YAAY,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,KAAI;YACzD,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;AACjF,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AACD,YAAA,OAAO,IAAI,CAAC;AACd,SAAC,CAAC,CAAC;QAEH,IAAI,gCAAgC,GAAW,EAAE,CAAC;AAClD,QAAA,YAAY,CAAC,OAAO,CAAC,CAAC,MAAM,KAAI;AAC9B,YAAA,gCAAgC,IAAI,CAAA,EAAG,MAAM,CAAC,IAAI;AAC/C,iBAAA,WAAW,EAAE;iBACb,SAAS,EAAE,IAAI,MAAM,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAA,EAAA,CAAI,CAAC;AAChD,SAAC,CAAC,CAAC;AAEH,QAAA,OAAO,gCAAgC,CAAC;KACzC;AAED;;;;AAIG;AACK,IAAA,8BAA8B,CAAC,OAAoB,EAAA;QACzD,MAAM,IAAI,GAAG,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC;QAE5C,IAAI,2BAA2B,GAAW,EAAE,CAAC;QAC7C,2BAA2B,IAAI,CAAI,CAAA,EAAA,IAAI,CAAC,OAAO,CAAC,WAAW,CAAA,EAAG,IAAI,CAAA,CAAE,CAAC;QAErE,MAAM,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC3C,MAAM,gBAAgB,GAA8B,EAAE,CAAC;AACvD,QAAA,IAAI,OAAO,EAAE;YACX,MAAM,SAAS,GAAa,EAAE,CAAC;AAC/B,YAAA,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;AACzB,gBAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;AACtD,oBAAA,MAAM,YAAY,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC;oBACvC,gBAAgB,CAAC,YAAY,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;AAC9C,oBAAA,SAAS,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AAC9B,iBAAA;AACF,aAAA;YAED,SAAS,CAAC,IAAI,EAAE,CAAC;AACjB,YAAA,KAAK,MAAM,GAAG,IAAI,SAAS,EAAE;AAC3B,gBAAA,2BAA2B,IAAI,CAAA,EAAA,EAAK,GAAG,CAAA,CAAA,EAAI,kBAAkB,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAA,CAAE,CAAC;AACxF,aAAA;AACF,SAAA;AAED,QAAA,OAAO,2BAA2B,CAAC;KACpC;AACF;;AChLD;AASA;;;;AAIG;AACG,MAAO,0BAA2B,SAAQ,UAAU,CAAA;AAWxD;;;;AAIG;IACH,WAAY,CAAA,WAAmB,EAAE,UAAkB,EAAA;AACjD,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;KACrD;AAED;;;;;AAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B,EAAA;QAE7B,OAAO,IAAI,gCAAgC,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;KACxE;AAED;;;;AAIG;AACI,IAAA,iBAAiB,CAAC,YAAoB,EAAA;QAC3C,OAAOC,iBAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;KAC5F;AACF;;ACzDD;;;;;;AAMG;AAKH,MAAM,WAAW,GAAG,oBAAoB,CAAC;AACzC,MAAM,cAAc,GAAG,SAAS,CAAC;AAEpB,MAAA,oBAAqB,SAAQvU,mBAAQ,CAAC,aAAa,CAAA;AAI9D;;;;;AAKG
;IACH,WAAY,CAAA,GAAW,EAAE,OAAqC,EAAA;QAC5D,IAAI,GAAG,KAAK,SAAS,EAAE;AACrB,YAAA,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;AACzC,SAAA;;QAGD,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;AAED,QAAA,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;AACtB,YAAA,MAAM,gBAAgB,GAAGA,mBAAQ,CAAC,wBAAwB,EAAE,CAAC;YAC7D,OAAO,CAAC,SAAS,GAAG,CAAG,EAAA,WAAW,IAAI,cAAc,CAAA,CAAA,EAAI,gBAAgB,CAAA,CAAE,CAAC;AAC5E,SAAA;AAED,QAAA,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AAE1B,QAAA,IAAI,CAAC,kBAAkB,GAAG,iCAAiC,CAAC;QAE5D,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC;;AAG3C,QAAA,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;;QAGf,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,YAAY,CAAC;KAChD;AACF;;ACnDD;AAqBA;;;AAGG;MACmB,aAAa,CAAA;AAyBjC;;;;AAIG;IACH,WAAsB,CAAA,GAAW,EAAE,QAAsB,EAAA;;AAEvD,QAAA,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AAC9B,QAAA,IAAI,CAAC,WAAW,GAAG,qBAAqB,CAAC,GAAG,CAAC,CAAC;AAC9C,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,oBAAoB,GAAG,IAAI,oBAAoB,CAClD,IAAI,CAAC,GAAG,EACR,QAAQ,CAAC,sBAAsB,EAAE,CAClC,CAAC;AAEF,QAAA,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;AAE7D,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAC5C,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,EAAE;AAC7C,YAAA,IACE,CAACuT,eAAM,IAAI,OAAO,YAAY,0BAA0B;gBACxD,OAAO,YAAY,mBAAmB,EACtC;AACA,gBAAA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC;AAC3B,aAAA;AAAM,iBAAA,IAAIe,0BAAiB,CAAE,OAAe,CAAC,UAAU,CAAC,EAAE;;;AAGzD,gBAAA,IAAI,CAAC,UAAU,GAAI,OAAe,CAAC,UAAU,CAAC;AAC/C,aAAA;AACF,SAAA;;AAGD,QAAA,MAAM,oBAAoB,GAAG,IAAI,CAAC,oBAA2B,CAAC;AAC9D,QAAA,oBAAoB,CAAC,kBAAkB,GAAG,SAAS,CAAC;KACrD;AACF;;ACrFD;AAMA;;;AAGG;AACI,MAAM,UAAU,GAAGE,8BAAkB,CAAC;AAC3C,IAAA,aAAa,EAAE,oBAAoB;AACnC,IAAA,SAAS,EAAE,mBAAmB;AAC/B,CAAA,CAAC,CAAC;AAEH;;;;;;AAMG;AACG,SAAU,kCAAkC,CAChD,OAA0B,EAAA;;IAE1B,OAAO;;QAEL,WAAW,EAAE,CAAC,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,cAAsB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,WAAW;QAC1D,cAAc,EAAE,CAAA,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,cAAc,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,cAAc;KACxD,CAAC;AACJ;;AC9BA;AACA;AAEA;;;;;;;;AAQG;MACU,kBAAkB,CAAA;AAA/B,IAAA,WAAA,GAAA;AAiGE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAa,CAAA,aAAA,GAAY,KAAK,CAAC;AAEtC;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAqB,CAAA,qBAAA,GAAY,KAAK,CAAC;AAE9C;;AAEG;QACI,IAAe,CAAA,eAAA,GAAY,KAAK,CAAC;KA6CzC;AAlMC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,WAAmB,EAAA;AACrC,QAAA,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEpD,QAAA,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;AAC9B,YAAA,QAAQ,IAAI;AACV,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;oBACxC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;oBAClC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,qBAAqB,GAAG,
IAAI,CAAC;oBAChD,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC1C,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,uBAAuB,IAAI,CAAA,CAAE,CAAC,CAAC;AACvD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,kBAAkB,CAAC;KAC3B;AAED;;;;;AAKG;IACI,OAAO,IAAI,CAAC,cAAsC,EAAA;AACvD,QAAA,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QACpD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;AAChC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;AAC/B,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;AACxB,YAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;AACjC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;AAChC,YAAA,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;AACzC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;AAC/B,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;AAChC,SAAA;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;AAC1B,YAAA,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;AACnC,SAAA;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;AACxC,YAAA,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;AACjD,SAAA;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;AAClC,YAAA,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;AAC3C,SAAA;AACD,QAAA,OAAO,kBAAkB,CAAC;KAC3B;AAyDD;;;;;AAKG;IACI,QAAQ,GAAA;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;AAC9B,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;AACxB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC7B;AACF;;AC/MD;AACA;AAEA;;;;;;AAMG;MACU,uBAAuB,CAAA;AAApC,IAAA,WAAA,GAAA;AA6GE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAa,CAAA,aAAA,GAAY,KAAK,CAAC;AAEtC;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAqB,CAAA,qBAAA,GAAY,KAAK,CAAC;AAE9C;;AAEG;QACI,IAAe,CAAA,eAAA,GAAY,KAAK,CAAC;AAExC;;AAEG;QACI,IAAY,CAAA,YAAA,GAAY,KAAK,CAAC;KAqDtC;AAhOC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,WAAmB,EAAA;AACrC,QAAA,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;AAE9D,QAAA,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;AAC9B,YAAA,QAAQ,IAAI;AACV,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACrC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,MAAM,GA
AG,IAAI,CAAC;oBACtC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC7C,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACrD,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC/C,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;oBAC5C,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,sBAAsB,IAAI,CAAA,CAAE,CAAC,CAAC;AACtD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,uBAAuB,CAAC;KAChC;AAED;;;;;AAKG;IACI,OAAO,IAAI,CAAC,cAA2C,EAAA;AAC5D,QAAA,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAC9D,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;AACpC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;AACvC,SAAA;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;AACxB,YAAA,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;AACtC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;AACvC,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;AAChC,YAAA,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;AAC9C,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;AACpC,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;AAC1B,YAAA,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;AACxC,SAAA;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;AACxC,YAAA,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;AACtD,SAAA;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;AAClC,YAAA,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;AAChD,SAAA;QACD,IAAI,cAAc,CAAC,YAAY,EAAE;AAC/B,YAAA,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;AAC7C,SAAA;AACD,QAAA,OAAO,uBAAuB,CAAC;KAChC;AAmED;;;;;;;AAOG;IACI,QAAQ,GAAA;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;AAC9B,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;AACxB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,YAAY,EAAE;AACrB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC7B;AACF;;AC3OD;AAMA;;;;;AAKG;MACU,2BAA2B,CAAA;AAgBtC;;;;AAIG;IACH,WAAY,CAAA,WAAmB,EAAE,iBAAoC,EAAA;AACnE,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;AAC3C,QAAA,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;KAC3D;AAED;;;;AAIG;AACI,IAAA,iBAAiB,CAAC,YAAoB,EAAA;;
QAG3C,OAAOD,iBAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;KACrF;AACF;;ACjDD;AACA;AAkBA;;;;;;AAMG;AACG,SAAU,eAAe,CAAC,OAAmB,EAAA;IACjD,OAAO,OAAO,CAAC,GAAG,GAAG,CAAA,EAAG,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,GAAG,CAAE,CAAA,GAAG,OAAO,CAAC,KAAK,CAAC;AACzE;;AC5BA;AAOA;;AAEG;AACSE,6BAUX;AAVD,CAAA,UAAY,WAAW,EAAA;AACrB;;AAEG;AACH,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AAEf;;AAEG;AACH,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,YAA2B,CAAA;AAC7B,CAAC,EAVWA,mBAAW,KAAXA,mBAAW,GAUtB,EAAA,CAAA,CAAA,CAAA;AA4FD;;;;;;;;AAQG;MACU,kBAAkB,CAAA;AAsN7B,IAAA,WAAA,CACE,OAAe,EACf,SAAiB,EACjB,oBAAyD,EACzD,QAAiB,EACjB,aAAsB,EACtB,QAAsB,EACtB,QAAe,EACf,SAAgB,EAChB,OAAoB,EACpB,UAAmB,EACnB,QAAiB,EACjB,YAAqB,EACrB,kBAA2B,EAC3B,eAAwB,EACxB,eAAwB,EACxB,WAAoB,EACpB,iBAAqC,EACrC,0BAAmC,EACnC,aAAsB,EACtB,eAAwB,EAAA;AAExB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,oBAAoB,KAAK,SAAS,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;;AAElF,YAAA,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;AACpD,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;AACxD,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;AAChD,YAAA,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,OAAO,CAAC;AACjD,YAAA,IAAI,CAAC,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;AAClD,YAAA,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;AAC5D,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,YAAY,CAAC;AACtD,YAAA,IAAI,CAAC,kBAAkB,GAAG,oBAAoB,CAAC,kBAAkB,CAAC;AAClE,YAAA,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;AAC5D,YAAA,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;AAC5D,YAAA,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;YAEpD,IAAI,oBAAoB,CAAC,iBAAiB,EAAE;gBAC1C,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBACvE,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,eAAe,CAAC;gBAC9E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;gBAC1E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;AAE1E,gBAAA,IAAI,CAAC,0BAA0B,GAAG,oBAAoB,CAAC,0BAA0B,CAAC;AAClF,gBAAA,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;AACzD,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACnC,YAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;AAC3B,YAAA,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC;AACxC,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC;AAC5B,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,YAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;AACjC,YAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,YAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAE/B,YAAA,IAAI,iBAAiB,EAAE;AACrB,gBAAA,IAAI,CAAC,SAAS,GAAG,iBAAiB,CAAC,cAAc,CAAC;AAClD,gBAAA,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;AACvD,gBAAA,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;AACvD,gBAAA,IAAI,CAAC,eAAe,GAAG,iBAAiB,CAAC,eAAe,CAAC;AACzD,gBAAA,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;AACrD,gBAAA,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;AAErD,gBAAA,IAAI,CAAC,0BAA0B,GAAG,0BAA0B,CAAC;AAC7D,gBAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACpC,aAAA;AACF,SAAA;KACF;AA1JD;;;;AAIG;AACH,IAAA,IAAW,OAAO,GAAA;QAChB,IAAI,IAAI,CAAC,YAAY,EAAE;YACrB,OAAO;AACL,gBAAA,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,GAAG;AAC1B,gBAAA,KAAK,EAAE,IAA
I,CAAC,YAAY,CAAC,KAAK;aAC/B,CAAC;AACH,SAAA;AACD,QAAA,OAAO,SAAS,CAAC;KAClB;AA+ID;;;AAGG;IACI,QAAQ,GAAA;AACb,QAAA,MAAM,MAAM,GAAa;YACvB,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,IAAI;YACJ,KAAK;YACL,OAAO;YACP,OAAO;YACP,KAAK;YACL,KAAK;YACL,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,OAAO;YACP,MAAM;SACP,CAAC;QACF,MAAM,OAAO,GAAa,EAAE,CAAC;AAE7B,QAAA,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE;AAC1B,YAAA,QAAQ,KAAK;AACX,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;oBAC3D,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAG,SAAS,CACvE,CAAC;oBACF,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,GAAG,SAAS,CACzE,CAAC;oBACF,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,OAAO,GAAG,eAAe,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,SAAS,CACzD,CAAC;oBACF,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;oBAC9D,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;gBACR,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;oBAClE,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,IAAI,CAAC,cAAc,EAAE,KAAK,CAAC,GAAG,SAAS,CACnF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,GAAG,SAAS,CACrF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;oBAChE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,kBAAkB,CAAC,CAAC;oBACtE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;AACR,gBAAA,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,0BAA0B,CAAC,CAAC;oBAC9E,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;AACT,aAAA;AACF,SAAA;AACD,QAAA,OAAO,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC1B;AAED;;;;;;AAMG;AACK,IAAA,uBAAuB,CAAC,OAAiB,EAAE,GAAW,EAAE,KAAc,EAAA;QAC5E,IAAI,CAAC,KAAK,EAAE;YAC
V,OAAO;AACR,SAAA;AAED,QAAA,GAAG,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC;AAC9B,QAAA,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;QAClC,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,CAAA,EAAG,GAAG,CAAI,CAAA,EAAA,KAAK,CAAE,CAAA,CAAC,CAAC;AACjC,SAAA;KACF;AACF;;AC9jBD;SAsPgB,8BAA8B,CAC5C,sBAA8C,EAC9C,sCAAsF,EACtF,WAAoB,EAAA;AAEpB,IAAA,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC;AAElG,IAAA,MAAM,mBAAmB,GACvB,sCAAsC,YAAY,0BAA0B;AAC1E,UAAE,sCAAsC;UACtC,SAAS,CAAC;AAChB,IAAA,IAAI,2BAAoE,CAAC;AAEzE,IAAA,IAAI,mBAAmB,KAAK,SAAS,IAAI,WAAW,KAAK,SAAS,EAAE;QAClE,2BAA2B,GAAG,IAAI,2BAA2B,CAC3D,WAAW,EACX,sCAA2D,CAC5D,CAAC;AACH,KAAA;AAED,IAAA,IAAI,mBAAmB,KAAK,SAAS,IAAI,2BAA2B,KAAK,SAAS,EAAE;AAClF,QAAA,MAAM,SAAS,CAAC,gEAAgE,CAAC,CAAC;AACnF,KAAA;;IAGD,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;AACrC,YAAA,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;AAC5F,SAAA;AAAM,aAAA;AACL,YAAA,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;AACH,SAAA;AACF,KAAA;;;;IAKD,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;AACrC,YAAA,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;AAC5F,SAAA;AAAM,aAAA;;YAEL,IAAI,OAAO,IAAI,YAAY,EAAE;AAC3B,gBAAA,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;AACH,aAAA;AAAM,iBAAA;AACL,gBAAA,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;IAED,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;AACrC,YAAA,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;AAC5F,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,UAAU,CAClB,kGAAkG,CACnG,CAAC;AACH,SAAA;AACF,KAAA;AAED,IAAA,MAAM,IAAI,UAAU,CAAC,oCAAoC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;;;;;;;;;;;;AAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C,EAAA;AAE/C,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,EAAE,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;AACA,QAAA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;AAChB,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;AACD,QAAA,sBAAsB,CAAC,UAAU;AACjC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,sBAAsB,CAAC,YAAY,GAAG,sBAAsB,CAAC,YAAY,GAAG,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,GAAG,sBAAsB,CAAC,kBAAkB,GAAG,EAAE;QAC1F,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,GAAG,EAAE;AAC7E,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,
EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;AAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C,EAAA;AAE/C,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,EAAE,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;AACA,QAAA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;AACD,QAAA,sBAAsB,CAAC,UAAU;AACjC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY,GAAG,sBAAsB,CAAC,YAAY,GAAG,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,GAAG,sBAAsB,CAAC,kBAAkB,GAAG,EAAE;QAC1F,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,GAAG,EAAE;AAC7E,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;AAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C,EAAA;AAE/C,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,EAAE,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;AACA,QAAA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,C
AChC;AACD,QAAA,sBAAsB,CAAC,UAAU;AACjC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,YAAY,GAAG,sBAAsB,CAAC,YAAY,GAAG,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,GAAG,sBAAsB,CAAC,kBAAkB,GAAG,EAAE;QAC1F,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,GAAG,EAAE;AAC7E,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAEtE,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,SAAS,EACT,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;AAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD,EAAA;AAExD,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;;IAG1F,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;AAC5E,QAAA,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;cACxD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;AAC3F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;cACzD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;AAC5F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;AAC3D,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,YAAY;AACnC,QAAA,sBAAsB,CAAC,kBAAkB;AACzC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,WAAW;AACnC,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAC9E,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,CAC9C,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;AAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD,EAAA;AAExD,IAAA,sBAAsB,GAAG,wCAAwC,CA
AC,sBAAsB,CAAC,CAAC;;IAG1F,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;AAC5E,QAAA,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;cACxD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;AAC3F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;cACzD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;AAC5F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;AAC3D,QAAA,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;AACT,QAAA,sBAAsB,CAAC,aAAa;AACpC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,YAAY;AACnC,QAAA,sBAAsB,CAAC,kBAAkB;AACzC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,WAAW;AACnC,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAC9E,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,CACrC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;AAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD,EAAA;AAExD,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;;IAG1F,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;AAC5E,QAAA,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAo
B,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;cACxD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;AAC3F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;cACzD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;AAC5F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;AAC3D,QAAA,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;AACT,QAAA,sBAAsB,CAAC,aAAa;AACpC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,YAAY;AACnC,QAAA,sBAAsB,CAAC,kBAAkB;AACzC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,WAAW;AACnC,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAC9E,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED,SAAS,gBAAgB,CAAC,WAAmB,EAAE,aAAqB,EAAE,QAAiB,EAAA;;;IAGrF,MAAM,QAAQ,GAAa,CAAC,CAAA,MAAA,EAAS,WAAW,CAAI,CAAA,EAAA,aAAa,CAAE,CAAA,CAAC,CAAC;AACrE,IAAA,IAAI,QAAQ,EAAE;AACZ,QAAA,QAAQ,CAAC,IAAI,CAAC,IAAI,QAAQ,CAAA,CAAE,CAAC,CAAC;AAC/B,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC3B,CAAC;AAED,SAAS,wCAAwC,CAC/C,sBAA8C,EAAA;AAE9C,IAAA,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC;AAClG,IAAA,IAAI,sBAAsB,CAAC,YAAY,IAAI,OAAO,GAAG,YAAY,EAAE;AACjE,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,YAAY,EAAE;AACxF,QAAA,MAAM,UAAU,CAAC,wDAAwD,CAAC,CAAC;AAC5E,KAAA;AAED,IAAA,IAAI,sBAAsB,CAAC,SAAS,IAAI,OAAO,GAAG,YAAY,EAAE;AAC9D,QAAA,MAAM,UAAU,CAAC,+DAA+D,CAAC,CAAC;AACnF,KAAA;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,SAAS,EAAE;AACrF,QAAA,MAAM,UAAU,CAAC,qDAAqD,CAAC,CAAC;AACzE,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,qBAAqB;QACxD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,aAAa;QAChD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,eAAe;QAClD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,GAAG;QACtC,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IAED,IACE,OAAO,GAAG,YAAY;AACtB,QAAA,sBAAsB,CAAC,WAAW;AAClC,SAAC,sBAAsB,CAAC,WAAW,CAAC,IAAI,IAAI,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,EACvF;AACA,QAAA,MAAM,UAAU,CAAC,6EAA6E,CAAC,CAAC;AACjG,KAAA;IAED,IACE,OAAO,GAAG,YAAY;AACtB,QAAA,sBAAsB,CAAC,WAAW;AACjC,QAAA,sBAAsB,CAAC,WAAuC,CAAC,YAAY,EAC5E;AACA,QAAA,MAAM,UAAU,CAAC,sEAAsE,CAAC,CAAC;AAC1F,KAAA;IAED,IACE,OAAO,GAAG,YAAY;SACrB,sBAAsB,CAAC,0BAA0B,IAAI,sBAAsB,CAAC,aAAa,CAAC,EAC3F;AACA,QAAA,MAAM,UAAU,CACd,mGAAmG,CACpG,CAAC;AACH,KAAA;AAED,IAAA,IAAI,sBAAsB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;AACpE,QAAA,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;AAC/F,KAAA;AAED,IAAA,sBAAsB,CAAC,OAAO,GAAG,OAAO,CAAC;AACzC,IAAA,OA
AO,sBAAsB,CAAC;AAChC;;AC3iCA;AAgGA;;AAEG;MACU,eAAe,CAAA;AAwB1B;;;;AAIG;IACH,WAAY,CAAA,MAAoC,EAAE,OAAgB,EAAA;AAChE,QAAA,MAAM,aAAa,GAAG,IAAI,oBAAoB,CAC5C,MAAM,CAAC,GAAG,EACT,MAAc,CAAC,QAAQ,CAAC,sBAAsB,EAAE,CAClD,CAAC;AACF,QAAA,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC;AAEvB,QAAA,IAAK,MAAqB,CAAC,IAAI,KAAK,SAAS,EAAE;AAC7C,YAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;YACzB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,aAAa,CAAC,CAAC;AAC/D,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;YAC1B,IAAI,CAAC,yBAAyB,GAAG,IAAIC,MAAW,CAAC,aAAa,CAAC,CAAC;AACjE,SAAA;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAGC,qBAAY,EAAE,CAAC;AAC1B,SAAA;AACD,QAAA,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;KACzB;AA1CD;;;;AAIG;AACH,IAAA,IAAW,OAAO,GAAA;QAChB,OAAO,IAAI,CAAC,QAAQ,CAAC;KACtB;AAED;;;;AAIG;AACH,IAAA,IAAW,GAAG,GAAA;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;KAClB;AA4BD;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,YAAY,CACvB,QAAgB,EAChB,UAAiC,EAAE,EAAA;;AAEnC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,iBACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EACR,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAE3C,CAAA,EAAA,eAAe,EAAE,IAAI,CAAC,QAAQ,EAC3B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,WAAW,CACtB,eAAuB,EACvB,UAAiC,EAAE,EAAA;;AAEnC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC/D,IAAI,CAAC,QAAQ,EACb,eAAe,kBAEb,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;AACF,YAAA,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC;AAChC,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,YAAY,CAAC,OAAA,GAAiC,EAAE,EAAA;;AAC3D,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAA
W,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,kBACpE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,UAAU,CAAC,OAAA,GAAiC,EAAE,EAAA;;AACzD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,kBAClE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,UAAU,CACrB,WAAmB,EACnB,UAAiC,EAAE,EAAA;;AAEnC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,MAAM,gBAAgB,GAAA,MAAA,CAAA,MAAA,CAAA,EACpB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EACX,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CACtD,CAAC;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;AAC1E,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF;;ACrYD;AAoCA;;;;AAIG;AACG,MAAO,uBAAwB,SAAQC,eAAQ,CAAA;AAWnD;;;;;;;;;AASG;IACH,WACE,CAAA,MAA6B,EAC7B,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,OAAA,GAA0C,EAAE,EAAA;QAE5C,KAAK,CAAC,EAAE,aAAa,EAAE,OAAO,CAAC,aAAa,EAAE,CAAC,CAAC;QAtB1C,IAAO,CAAA,OAAA,GAAW,CAAC,CAAC;AAoDpB,QAAA,IAAA,CAAA,iBAAiB,GAAG,CAAC,IAAY,KAAI;AAC3C,YAAA,IAAI,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;AAClC,gBAAA,IAAI,CAAC,OAAO,CAAC,iBAA
iB,GAAG,SAAS,CAAC;AAC3C,gBAAA,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;AACpB,gBAAA,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC,MAAM,CAAC,CAAC;AACvC,gBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACxB,OAAO;AACR,aAAA;;;;AAKD,YAAA,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC;YAC3B,IAAI,IAAI,CAAC,UAAU,EAAE;AACnB,gBAAA,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC;AAC5D,aAAA;AACD,YAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;AACpB,gBAAA,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;AACrB,aAAA;AACH,SAAC,CAAC;AAEM,QAAA,IAAA,CAAA,uBAAuB,GAAG,CAAC,GAAW,KAAI;AAChD,YAAA,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE;AACpC,gBAAA,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBAClB,OAAO;AACR,aAAA;;;;;;YAOD,IAAI,CAAC,yBAAyB,EAAE,CAAC;YACjC,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;AAChC,gBAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACjB,aAAA;AAAM,iBAAA,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,GAAG,EAAE;;;;AAIlC,gBAAA,IAAI,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACxC,oBAAA,IAAI,CAAC,OAAO,IAAI,CAAC,CAAC;AAClB,oBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;AACrB,yBAAA,IAAI,CAAC,CAAC,SAAS,KAAI;AAClB,wBAAA,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;wBACxB,IAAI,CAAC,sBAAsB,EAAE,CAAC;wBAC9B,OAAO;AACT,qBAAC,CAAC;AACD,yBAAA,KAAK,CAAC,CAAC,KAAK,KAAI;AACf,wBAAA,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACtB,qBAAC,CAAC,CAAC;AACN,iBAAA;AAAM,qBAAA;oBACL,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,CAAA,mHAAA,EACE,IAAI,CAAC,MAAM,GAAG,CAChB,CAAA,sBAAA,EAAyB,IAAI,CAAC,GAAG,CAAA,WAAA,EAAc,IAAI,CAAC,OAAO,CAAA,eAAA,EACzD,IAAI,CAAC,gBACP,CAAA,CAAE,CACH,CACF,CAAC;AACH,iBAAA;AACF,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,4FACE,IAAI,CAAC,GACP,CAAsB,mBAAA,EAAA,IAAI,CAAC,MAAM,GAAG,CAAC,CAAE,CAAA,CACxC,CACF,CAAC;AACH,aAAA;AACH,SAAC,CAAC;AAnGA,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACrB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACrB,QAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC;AACpB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,GAAG,GAAG,MAAM,GAAG,KAAK,GAAG,CAAC,CAAC;AAC9B,QAAA,IAAI,CAAC,gBAAgB;AACnB,YAAA,OAAO,CAAC,gBAAgB,IAAI,OAAO,CAAC,gBAAgB,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,GAAG,CAAC,CAAC;AAC3F,QAAA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;AACrC,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QAEvB,IAAI,CAAC,sBAAsB,EAAE,CAAC;KAC/B;IAEM,KAAK,GAAA;AACV,QAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;KACtB;IAEO,sBAAsB,GAAA;QAC5B,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;KACvD;IAEO,yBAAyB,GAAA;QAC/B,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC3D,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QAChE,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;KACnE;IA0ED,QAAQ,CAAC,KAAmB,EAAE,QAAiC,EAAA;;QAE7D,IAAI,CAAC,yBAAyB,EAAE,CAAC;AAChC,QAAA,IAAI,CAAC,MAAmB,CAAC,OAAO,EAAE,CAAC;AAEpC,QAAA,QAAQ,CAAC,KAAK,KAAK,IAAI,GAAG,SAAS,GAAG,KAAK,CAAC,CAAC;KAC9C;AACF;;AClLD;AAoBA;;;;;;;;;AASG;MACU,oBAAoB,CAAA;AA6d/B;;;;;;;;AAQG;IACH,WACE,CAAA,gBAA4C,EAC5C,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,OAAA,GAA0C,EAAE,EAAA;AAE5C,QAAA,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,IAAI,uBAAuB,CACnD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,MAAM,EACN,MAAM,EACN,KAAK,EACL,OAAO,CACR,CAAC;KACH;AApfD;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;
;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;;;;;;;AAWG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,MAAM,GAAA;AACf,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;KACrC;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,qBAAqB,GAAA;AAC9B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;KACpD;AAED;;;;;;AAMG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;AAKG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;;AAKG;AACH,IAAA,IAAW,uBAAuB,GAAA;AAChC,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;KACtD;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;AAIG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,iBAAiB,GAAA;AAC1B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;KAChD;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,cAAc,GAAA;AACvB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;KAC7C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,OAAO,GAAA;AAChB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;KACtC;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;AAIG;AACH,IAAA,IAAW,gBAAgB,GAAA;AACzB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,gBAAgB,CAAC;KAC/C;AAED;;;;;AAKG;AACH,IAAA,IAAW,mBAAmB,GAAA;AAC5B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;KAClD;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;AAIG;AACH,IAAA,IAAW,oCAAoC,GAAA;AAC7C,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,oCAAoC,CAAC;KACnE;AAED;;;;AAIG;AACH,IAAA,IAAW,iCAAiC,GAAA;AAC1C,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,iCAAiC,CAAC;KAChE;AAED;;;;AAIG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,IAAW,2BAA2B,GAAA;AACpC,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAA
C,2BAA2B,CAAC;KAC1D;AAED;;;;AAIG;AACH,IAAA,IAAW,sBAAsB,GAAA;AAC/B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,sBAAsB,CAAC;KACrD;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,kBAAkB,GAAA;QAC3B,OAAOtB,eAAM,GAAG,IAAI,CAAC,kBAAkB,GAAG,SAAS,CAAC;KACrD;AAED;;AAEG;AACH,IAAA,IAAW,SAAS,GAAA;AAGlB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AA8BF;;ACphBD;AACA;AAEO,MAAM,qBAAqB,GAAW,EAAE,CAAC;AACzC,MAAM,eAAe,GAAe,IAAI,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;AACrE,MAAM,cAAc,GAAW,YAAY,CAAC;AAC5C,MAAM,eAAe,GAAW,aAAa;;ACNpD;AACA;MAqBa,UAAU,CAAA;AACrB;;;;;;AAMG;IACI,aAAa,cAAc,CAChC,MAAoB,EACpB,MAAc,EACd,OAAA,GAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;AAC9E,QAAA,IAAI,KAAK,CAAC,MAAM,KAAK,MAAM,EAAE;AAC3B,YAAA,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;AACpC,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;KACd;AAED;;;;;AAKG;IACK,aAAa,QAAQ,CAC3B,MAAoB,EACpB,UAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;AAChE,QAAA,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC;KACf;;;;IAKO,aAAa,cAAc,CACjC,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,IAAI,iBAAiB,GAAG,CAAC,CAAC;AAC1B,QAAA,IAAI,IAAI,EAAE,YAAY,EAAE,mBAAmB,CAAC;QAE5C,GAAG;YACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAClD,YAAA,YAAY,GAAG,IAAI,GAAG,IAAI,CAAC;YAC3B,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,KAAK,iBAAiB,CAAC;YACpD,iBAAiB,IAAI,CAAC,CAAC;AACxB,SAAA,QAAQ,YAAY,IAAI,iBAAiB,GAAG,EAAE,EAAE;AAEjD,QAAA,IAAI,YAAY,EAAE;;;YAGhB,aAAa,GAAG,aAAa,CAAC;AAC9B,YAAA,mBAAmB,GAAG,SAAS,CAAC;YAChC,GAAG;gBACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAClD,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,IAAI,mBAAmB,CAAC;AACrD,gBAAA,mBAAmB,IAAI,GAAG,CAAC;aAC5B,QAAQ,IAAI,GAAG,IAAI,EAAE;YAEtB,MAAM,GAAG,GAAG,CAAC,aAAa,GAAG,CAAC,GAAG,EAAE,aAAa,GAAG,CAAC,CAAC,GAAG,aAAa,IAAI,CAAC,CAAC;YAC3E,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,EAAE;AAClE,gBAAA,MAAM,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;AACtC,aAAA;AACD,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAED,QAAA,OAAO,CAAC,aAAa,IAAI,CAAC,IAAI,EAAE,aAAa,GAAG,CAAC,CAAC,CAAC;KACpD;IAEM,aAAa,QAAQ,CAC1B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACnD;IAEM,aAAa,OAAO,CACzB,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACnD;IAEM,aAAa,QAAQ,GAAA;AAC1B,QAAA,OAAO,IAAI,CAAC;KACb;IAEM,aAAa,WAAW,CAC7B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,CAAC,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,CAAC,KAAK,CAAC,EAAE;AACX,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;aAAM,IAAI,CAAC,KAAK,CAAC,EAAE;AAClB,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;AAC5C,SAAA;KACF;IAEM,aAAa,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;AAClE,QAAA,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;KACjC;IAEM,aAAa,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;AAClE,QAAA,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;KACjC;IAEM,aAAa,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,IAAI,GAAG,MAAM,UAA
U,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACxD,IAAI,IAAI,GAAG,CAAC,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;AAC7C,SAAA;AAED,QAAA,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;KAChE;IAEM,aAAa,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAC1D,QAAA,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;AACtC,QAAA,OAAO,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAClC;IAEO,aAAa,WAAW,CAC9B,MAAoB,EACpB,cAAgF,EAChF,OAAA,GAAiC,EAAE,EAAA;QAEnC,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;;QAEzD,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACpD,QAAA,OAAO,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC;KACvB;IAEM,aAAa,OAAO,CACzB,MAAoB,EACpB,cAAgF,EAChF,OAAA,GAAiC,EAAE,EAAA;QAEnC,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,IAA8B,GAAA,EAAE,KACJ;YAC5B,OAAO,UAAU,CAAC,WAAW,CAAC,CAAC,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;AACzD,SAAC,CAAC;AAEF,QAAA,MAAM,KAAK,GAAsB,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;QAE7F,MAAM,IAAI,GAAsB,EAAE,CAAC;AACnC,QAAA,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;AAC7B,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;IAEO,aAAa,SAAS,CAC5B,MAAoB,EACpB,cAAgF,EAChF,OAAA,GAAiC,EAAE,EAAA;QAEnC,MAAM,KAAK,GAAQ,EAAE,CAAC;AACtB,QAAA,KACE,IAAI,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EACtD,KAAK,KAAK,CAAC,EACX,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EAClD;YACA,IAAI,KAAK,GAAG,CAAC,EAAE;;gBAEb,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAC3C,KAAK,GAAG,CAAC,KAAK,CAAC;AAChB,aAAA;YAED,OAAO,KAAK,EAAE,EAAE;gBACd,MAAM,IAAI,GAAM,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACtD,gBAAA,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAClB,aAAA;AACF,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;KACd;AACF,CAAA;AAOD,IAAK,WAOJ,CAAA;AAPD,CAAA,UAAK,WAAW,EAAA;AACd,IAAA,WAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACjB,IAAA,WAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,WAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACjB,CAAC,EAPI,WAAW,KAAX,WAAW,GAOf,EAAA,CAAA,CAAA,CAAA;AAYD,IAAK,aASJ,CAAA;AATD,CAAA,UAAK,aAAa,EAAA;AAChB,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb,IAAA,aAAA,CAAA,SAAA,CAAA,GAAA,SAAmB,CAAA;AACnB,IAAA,aAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,aAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACjB,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,aAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACnB,CAAC,EATI,aAAa,KAAb,aAAa,GASjB,EAAA,CAAA,CAAA,CAAA;MAEqB,QAAQ,CAAA;AAS5B;;AAEG;IACI,OAAO,UAAU,CAAC,MAAuB,EAAA;AAC9C,QAAA,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AAC9B,YAAA,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;AAC1C,SAAA;AAAM,aAAA,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;AAChC,YAAA,OAAO,QAAQ,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;AACzC,SAAA;AAAM,aAAA;AACL,YAAA,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAsB,CAAC,CAAC;AAC1D,SAAA;KACF;IAEO,OAAO,gBAAgB,CAAC,MAAc,EAAA;AAC5C,QAAA,QAAQ,MAAM;YACZ,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,OAAO,CAAC;YAC3B,KAAK,aAAa,CAAC,GAAG,CAAC;YACvB,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM,CAAC;YAC1B,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM;AACvB,gBAAA,OAAO,IAAI,iBAAiB,CAAC,MAAuB,CAAC,CAAC;AACxD,YAAA;AACE,gBAAA,MAAM,IAAI,KAAK,CAAC,wBAAwB,MAAM,CAAA,CAAE,CAAC,CAAC;AACrD,SAAA;KACF;IAEO,OAAO,eAAe,CAAC,MAAa,EAAA;AAC1C,QAAA,OAAO,IAAI,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;KAC3D;IAEO,OAAO,gBAAgB,CAAC,MAAoB,EAAA;AAClD,QAAA,MAAM,IAAI,G
AAG,MAAM,CAAC,IAAI,CAAC;;QAEzB,IAAI;AACF,YAAA,OAAO,QAAQ,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;AACxC,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;;AAElB,SAAA;AAED,QAAA,QAAQ,IAAI;YACV,KAAK,WAAW,CAAC,MAAM;gBACrB,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,CAAA,CAAE,CAAC,CAAC;AAC1E,iBAAA;AACD,gBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;AAChB,oBAAA,MAAM,IAAI,KAAK,CAAC,sDAAsD,MAAM,CAAA,CAAE,CAAC,CAAC;AACjF,iBAAA;;gBAGD,MAAM,MAAM,GAA6B,EAAE,CAAC;AAC5C,gBAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,CAAA,CAAE,CAAC,CAAC;AACnF,iBAAA;AACD,gBAAA,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,MAAM,EAAE;AACjC,oBAAA,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AACtD,iBAAA;gBACD,OAAO,IAAI,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;YACjD,KAAK,WAAW,CAAC,IAAI;gBACnB,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,CAAA,CAAE,CAAC,CAAC;AAC1E,iBAAA;AACD,gBAAA,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;AACnB,oBAAA,MAAM,IAAI,KAAK,CAAC,yDAAyD,MAAM,CAAA,CAAE,CAAC,CAAC;AACpF,iBAAA;AACD,gBAAA,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YAC1C,KAAK,WAAW,CAAC,GAAG;AAClB,gBAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,CAAA,CAAE,CAAC,CAAC;AACnF,iBAAA;AACD,gBAAA,OAAO,IAAI,WAAW,CAAC,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7D,YAAA,KAAK,WAAW,CAAC,KAAK,CAAC;AACvB,YAAA,KAAK,WAAW,CAAC,KAAK,CAAC;AACvB,YAAA;gBACE,MAAM,IAAI,KAAK,CAAC,CAAA,qBAAA,EAAwB,IAAI,CAAO,IAAA,EAAA,MAAM,CAAE,CAAA,CAAC,CAAC;AAChE,SAAA;KACF;AACF,CAAA;AAED,MAAM,iBAAkB,SAAQ,QAAQ,CAAA;AAGtC,IAAA,WAAA,CAAY,SAAwB,EAAA;AAClC,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;KAC7B;AAEM,IAAA,IAAI,CACT,MAAoB,EACpB,OAAA,GAAiC,EAAE,EAAA;QAEnC,QAAQ,IAAI,CAAC,UAAU;YACrB,KAAK,aAAa,CAAC,IAAI;AACrB,gBAAA,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;YAC/B,KAAK,aAAa,CAAC,OAAO;gBACxB,OAAO,UAAU,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YACjD,KAAK,aAAa,CAAC,GAAG;gBACpB,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC7C,KAAK,aAAa,CAAC,IAAI;gBACrB,OAAO,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC9C,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAChD,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAChD,YAAA;AACE,gBAAA,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAC;AAC7C,SAAA;KACF;AACF,CAAA;AAED,MAAM,YAAa,SAAQ,QAAQ,CAAA;AAGjC,IAAA,WAAA,CAAY,OAAiB,EAAA;AAC3B,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;KACzB;AAEM,IAAA,MAAM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE,EAAA;QACzE,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACxD,QAAA,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;KAC7B;AACF,CAAA;AAED,MAAM,aAAc,SAAQ,QAAQ,CAAA;AAGlC,IAAA,WAAA,CAAY,KAAiB,EAAA;AAC3B,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;KACrB;AAEM,IAAA,MAAM,IAAI,CACf,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAC5D,QAAA,OAAO,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACrD;AACF,CAAA;AAED,MAAM,WAAY,SAAQ,QAAQ,CAAA;AAGhC,IAAA,WAAA,CAAY,QAAkB,EAAA;AAC5B,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;KAC3B;AAEM,IAAA,IAAI,CAAC,MAAoB,EAAE,OAAA,GAAiC,EAAE,EAAA;AACnE,QAAA,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,IAA4B,KACF;YAC1B,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;AACtC,SAAC,CAAC;QACF,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;KAC5D;AACF,CAAA;AAED,MAAM,cAAe,SAA
Q,QAAQ,CAAA;IAInC,WAAY,CAAA,MAAgC,EAAE,IAAY,EAAA;AACxD,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;AACtB,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;KACnB;AAEM,IAAA,MAAM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE,EAAA;QACzE,MAAM,MAAM,GAAkC,EAAE,CAAC;AACjD,QAAA,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;AAC/B,QAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;AAC9B,YAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;AAC3D,gBAAA,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAC7D,aAAA;AACF,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AACF;;ACtcD;AACA;AAOgB,SAAA,WAAW,CAAC,CAAa,EAAE,CAAa,EAAA;IACtD,IAAI,CAAC,KAAK,CAAC;AAAE,QAAA,OAAO,IAAI,CAAC;;AAEzB,IAAA,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI;AAAE,QAAA,OAAO,KAAK,CAAC;AACzC,IAAA,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,MAAM;AAAE,QAAA,OAAO,KAAK,CAAC;AAExC,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACjC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAAE,YAAA,OAAO,KAAK,CAAC;AACjC,KAAA;AACD,IAAA,OAAO,IAAI,CAAC;AACd;;AClBA;MA6Ba,UAAU,CAAA;AAuCrB,IAAA,WAAA,CACE,UAAwB,EACxB,YAA2B,EAC3B,kBAA2B,EAC3B,uBAAgC,EAAA;AAEhC,QAAA,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;AAC9B,QAAA,IAAI,CAAC,aAAa,GAAG,YAAY,IAAI,UAAU,CAAC;AAChD,QAAA,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;AAC1B,QAAA,IAAI,CAAC,YAAY,GAAG,kBAAkB,IAAI,CAAC,CAAC;AAC5C,QAAA,IAAI,CAAC,YAAY,GAAG,uBAAuB,IAAI,CAAC,CAAC;AACjD,QAAA,IAAI,CAAC,mBAAmB,GAAG,kBAAkB,IAAI,CAAC,CAAC;KACpD;AAhCD,IAAA,IAAW,WAAW,GAAA;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;KAC1B;AAGD,IAAA,IAAW,WAAW,GAAA;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;KAC1B;AA2BO,IAAA,MAAM,UAAU,CAAC,OAAA,GAA4B,EAAE,EAAA;AACrD,QAAA,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,eAAe,CAAC,MAAM,EAAE;YACzF,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;AACH,QAAA,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,eAAe,CAAC,EAAE;AACzC,YAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;AAChD,SAAA;;;AAID,QAAA,IAAI,CAAC,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,UAAU,CAAC,UAAU,EAAE;YACnF,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;;QAGH,MAAM,KAAK,GAAG,IAAI,CAAC,SAAU,CAAC,cAAc,CAAC,CAAC;AAC9C,QAAA,IAAI,EAAE,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,EAAE;AAChE,YAAA,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;AAC7C,SAAA;;AAGD,QAAA,IAAI,CAAC,WAAW,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,qBAAqB,EAAE;YAC5F,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;;AAGH,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,CAAC,CAAC;QAC5D,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;AAE7C,QAAA,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,EAAE;AAC3B,YAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;AAC1E,SAAA;QAED,IAAI,CAAC,sBAAsB,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;YACxE,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;;AAEH,QAAA,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;AAElF,QAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;AAC9C,YAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,EAAE;AAC1C,gBAAA,MAAM,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;gBAClF,IAAI,CAAC,sBAAuB,EAAE,CAAC;AAChC,aAAA;AACF,SAAA;KACF;IAEM,OAAO,GAAA;QACZ,OAAO,CAAC,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,CAAC;KAC/D;IAEa,YAAY,CACxB,UAA4B,EAAE,EAAA;;AAE9B,YAAA,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;AACtB,gBAAA,MAAAuB,aAAA,CAAM,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA,CAAC;AAChC,aAAA;AAED,YAAA,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE;AACrB,gBAAA
,MAAM,MAAM,GAAG,MAAMA,aAAA,CAAA,IAAI,CAAC,SAAU,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;oBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,iBAAA,CAAC,CAAA,CAAC;gBAEH,IAAI,CAAC,sBAAuB,EAAE,CAAC;gBAC/B,IAAI,CAAC,YAAa,EAAE,CAAC;AAErB,gBAAA,IAAI,IAAI,CAAC,sBAAsB,KAAK,CAAC,EAAE;AACrC,oBAAA,MAAM,MAAM,GAAG,MAAMA,aAAA,CAAA,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,WAAW,EAAE,qBAAqB,EAAE;wBACtF,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,qBAAA,CAAC,CAAA,CAAC;AAEH,oBAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;AACzE,oBAAA,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;oBAEtB,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,WAAY,EAAE,MAAM,CAAC,EAAE;AAC3C,wBAAA,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;AACrD,qBAAA;oBAED,IAAI;wBACF,IAAI,CAAC,sBAAsB,GAAG,MAAMA,aAAA,CAAA,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;4BACxE,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,yBAAA,CAAC,CAAA,CAAC;AACJ,qBAAA;AAAC,oBAAA,OAAO,GAAQ,EAAE;;AAEjB,wBAAA,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC;AACjC,qBAAA;AAED,oBAAA,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,EAAE;;AAEpC,wBAAA,MAAAA,aAAA,CAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAA,CAAC;AACnF,qBAAA;AACF,iBAAA;gBACD,MAAM,MAAAA,aAAA,CAAA,MAAM,CAAA,CAAC;AACd,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AACF;;AClLD;AACA;MAesB,YAAY,CAAA;AAGjC;;ACnBD;AAMA,MAAM,WAAW,GAAG,IAAIpB,0BAAU,CAAC,2CAA2C,CAAC,CAAC;AAE1E,MAAO,sBAAuB,SAAQ,YAAY,CAAA;AAWtD,IAAA,WAAA,CAAY,QAA+B,EAAA;AACzC,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;AAC1B,QAAA,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;KACpB;AAXO,IAAA,YAAY,CAAC,IAAqB,EAAA;AACxC,QAAA,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;AAC5B,YAAA,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;AAOD,IAAA,IAAW,QAAQ,GAAA;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;KACvB;AACM,IAAA,MAAM,IAAI,CAAC,IAAY,EAAE,UAAmC,EAAE,EAAA;;AACnE,QAAA,IAAI,MAAA,OAAO,CAAC,WAAW,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,OAAO,EAAE;AAChC,YAAA,MAAM,WAAW,CAAC;AACnB,SAAA;QAED,IAAI,IAAI,GAAG,CAAC,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,sCAAsC,IAAI,CAAA,CAAE,CAAC,CAAC;AAC/D,SAAA;QAED,IAAI,IAAI,KAAK,CAAC,EAAE;YACd,OAAO,IAAI,UAAU,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE;AAC5B,YAAA,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;AAC/C,SAAA;;QAED,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACxC,QAAA,IAAI,KAAK,EAAE;AACT,YAAA,IAAI,CAAC,SAAS,IAAI,KAAK,CAAC,MAAM,CAAC;;AAE/B,YAAA,OAAO,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;AACjC,SAAA;AAAM,aAAA;;YAEL,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,KAAI;;gBAErC,MAAM,OAAO,GAAe,MAAK;oBAC/B,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;oBAC5D,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBACvD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;oBACrD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBAEvD,IAAI,OAAO,CAAC,WAAW,EAAE;wBACvB,OAAO,CAAC,WAAY,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AACjE,qBAAA;AACH,iBAAC,CAAC;gBAEF,MAAM,gBAAgB,GAAe,MAAK;oBACxC,MAAM,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAChD,oBAAA,IAAI,aAAa,EAAE;AACjB,wBAAA,IAAI,CAAC,SAAS,IAAI,aAAa,CAAC,MAAM,CAAC;AACvC,wBAAA,OAAO,EAAE,CAAC;;wBAEV,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC,CAAC;AAC3C,qBAAA;AACH,iBAAC,CAAC;gBAEF,MAAM,cAAc,GAAe,MAAK;AACtC,oBAAA,OAAO,EAAE,CAAC;AACV,oBAAA,MAAM,EAAE,CAAC;AACX,iBAAC,CAAC;gBAEF,MAAM,YAAY,GAAe,MAAK;AACpC,oBAAA,OAAO,EAAE,CAAC;oBACV,MAAM,CAAC,WAAW,CAAC,CAAC;AACtB,iBAAC,CAAC;gBAEF,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAChD,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;gBAC3C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,C
AAC;gBAC7C,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,OAAO,CAAC,WAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AAC9D,iBAAA;;AAEH,aAAC,CAAC,CAAC;AACJ,SAAA;KACF;AACF;;AC/FD;AA6BA;;;;AAIG;AACG,MAAO,oBAAqB,SAAQmB,eAAQ,CAAA;AAQhD;;;;;AAKG;IACH,WAAmB,CAAA,MAA6B,EAAE,OAAA,GAAuC,EAAE,EAAA;AACzF,QAAA,KAAK,EAAE,CAAC;QAXF,IAAU,CAAA,UAAA,GAAY,IAAI,CAAC;AAYjC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACrB,QAAA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;AACrC,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AAC/B,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,sBAAsB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AAC1E,QAAA,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;KACpF;IAEM,KAAK,GAAA;QACV,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,KAAI;AAChC,gBAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;AAC1B,aAAC,CAAC,CAAC;AACJ,SAAA;KACF;AAEO,IAAA,MAAM,YAAY,GAAA;AACxB,QAAA,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;AACxB,QAAA,IAAI,QAAQ,CAAC;QACb,GAAG;YACD,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACtC,IAAI,QAAQ,CAAC,IAAI,EAAE;gBACjB,MAAM;AACP,aAAA;AACD,YAAA,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC;AAC3B,YAAA,MAAM,MAAM,GAAI,GAAW,CAAC,OAAO,CAAC;AACpC,YAAA,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AAC9B,gBAAA,MAAM,KAAK,CAAC,gCAAgC,CAAC,CAAC;AAC/C,aAAA;AAED,YAAA,QAAQ,MAAM;AACZ,gBAAA,KAAK,0DAA0D;AAC7D,oBAAA;AACE,wBAAA,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;AAC/B,wBAAA,IAAI,IAAI,YAAY,UAAU,KAAK,KAAK,EAAE;AACxC,4BAAA,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACpD,yBAAA;AACD,wBAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE;AACjC,4BAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;AACxB,yBAAA;AACF,qBAAA;oBACD,MAAM;AACR,gBAAA,KAAK,wDAAwD;AAC3D,oBAAA;AACE,wBAAA,MAAM,YAAY,GAAI,GAAW,CAAC,YAAY,CAAC;AAC/C,wBAAA,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;AACpC,4BAAA,MAAM,KAAK,CAAC,+CAA+C,CAAC,CAAC;AAC9D,yBAAA;wBACD,IAAI,IAAI,CAAC,UAAU,EAAE;4BACnB,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC,CAAC;AAChD,yBAAA;AACF,qBAAA;oBACD,MAAM;AACR,gBAAA,KAAK,mDAAmD;oBACtD,IAAI,IAAI,CAAC,UAAU,EAAE;AACnB,wBAAA,MAAM,UAAU,GAAI,GAAW,CAAC,UAAU,CAAC;AAC3C,wBAAA,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;AAClC,4BAAA,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;AACvD,yBAAA;wBACD,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC,CAAC;AAC9C,qBAAA;AACD,oBAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;AACR,gBAAA,KAAK,qDAAqD;oBACxD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,wBAAA,MAAM,KAAK,GAAI,GAAW,CAAC,KAAK,CAAC;AACjC,wBAAA,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;AAC9B,4BAAA,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACpD,yBAAA;AACD,wBAAA,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;AAC/B,wBAAA,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;AAC5B,4BAAA,MAAM,KAAK,CAAC,oCAAoC,CAAC,CAAC;AACnD,yBAAA;AACD,wBAAA,MAAM,WAAW,GAAI,GAAW,CAAC,WAAW,CAAC;AAC7C,wBAAA,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;AACnC,4BAAA,MAAM,KAAK,CAAC,2CAA2C,CAAC,CAAC;AAC1D,yBAAA;AACD,wBAAA,MAAM,QAAQ,GAAI,GAAW,CAAC,QAAQ,CAAC;AACvC,wBAAA,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAChC,4BAAA,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;AACvD,yBAAA;wBACD,IAAI,CAAC,OAAO,CAAC;4BACX,QAAQ;4BACR,IAAI;AACJ,4BAAA,OAAO,EAAE,KAAK;4BACd,WAAW;AACZ,yBAAA,CAAC,CAAC;AACJ,qBAAA;oBACD,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,KAAK,CAAC,CAAA,eAAA,EAAkB,MAAM,CAAA,yBAAA,CAA2B,CAAC,CAAC;AACpE,aAAA;SACF,QAAQ,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;KAC9C;AACF;;AC/ID;AAkBA;;;;;AAKG;MACU,iBAAiB,CAAA;AAkY5B;;;;;AAKG;IACH,WACE,CAAA,gBAAwC,EACxC,OAAA,GAAuC,EAAE,EAAA;AAEzC,QAAA,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;AACzC,QAAA,IAAI,CAAC,kBAAkB,GAAG,IAAI,oBAAoB,CAChD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,OAAO,CACR,CAAC;KACH;AAhZD;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;A
AKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;;;;;;;AAWG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,SAAS,CAAC;KAClB;AAED;;;;;AAKG;AACH,IAAA,IAAW,MAAM,GAAA;AACf,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;KACrC;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,qBAAqB,GAAA;AAC9B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;KACpD;AAED;;;;;;AAMG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;AAKG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;;AAKG;AACH,IAAA,IAAW,uBAAuB,GAAA;AAChC,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;KACtD;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,iBAAiB,GAAA;AAC1B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;KAChD;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,cAAc,GAAA;AACvB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;KAC7C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,OAAO,GAAA;AAChB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;KACtC;AAED;;;;;AAKG;AACH,IAAA,IAAW,mBAAmB,GAAA;AAC5B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;KAClD;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,SAAS,CAAC;KAClB;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,kBAAkB,GAAA;QAC3B,OAAOtB,eAAM,GAAG,IAAI,CAAC,kBAAkB,GAAG,SAAS,CAAC;KACrD;AAED;;AAEG;AACH,IAAA,IAAW,SAAS,GAAA;AAGlB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAqBF;;AC1aD;AAqGA;;;AAGG;AACSwB,+BAcX;AAdD,CAAA,UAAY,aAAa,EAAA;AACvB;;AAEG;AACH,IAAA,aAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb;;;AAGG;AACH,IAAA,aAAA,CAAA,SAAA,CAAA,GAAA,SAAmB,CAAA;AACrB,CAAC,EAdWA,qBAAa,KAAbA,qBAAa,GAcxB,EAAA,CAAA,CAAA,CAAA;AAED;;;;
AAIG;AACSC,qCA6CX;AA7CD,CAAA,UAAY,mBAAmB,EAAA;AAC7B;;AAEG;AACH,IAAA,mBAAA,CAAA,IAAA,CAAA,GAAA,IAAS,CAAA;AACT;;AAEG;AACH,IAAA,mBAAA,CAAA,IAAA,CAAA,GAAA,IAAS,CAAA;AACT;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACb,CAAC,EA7CWA,2BAAmB,KAAnBA,2BAAmB,GA6C9B,EAAA,CAAA,CAAA,CAAA;AAEK,SAAU,YAAY,CAC1B,IAA8D,EAAA;IAE9D,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,OAAO,IAAkB,CAAC;AAC5B,CAAC;AAEe,SAAA,oBAAoB,CAAC,GAAwB,EAAE,OAAgB,EAAA;AAC7E,IAAA,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE;AACnB,QAAA,MAAM,IAAI,UAAU,CAAC,2DAA2D,CAAC,CAAC;AACnF,KAAA;AAED,IAAA,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,mBAAmB,EAAE;AACnC,QAAA,GAAG,CAAC,mBAAmB,GAAG,wBAAwB,CAAC;AACpD,KAAA;AACH,CAAC;AA2HD;;AAEG;AACSC,qCASX;AATD,CAAA,UAAY,mBAAmB,EAAA;AAC7B;;AAEG;AACH,IAAA,mBAAA,CAAA,oBAAA,CAAA,GAAA,oCAAyD,CAAA;AACzD;;AAEG;AACH,IAAA,mBAAA,CAAA,wBAAA,CAAA,GAAA,yCAAkE,CAAA;AACpE,CAAC,EATWA,2BAAmB,KAAnBA,2BAAmB,GAS9B,EAAA,CAAA,CAAA;;ACtUD;AACA;AA8EA;;;;;AAKG;AACG,SAAU,sBAAsB,CACpC,QAAqF,EAAA;IAErF,MAAM,SAAS,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,MAAM;QAC5E,MAAM,EAAE,CAAC,CAAC,KAAK;AACf,QAAA,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;AACvB,KAAA,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,UAAU,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,MAAM;QAC9E,MAAM,EAAE,CAAC,CAAC,KAAK;AACf,QAAA,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;AACvB,KAAA,CAAC,CAAC,CAAC;IAEJ,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,KACX,SAAS;QACT,UAAU,EACV,SAAS,EACJ,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CAAC,SAAS,CAAA,EAAA,EACrB,UAAU,EAAE;gBACV,SAAS;gBACT,UAAU;aACX,EAEH,CAAA,EAAA,CAAA,CAAA;AACJ;;AC9GA;AA0EA;;;;;AAKG;AACG,MAAO,0BAA2B,SAAQC,cAG/C,CAAA;AAGC,IAAA,WAAA,CAAY,OAA0C,EAAA;AACpD,QAAA,MAAM,EACJ,UAAU,EACV,UAAU,EACV,YAAY,GAAG,KAAK,EACpB,UAAU,EACV,UAAU,EACV,uBAAuB,GACxB,GAAG,OAAO,CAAC;AAEZ,QAAA,IAAI,KAAgD,CAAC;AAErD,QAAA,IAAI,UAAU,EAAE;YACd,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC;AACtC,SAAA;AAED,QAAA,MAAM,SAAS,GAAG,qCAAqC,CAClD,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,KAAK,KACR,UAAU;YACV,UAAU;AACV,YAAA,uBAAuB,IACvB,CAAC;QAEH,KAAK,CAAC,SAAS,CAAC,CAAC;AAEjB,QAAA,IAAI,OAAO,UAAU,KAAK,UAAU,EAAE;AACpC,YAAA,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;AAC7B,SAAA;AAED,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;IAEM,KAAK,GAAA;AACV,QAAA,OAAOpB,cAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;KACjC;AACF,CAAA;AAED;;;;;AAKG;AACH,MAAM,MAAM,GAAgD,eAAe,MAAM,CAE/E,OAAO,GAAG,EAAE,EAAA;AAEZ,IAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;AACzB,IAAA,MAAM,EAAE,MAAM,EAAE,GAAG,KAAK,CAAC;IACzB,IAAI,KAAK,CAAC,WAAW,EAAE;AACrB,QAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACrD,KAAA;IAED,IAAI,CAAC,MAAM,EAAE;AACX,QAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AACzB,QAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACrD,KAAA;;AAGD,IAAA,MAAM,KAAK,CAAC,UAAU,CAAC,gBAAgB,CAAC,MAAM,EAAE;QAC9C,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,KAAA,CAAC,CAAC;AACH,IAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAEzB,IAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;AAKG;AACH,MAAM,MAAM,GAAgD,eAAe,MAAM,CAE/E,OAAO,GAAG,EAAE,EAAA;AAEZ,IAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACzB,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,uBAAuB,EAAE
,GAAG,KAAK,CAAC;AAElE,IAAA,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;AACpB,QAAA,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,gBAAgB,CAAC,UAAU,EAAE,uBAAuB,CAAC,CAAC;;AAGtF,QAAA,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;AAC7B,QAAA,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,EAAE;AACnC,YAAA,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;AACtB,YAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,SAAA;AACF,KAAA;AAAM,SAAA,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE;QAC7B,IAAI;AACF,YAAA,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;AAC1F,YAAA,MAAM,EAAE,UAAU,EAAE,YAAY,EAAE,GAAG,MAAM,CAAC;AAC5C,YAAA,MAAM,gBAAgB,GAAG,KAAK,CAAC,YAAY,CAAC;AAC5C,YAAA,IAAI,YAAY,EAAE;AAChB,gBAAA,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC;AACnC,aAAA;YACD,IACE,UAAU,KAAK,SAAS;AACxB,gBAAA,YAAY,KAAK,gBAAgB;AACjC,gBAAA,OAAO,OAAO,CAAC,YAAY,KAAK,UAAU,EAC1C;;AAEA,gBAAA,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;AAC7B,aAAA;iBAAM,IAAI,UAAU,KAAK,SAAS,EAAE;AACnC,gBAAA,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;AACtB,gBAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,aAAA;iBAAM,IAAI,UAAU,KAAK,QAAQ,EAAE;AAClC,gBAAA,KAAK,CAAC,KAAK,GAAG,IAAI,KAAK,CACrB,CAAA,+BAAA,EAAkC,MAAM,CAAC,qBAAqB,IAAI,SAAS,CAAA,CAAA,CAAG,CAC/E,CAAC;AACF,gBAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC;AAClB,YAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,SAAA;AACF,KAAA;AAED,IAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;AAKG;AACH,MAAM,QAAQ,GAAkD,SAAS,QAAQ,GAAA;AAG/E,IAAA,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC,GAAG,EAAE,KAAK,KAAI;;QAE1D,IAAI,GAAG,KAAK,YAAY,EAAE;AACxB,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;AACf,KAAC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF;;;AAGG;AACH,SAAS,qCAAqC,CAC5C,KAAoC,EAAA;IAEpC,OAAO;QACL,KAAK,EAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAO,KAAK,CAAE;QACnB,MAAM;QACN,QAAQ;QACR,MAAM;KACP,CAAC;AACJ;;AChPA;AACA;AAkBA;;;;;;AAMG;AACG,SAAU,aAAa,CAAC,MAAa,EAAA;AACzC,IAAA,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;AACrB,QAAA,MAAM,IAAI,UAAU,CAAC,CAAA,sCAAA,CAAwC,CAAC,CAAC;AAChE,KAAA;IACD,IAAI,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,EAAE;AACrC,QAAA,MAAM,IAAI,UAAU,CAClB,CAAA,iGAAA,CAAmG,CACpG,CAAC;AACH,KAAA;IACD,OAAO,MAAM,CAAC,KAAK;AACjB,UAAE,CAAA,MAAA,EAAS,MAAM,CAAC,MAAM,CAAI,CAAA,EAAA,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,CAAE,CAAA;AAC9D,UAAE,CAAS,MAAA,EAAA,MAAM,CAAC,MAAM,GAAG,CAAC;AAChC;;ACtCA;AAYA;;AAEG;AACH,IAAK,WAGJ,CAAA;AAHD,CAAA,UAAK,WAAW,EAAA;AACd,IAAA,WAAA,CAAA,WAAA,CAAA,MAAA,CAAA,GAAA,CAAA,CAAA,GAAA,MAAI,CAAA;AACJ,IAAA,WAAA,CAAA,WAAA,CAAA,OAAA,CAAA,GAAA,CAAA,CAAA,GAAA,OAAK,CAAA;AACP,CAAC,EAHI,WAAW,KAAX,WAAW,GAGf,EAAA,CAAA,CAAA,CAAA;AAED;;;;AAIG;MACU,KAAK,CAAA;AAqChB;;;AAGG;AACH,IAAA,WAAA,CAAmB,cAAsB,CAAC,EAAA;AAnC1C;;AAEG;QACK,IAAO,CAAA,OAAA,GAAW,CAAC,CAAC;AAE5B;;AAEG;QACK,IAAS,CAAA,SAAA,GAAW,CAAC,CAAC;AAE9B;;AAEG;QACK,IAAM,CAAA,MAAA,GAAW,CAAC,CAAC;AAE3B;;AAEG;QACK,IAAU,CAAA,UAAA,GAAgB,EAAE,CAAC;AAErC;;;AAGG;AACK,QAAA,IAAA,CAAA,KAAK,GAAgB,WAAW,CAAC,IAAI,CAAC;QAY5C,IAAI,WAAW,GAAG,CAAC,EAAE;AACnB,YAAA,MAAM,IAAI,UAAU,CAAC,mCAAmC,CAAC,CAAC;AAC3D,SAAA;AACD,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,OAAO,GAAG,IAAIqB,mBAAY,EAAE,CAAC;KACnC;AAED;;;;AAIG;AACI,IAAA,YAAY,CAAC,SAAoB,EAAA;AACtC,QAAA,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,YAAW;YAC9B,IAAI;gBACF,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,MAAM,SAAS,EAAE,CAAC;gBAClB,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,IAAI,CAAC,SAAS,EAAE,CAAC;gBACjB,IAAI,CAAC,eAAe,EAAE,CAAC;AACxB,aAAA;AAAC,YAAA,OAAO,KAAU,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;AACnC,aAAA;AACH,SAAC,CAAC,CAAC;KACJ;AAED;;;AAGG;AACI,IAAA,MAAM,EAAE,GAAA;AACb,QAAA,IAAI,IAAI,
CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;AAChC,YAAA,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;AAC1B,SAAA;QAED,IAAI,CAAC,eAAe,EAAE,CAAC;QAEvB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;YAC3C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YAEnC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,KAAI;AACjC,gBAAA,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;gBAC/B,MAAM,CAAC,KAAK,CAAC,CAAC;AAChB,aAAC,CAAC,CAAC;AACL,SAAC,CAAC,CAAC;KACJ;AAED;;;AAGG;IACK,aAAa,GAAA;QACnB,IAAI,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;YACxC,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;AACvC,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;;;AAIG;IACK,eAAe,GAAA;AACrB,QAAA,IAAI,IAAI,CAAC,KAAK,KAAK,WAAW,CAAC,KAAK,EAAE;YACpC,OAAO;AACR,SAAA;QAED,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;AAC5C,YAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YAC5B,OAAO;AACR,SAAA;AAED,QAAA,OAAO,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;AACtC,YAAA,MAAM,SAAS,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;AACvC,YAAA,IAAI,SAAS,EAAE;AACb,gBAAA,SAAS,EAAE,CAAC;AACb,aAAA;AAAM,iBAAA;gBACL,OAAO;AACR,aAAA;AACF,SAAA;KACF;AACF;;ACrJD;AAUA;;AAEG;AACG,MAAO,aAAc,SAAQN,eAAQ,CAAA;AAgBzC;;;;;;AAMG;AACH,IAAA,WAAA,CACU,OAAiB,EACjB,UAAkB,EAC1B,OAA8B,EAAA;QAE9B,KAAK,CAAC,OAAO,CAAC,CAAC;QAJP,IAAO,CAAA,OAAA,GAAP,OAAO,CAAU;QACjB,IAAU,CAAA,UAAA,GAAV,UAAU,CAAQ;AAI1B,QAAA,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;AACnC,QAAA,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;AACrB,QAAA,IAAI,CAAC,iBAAiB,GAAG,CAAC,CAAC;;QAG3B,IAAI,aAAa,GAAG,CAAC,CAAC;AACtB,QAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;AAC9B,YAAA,aAAa,IAAI,GAAG,CAAC,UAAU,CAAC;AACjC,SAAA;AACD,QAAA,IAAI,aAAa,GAAG,IAAI,CAAC,UAAU,EAAE;AACnC,YAAA,MAAM,IAAI,KAAK,CAAC,iEAAiE,CAAC,CAAC;AACpF,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,KAAK,CAAC,IAAa,EAAA;AACxB,QAAA,IAAI,IAAI,CAAC,iBAAiB,IAAI,IAAI,CAAC,UAAU,EAAE;AAC7C,YAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACjB,SAAA;QAED,IAAI,CAAC,IAAI,EAAE;AACT,YAAA,IAAI,GAAG,IAAI,CAAC,qBAAqB,CAAC;AACnC,SAAA;QAED,MAAM,UAAU,GAAa,EAAE,CAAC;QAChC,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC,UAAU,EAAE;;YAE3D,MAAM,yBAAyB,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC;AAC3E,YAAA,MAAM,6BAA6B,GACjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,UAAU,GAAG,IAAI,CAAC,yBAAyB,CAAC;YAC7E,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,6BAA6B,EAAE,yBAAyB,CAAC,CAAC;AACrF,YAAA,IAAI,SAAS,GAAG,IAAI,GAAG,CAAC,EAAE;;gBAExB,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,IAAI,GAAG,CAAC,CAAC;gBACtD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;AAC3F,gBAAA,IAAI,CAAC,iBAAiB,IAAI,IAAI,GAAG,CAAC,CAAC;AACnC,gBAAA,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;gBACrC,CAAC,GAAG,IAAI,CAAC;gBACT,MAAM;AACP,aAAA;AAAM,iBAAA;;AAEL,gBAAA,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,SAAS,CAAC;gBACvD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;gBAC3F,IAAI,SAAS,KAAK,6BAA6B,EAAE;;AAE/C,oBAAA,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;oBACnC,IAAI,CAAC,WAAW,EAAE,CAAC;AACpB,iBAAA;AAAM,qBAAA;AACL,oBAAA,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;AACtC,iBAAA;AACD,gBAAA,IAAI,CAAC,iBAAiB,IAAI,SAAS,CAAC;gBACpC,CAAC,IAAI,SAAS,CAAC;AAChB,aAAA;AACF,SAAA;AAED,QAAA,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AACtC,SAAA;AAAM,aAAA,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,SAAA;KACF;AACF;;AC5GD;AAMA;;AAEG;AACH;AACA,MAAM,eAAe,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC;AAE/D;;;;;;;AAOG;MACU,YAAY,CAAA;AA4CvB,IAAA,WAAA,CAAY,QAAgB,EAAE,OAAkB,EAAE,WAAoB,EAAA;AA3C
tE;;;AAGG;QACK,IAAO,CAAA,OAAA,GAAa,EAAE,CAAC;AAwC7B,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;;QAGf,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC;QACxD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;AAClC,YAAA,IAAI,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,GAAG,QAAQ,GAAG,eAAe,GAAG,eAAe,CAAC;YAC7E,IAAI,GAAG,KAAK,CAAC,EAAE;gBACb,GAAG,GAAG,eAAe,CAAC;AACvB,aAAA;AACD,YAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,SAAA;AAED,QAAA,IAAI,OAAO,EAAE;AACX,YAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,WAAY,CAAC,CAAC;AAClC,SAAA;KACF;AA5CD;;AAEG;AACH,IAAA,IAAW,IAAI,GAAA;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAyCD;;;;;;;;AAQG;IACI,IAAI,CAAC,OAAiB,EAAE,WAAmB,EAAA;AAChD,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;AAElD,QAAA,IAAI,CAAC,GAAG,CAAC,EACP,CAAC,GAAG,CAAC,EACL,YAAY,GAAG,CAAC,EAChB,YAAY,GAAG,CAAC,EAChB,cAAc,GAAG,CAAC,CAAC;AACrB,QAAA,OAAO,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE;AAClC,YAAA,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC/B,YAAA,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,YAAY,EAAE,YAAY,CAAC,CAAC;YAElE,cAAc,IAAI,SAAS,CAAC;YAC5B,YAAY,IAAI,SAAS,CAAC;YAC1B,YAAY,IAAI,SAAS,CAAC;AAC1B,YAAA,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;AAClC,gBAAA,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;AAClB,aAAA;AACD,YAAA,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;AAClC,gBAAA,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;AAClB,aAAA;AACF,SAAA;;AAGD,QAAA,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACrB,QAAA,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;AACtB,YAAA,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AAC7C,SAAA;KACF;AAED;;;AAGG;IACI,iBAAiB,GAAA;QACtB,OAAO,IAAI,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;KACnD;AACF;;ACpID;AAgBA;;;;;;;;;;;;;;;;;;;;;AAqBG;MACU,eAAe,CAAA;AAuF1B;;;;;;;;;;;AAWG;IACH,WACE,CAAA,QAAkB,EAClB,UAAkB,EAClB,UAAkB,EAClB,eAAgC,EAChC,WAAmB,EACnB,QAAyB,EAAA;AAlF3B;;AAEG;AACc,QAAA,IAAA,CAAA,OAAO,GAAiB,IAAIM,mBAAY,EAAE,CAAC;AAO5D;;AAEG;QACK,IAAM,CAAA,MAAA,GAAW,CAAC,CAAC;AAE3B;;AAEG;QACK,IAAW,CAAA,WAAA,GAAY,KAAK,CAAC;AAErC;;AAEG;QACK,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEjC;;AAEG;QACK,IAAyB,CAAA,yBAAA,GAAW,CAAC,CAAC;AAO9C;;AAEG;QACK,IAAU,CAAA,UAAA,GAAW,CAAC,CAAC;AAE/B;;;;;;AAMG;QACK,IAAmB,CAAA,mBAAA,GAAa,EAAE,CAAC;AAE3C;;AAEG;QACK,IAAgB,CAAA,gBAAA,GAAW,CAAC,CAAC;AAErC;;AAEG;QACK,IAAQ,CAAA,QAAA,GAAmB,EAAE,CAAC;AAEtC;;AAEG;QACK,IAAQ,CAAA,QAAA,GAAmB,EAAE,CAAC;QAsBpC,IAAI,UAAU,IAAI,CAAC,EAAE;AACnB,YAAA,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,CAAA,CAAE,CAAC,CAAC;AACpF,SAAA;QAED,IAAI,UAAU,IAAI,CAAC,EAAE;AACnB,YAAA,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,CAAA,CAAE,CAAC,CAAC;AACpF,SAAA;QAED,IAAI,WAAW,IAAI,CAAC,EAAE;AACpB,YAAA,MAAM,IAAI,UAAU,CAAC,iDAAiD,WAAW,CAAA,CAAE,CAAC,CAAC;AACtF,SAAA;AAED,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;KAC1B;AAED;;;;AAIG;AACI,IAAA,MAAM,EAAE,GAAA;QACb,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;YAC3C,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,KAAI;gBAChC,IAAI,GAAG,OAAO,IAAI,KAAK,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC;AAC1E,gBAAA,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC;AAEhC,gBAAA,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;AACvB,oBAAA,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;AACvB,iBAAA;AACH,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,KAAI;gBAChC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,
EAAE,GAAG,CAAC,CAAC;AAClC,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;AAC3B,gBAAA,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;AACxB,gBAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAChC,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,KAAI;AAC/B,gBAAA,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;AACpB,gBAAA,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;gBACtB,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,EAAE,MAAK;AAC/B,gBAAA,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,IAAI,CAAC,uBAAuB,EAAE,CAAC;oBAC/B,OAAO;AACR,iBAAA;gBAED,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,yBAAyB,KAAK,CAAC,EAAE;AAC5D,oBAAA,IAAI,IAAI,CAAC,gBAAgB,GAAG,CAAC,IAAI,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,UAAU,EAAE;AACxE,wBAAA,MAAM,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;AACzD,wBAAA,IAAI,CAAC,eAAe,CAAC,MAAM,MAAM,CAAC,iBAAiB,EAAE,EAAE,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC;6BAC7E,IAAI,CAAC,OAAO,CAAC;6BACb,KAAK,CAAC,MAAM,CAAC,CAAC;AAClB,qBAAA;AAAM,yBAAA,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;wBACnD,OAAO;AACR,qBAAA;AAAM,yBAAA;AACL,wBAAA,OAAO,EAAE,CAAC;AACX,qBAAA;AACF,iBAAA;AACH,aAAC,CAAC,CAAC;AACL,SAAC,CAAC,CAAC;KACJ;AAED;;;;AAIG;AACK,IAAA,oBAAoB,CAAC,IAAY,EAAA;AACvC,QAAA,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACpC,QAAA,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,MAAM,CAAC;KACtC;AAED;;;;AAIG;AACK,IAAA,kCAAkC,CAAC,MAAqB,EAAA;QAC9D,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,MAAM,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;AAC7F,SAAA;AAAM,aAAA;YACL,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;AAC9D,SAAA;AAED,QAAA,IAAI,CAAC,gBAAgB,IAAI,MAAM,CAAC,IAAI,CAAC;AACrC,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;;;;;;;AAQG;IACK,WAAW,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;AAC/C,YAAA,IAAI,MAAoB,CAAC;AAEzB,YAAA,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;AAC5B,gBAAA,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAG,CAAC;AAChC,gBAAA,IAAI,CAAC,kCAAkC,CAAC,MAAM,CAAC,CAAC;AACjD,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE;AACrC,oBAAA,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;oBACnD,IAAI,CAAC,UAAU,EAAE,CAAC;AACnB,iBAAA;AAAM,qBAAA;;AAEL,oBAAA,OAAO,KAAK,CAAC;AACd,iBAAA;AACF,aAAA;AAED,YAAA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC3B,IAAI,CAAC,uBAAuB,EAAE,CAAC;AAChC,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;;AAGG;AACK,IAAA,MAAM,uBAAuB,GAAA;AACnC,QAAA,IAAI,MAAgC,CAAC;QACrC,GAAG;AACD,YAAA,IAAI,IAAI,CAAC,yBAAyB,IAAI,IAAI,CAAC,WAAW,EAAE;gBACtD,OAAO;AACR,aAAA;AAED,YAAA,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;AAC/B,YAAA,IAAI,MAAM,EAAE;AACV,gBAAA,IAAI,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC;AACrC,aAAA;AACF,SAAA,QAAQ,MAAM,EAAE;KAClB;AAED;;;;AAIG;IACK,MAAM,sBAAsB,CAAC,MAAoB,EAAA;AACvD,QAAA,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC;QAEjC,IAAI,CAAC,yBAAyB,EAAE,CAAC;AACjC,QAAA,IAAI,CAAC,MAAM,IAAI,YAAY,CAAC;QAE5B,IAAI;YACF,MAAM,IAAI,CAAC,eAAe,CACxB,MAAM,MAAM,CAAC,iBAAiB,EAAE,EAChC,YAAY,EACZ,IAAI,CAAC,MAAM,GAAG,YAAY,CAC3B,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;YACjB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAChC,OAAO;AACR,SAAA;QAED,IAAI,CAAC,yBAAyB,EAAE,CAAC;AACjC,QAAA,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;AACzB,QAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;KAC/B;AAED;;;;AAIG;AACK,IAAA,WAAW,CAAC,MAAoB,EAAA;AACtC,QAAA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;AAC5D,YAAA,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC;AACxB,SAAA;KACF;AACF;;AC5UD;AAMA;;;;;;;;AAQG;AACI,eAAe,cAAc,CAClC,MAA6B,EAC7B,MAAc,EACd,MAAc,EACd,GAAW,EACX,QAAyB,EAAA;AAEzB,IAAA,IAAI,GAAG
,GAAG,CAAC,CAAC;AACZ,IAAA,MAAM,KAAK,GAAG,GAAG,GAAG,MAAM,CAAC;IAE3B,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;AAC3C,QAAA,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,MAAK;YACzB,IAAI,GAAG,IAAI,KAAK,EAAE;AAChB,gBAAA,OAAO,EAAE,CAAC;gBACV,OAAO;AACR,aAAA;AAED,YAAA,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;AACR,aAAA;AACD,YAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;;YAGD,MAAM,WAAW,GAAG,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,GAAG,KAAK,GAAG,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC;YAE5E,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,WAAW,CAAC,CAAC;YACnF,GAAG,IAAI,WAAW,CAAC;AACrB,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;YACpB,IAAI,GAAG,GAAG,KAAK,EAAE;gBACf,MAAM,CACJ,IAAI,KAAK,CACP,CAAA,4DAAA,EAA+D,GAAG,CAAA,aAAA,EAAgB,KAAK,CAAA,CAAE,CAC1F,CACF,CAAC;AACH,aAAA;AACD,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;AAC7B,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;AAQG;AACI,eAAe,eAAe,CACnC,MAA6B,EAC7B,MAAc,EACd,QAAyB,EAAA;AAEzB,IAAA,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAA,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC;IAEjC,OAAO,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,KAAI;AAC7C,QAAA,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,MAAK;AACzB,YAAA,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;AACR,aAAA;AACD,YAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;AAED,YAAA,IAAI,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,UAAU,EAAE;gBACnC,MAAM,CAAC,IAAI,KAAK,CAAC,4CAA4C,UAAU,CAAA,CAAE,CAAC,CAAC,CAAC;gBAC5E,OAAO;AACR,aAAA;AAED,YAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,EAAE,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;AAC5C,YAAA,GAAG,IAAI,KAAK,CAAC,MAAM,CAAC;AACtB,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;YACpB,OAAO,CAAC,GAAG,CAAC,CAAC;AACf,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;AAC7B,KAAC,CAAC,CAAC;AACL,CAAC;AAyBD;;;;;;;AAOG;AACI,eAAe,qBAAqB,CACzC,EAAyB,EACzB,IAAY,EAAA;IAEZ,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;QAC3C,MAAM,EAAE,GAAGC,aAAE,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAEtC,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,KAAI;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,SAAC,CAAC,CAAC;QAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,KAAI;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,SAAC,CAAC,CAAC;AAEH,QAAA,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAExB,QAAA,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AACd,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;AAIG;AACI,MAAM,MAAM,GAAGC,eAAI,CAAC,SAAS,CAACD,aAAE,CAAC,IAAI,CAAC,CAAC;AAEvC,MAAM,kBAAkB,GAAGA,aAAE,CAAC,gBAAgB;;AC+qBrD;;;AAGG;AACG,MAAO,UAAW,SAAQ,aAAa,CAAA;AAqF3C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;AAEhC,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAAC7B,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,
YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AAED,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;AACrB,QAAA,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,aAAa,EAAE,IAAI,CAAC,cAAc,EAAE;AAC3D,YAAA,IAAI,CAAC,+BAA+B,EAAE,EAAE;QAC1C,IAAI,CAAC,WAAW,GAAG,IAAIZ,MAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AAE9D,QAAA,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAW,CAAC;AACvF,QAAA,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,CAAW,CAAC;KAC1F;AArKD;;AAEG;AACH,IAAA,IAAW,IAAI,GAAA;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAED;;AAEG;AACH,IAAA,IAAW,aAAa,GAAA;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;KAC5B;AA2JD;;;;;;AAMG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;AAMG;AACI,IAAA,WAAW,CAAC,SAAiB,EAAA;AAClC,QAAA,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,SAAS,EACjC,SAAS,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,SAAS,CAC/C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;AAGG;IACI,mBAAmB,GAAA;QACxB,OAAO,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACtD;AAED;;;AAGG;IACI,kBAAkB,GAAA;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACrD;AAED;;;AAGG;IACI,iBAAiB,GAAA;QACtB,OAAO,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACpD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0DG;IACI,MAAM,QAAQ,CACnB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,OAAA,GAA+B,EAAE,EAAA;;QAEjC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAEhE,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAE5E,IAAI;AACF,YAAA,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,CACzC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,cAAc,EAAE;AACd,oBAAA,kBAAkB,EAAEnB,eAAM,GAAG,SAAS,GAAG,OAAO,CAAC,UAAU;iBAC5D,EACD,KAAK,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,SAAS,GAAG,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAC5E,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB,EAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACjC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;YAEH,MAAM,UAAU,GACX,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAA
G,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,EAAA,CAC5F,CAAC;;YAEF,IAAI,CAACA,eAAM,EAAE;AACX,gBAAA,OAAO,UAAU,CAAC;AACnB,aAAA;;;;;;YAOD,IAAI,OAAO,CAAC,gBAAgB,KAAK,SAAS,IAAI,OAAO,CAAC,gBAAgB,GAAG,CAAC,EAAE;;AAE1E,gBAAA,OAAO,CAAC,gBAAgB,GAAG,mCAAmC,CAAC;AAChE,aAAA;AAED,YAAA,IAAI,GAAG,CAAC,aAAa,KAAK,SAAS,EAAE;AACnC,gBAAA,MAAM,IAAI,UAAU,CAAC,CAAA,kEAAA,CAAoE,CAAC,CAAC;AAC5F,aAAA;AAED,YAAA,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE;AACb,gBAAA,MAAM,IAAI,UAAU,CAAC,CAAA,wDAAA,CAA0D,CAAC,CAAC;AAClF,aAAA;YAED,OAAO,IAAI,oBAAoB,CAC7B,UAAU,EACV,OAAO,KAAa,KAAoC;;AACtD,gBAAA,MAAM,sBAAsB,GAA+B;oBACzD,qBAAqB,EAAE,OAAO,CAAC,UAAU;AACzC,oBAAA,wBAAwB,EAAE;wBACxB,OAAO,EAAE,OAAO,CAAC,UAAW,CAAC,OAAO,IAAI,GAAG,CAAC,IAAI;AAChD,wBAAA,eAAe,EAAE,OAAO,CAAC,UAAW,CAAC,eAAe;AACpD,wBAAA,WAAW,EAAE,OAAO,CAAC,UAAW,CAAC,WAAW;AAC5C,wBAAA,iBAAiB,EAAE,OAAO,CAAC,UAAW,CAAC,iBAAiB;AACxD,wBAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa;AAC1C,qBAAA;oBACD,KAAK,EAAE,aAAa,CAAC;AACnB,wBAAA,KAAK,EAAE,MAAM,GAAG,GAAG,CAAC,aAAc,GAAG,KAAK;AAC1C,wBAAA,MAAM,EAAE,KAAK;qBACd,CAAC;oBACF,kBAAkB,EAAE,OAAO,CAAC,kBAAkB;oBAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB;oBAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ;oBAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB;iBACrC,CAAC;;;;;;;AASF,gBAAA,OAAO,CACL,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,CAC7B,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,sBAAsB,EACzB,EACF,kBAAmB,CAAC;AACxB,aAAC,EACD,MAAM,EACN,GAAG,CAAC,aAAc,EAClB;gBACE,gBAAgB,EAAE,OAAO,CAAC,gBAAgB;gBAC1C,UAAU,EAAE,OAAO,CAAC,UAAU;AAC/B,aAAA,CACF,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEqB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAA6B,EAAE,EAAA;AACjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;gBAChD,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,aAAA,CAAC,CAAC;AACH,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;AACf,YAAA,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;;AAExB,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AAAM,iBAAA,IACL,CAAC,CAAC,UAAU,KAAK,GAAG;AACpB,iBAAC,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,sCAAsC;AAC7D,oBAAA,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,yCAAyC,CAAC,EACpE;;AAEA,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAoC,EAAE,EAAA;;AAEtC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;YAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,aAAa,CAC9C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EAAA,EACjC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;YAEH,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,EAC
3F,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAA6B,EAAE,EAAA;;AACjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,MAAM,CAAA,MAAA,CAAA,MAAA,CAAA,EAClC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAA6B,EAAE,EAAA;;AAE/B,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,cAAc,EAAE;gBAC3C,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,wEAAwE;AAClF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,QAAQ,CAAC,OAAA,GAA+B,EAAE,EAAA;AACrD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAC5E,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBACpC,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;AAcG;AACI,IAAA,MAAM,cAAc,CACzB,eAAiC,EACjC,UAAqC,EAAE,EAAA;;AAEvC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,iBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,eAAe,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAGxC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AAC
I,IAAA,MAAM,WAAW,CACtB,QAAmB,EACnB,UAAkC,EAAE,EAAA;;AAEpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CACvC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACrC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,OAAO,CAAC,IAAU,EAAE,UAA8B,EAAE,EAAA;;AAC/D,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACnC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,KACrD,IAAI,EAAE,UAAU,CAAC,IAAI,CAAC,EAAA,CAAA,CACtB,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,OAAO,CAAC,OAAA,GAA8B,EAAE,EAAA;;AACnD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAA,MAAA,CAAA,MAAA,CAAA,EAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YACH,MAAM,eAAe,GAChB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CACX,EAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,IAAI,EAAE,MAAM,CAAC,EAAE,UAAU,EAAE,QAAQ,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE,EAAA,CACxD,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;AAKG;AACI,IAAA,kBAAkB,CAAC,cAAuB,EAAA;AAC/C,QAAA,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;KAClD;AAED;;;;;AAKG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAAqC,EAAE,EAAA;;AAEvC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,CAAA,MAAA,CAAA,MAAA,CAAA,EAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,
eAAe,EAAE,OAAO,CAAC,eAAe,EACrC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuEG;AACI,IAAA,MAAM,gBAAgB,CAC3B,UAAkB,EAClB,UAAuC,EAAE,EAAA;AAIzC,QAAA,MAAM,MAAM,GAAyB;AACnC,YAAA,gBAAgB,EAAE,CAAC,GAAG,IAAI,KAAK,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;AAC7D,YAAA,aAAa,EAAE,CAAC,GAAG,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;AACvD,YAAA,gBAAgB,EAAE,CAAC,GAAG,IAAI,KAAK,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;SAC9D,CAAC;AACF,QAAA,MAAM,MAAM,GAAG,IAAI,0BAA0B,CAAC;AAC5C,YAAA,UAAU,EAAE,MAAM;YAClB,UAAU;YACV,YAAY,EAAE,OAAO,CAAC,YAAY;YAClC,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,UAAU,EAAE,OAAO,CAAC,UAAU;AAC9B,YAAA,uBAAuB,EAAE,OAAO;AACjC,SAAA,CAAC,CAAC;;;AAIH,QAAA,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;AAEpB,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,gBAAgB,CAC3B,MAAc,EACd,UAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,MAAM,EAAA,MAAA,CAAA,MAAA,CAAA,EACnD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACtC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,eAAe,CAC1B,UAAkB,EAClB,UAAsC,EAAE,EAAA;;AAExC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,EAAA,MAAA,CAAA,MAAA,CAAA,EAClD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;AACpE,iBAAA,EACD,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,cAAc,EAAE,OAAO,CAAC,cAAc,EAAA,EACnC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,aAAa,CACxB,IAAkD,EAClD,UAA8B,EAAE,EAAA;;AAEhC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,YAAY,CAAC,IAAI,CAAE,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,M
AAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAE3C,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,EAAA,EACzC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;IA8CM,MAAM,gBAAgB,CAC3B,MAAwB,EACxB,MAAe,EACf,MAA6C,EAC7C,MAAA,GAAsC,EAAE,EAAA;AAExC,QAAA,IAAI,MAA0B,CAAC;QAC/B,IAAI,MAAM,GAAG,CAAC,CAAC;QACf,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,OAAO,GAAG,MAAM,CAAC;QACrB,IAAI,MAAM,YAAY,MAAM,EAAE;YAC5B,MAAM,GAAG,MAAM,CAAC;AAChB,YAAA,MAAM,GAAG,MAAM,IAAI,CAAC,CAAC;AACrB,YAAA,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AACjD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AACjD,YAAA,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AAChD,YAAA,OAAO,GAAI,MAAsC,IAAI,EAAE,CAAC;AACzD,SAAA;AACD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;AACF,YAAA,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;AACtB,gBAAA,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;AACvB,aAAA;AACD,YAAA,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,EAAE;AACzB,gBAAA,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC;AACvD,aAAA;AACD,YAAA,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;AAC3B,gBAAA,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;AACvD,aAAA;YAED,IAAI,MAAM,GAAG,CAAC,EAAE;AACd,gBAAA,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,IAAI,KAAK,IAAI,KAAK,IAAI,CAAC,EAAE;AACvB,gBAAA,MAAM,IAAI,UAAU,CAAC,qCAAqC,CAAC,CAAC;AAC7D,aAAA;AAED,YAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,gBAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,aAAA;;YAGD,IAAI,CAAC,KAAK,EAAE;gBACV,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACpC,OAAO,CACV,EAAA,EAAA,cAAc,kCACT,OAAO,CAAC,cAAc,CACtB,EAAA,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;AACH,gBAAA,KAAK,GAAG,QAAQ,CAAC,aAAc,GAAG,MAAM,CAAC;gBACzC,IAAI,KAAK,GAAG,CAAC,EAAE;oBACb,MAAM,IAAI,UAAU,CAClB,CAAU,OAAA,EAAA,MAAM,CAAuC,oCAAA,EAAA,QAAQ,CAAC,aAAc,CAAE,CAAA,CACjF,CAAC;AACH,iBAAA;AACF,aAAA;;YAGD,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI;AACF,oBAAA,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AAC9B,iBAAA;AAAC,gBAAA,OAAO,KAAU,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,CAA0C,uCAAA,EAAA,KAAK,CAAqJ,kJAAA,EAAA,KAAK,CAAC,OAAO,CAAE,CAAA,CACpN,CAAC;AACH,iBAAA;AACF,aAAA;AAED,YAAA,IAAI,MAAM,CAAC,MAAM,GAAG,KAAK,EAAE;AACzB,gBAAA,MAAM,IAAI,UAAU,CAClB,mFAAmF,KAAK,CAAA,CAAE,CAC3F,CAAC;AACH,aAAA;YAED,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;AAC7C,YAAA,KAAK,IAAI,GAAG,GAAG,MAAM,EAAE,GAAG,GAAG,MAAM,GAAG,KAAK,EAAE,GAAG,GAAG,GAAG,GAAG,OAAO,CAAC,SAAS,EAAE;AAC1E,gBAAA,KAAK,CAAC,YAAY,CAAC,YAAW;;AAE5B,oBAAA,IAAI,QAAQ,GAAG,MAAM,GAAG,KAAM,CAAC;AAC/B,oBAAA,IAAI,GAAG,GAAG,OAAO,CAAC,SAAU,GAAG,QAAQ,EAAE;AACvC,wBAAA,QAAQ,GAAG,GAAG,GAAG,OAAO,CAAC,SAAU,CAAC;AACrC,qBAAA;AACD,oBAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,QAAQ,GAAG,GAAG,EAAE;wBACxD,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,gBAAgB,EAAE,OAAO,CAAC,wBAAwB;wBAClD,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;wBAChD,cAAc,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,CACtD;AACF,qBAAA,CAAC,CAAC;AACH,oBAAA,MAAM,MAAM,GAAG,QAAQ,CAAC,kBAAmB,CAAC;AAC5C,oBAAA,MAAM,cAAc,CAAC,MAAM,EAAE,MAAO,EAAE,GAAG,GAAG,MAAM,EAAE,QAAQ,GAAG,MAAM,CAAC,CAAC;;;;AAIvE,oBAAA,gBAAgB,IAAI,QAAQ,GAAG,GAAG,CAAC;oBACnC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;AACvD,qBAAA;AACH,iBAAC,CAAC,CAAC;AACJ,aAAA
;AACD,YAAA,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;AACjB,YAAA,OAAO,MAAM,CAAC;AACf,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;IACI,MAAM,cAAc,CACzB,QAAgB,EAChB,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,OAAA,GAA+B,EAAE,EAAA;AAEjC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,KAAK,EAC7C,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,KACV,cAAc,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,OAAO,CAAC,cAAc,CAAA,EACtB,kCAAkC,CAAC,cAAc,CAAC,CAAA,EAAA,CAAA,CAEvD,CAAC;YACH,IAAI,QAAQ,CAAC,kBAAkB,EAAE;gBAC/B,MAAM,qBAAqB,CAAC,QAAQ,CAAC,kBAAkB,EAAE,QAAQ,CAAC,CAAC;AACpE,aAAA;;AAGA,YAAA,QAAgB,CAAC,kBAAkB,GAAG,SAAS,CAAC;AACjD,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;IAEO,+BAA+B,GAAA;AACrC,QAAA,IAAI,aAAa,CAAC;AAClB,QAAA,IAAI,QAAQ,CAAC;QACb,IAAI;;;;;;;;YASF,MAAM,SAAS,GAAGtB,mBAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAE7C,YAAA,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;;;gBAGjD,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;AACtE,gBAAA,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAA,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AAC/B,aAAA;AAAM,iBAAA,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;;;;gBAIvC,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;AAC9E,gBAAA,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAA,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AAC/B,aAAA;AAAM,iBAAA;;;gBAGL,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;AACtE,gBAAA,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAA,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AAC/B,aAAA;;AAGD,YAAA,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;AAClD,YAAA,QAAQ,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;;;YAIxC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;YAExC,IAAI,CAAC,aAAa,EAAE;AAClB,gBAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;AACvD,aAAA;AAED,YAAA,OAAO,EAAE,QAAQ,EAAE,aAAa,EAAE,CAAC;AACpC,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;AAC5F,SAAA;KACF;AAED;;;;;;;;;;;;AAYG;AACK,IAAA,MAAM,gBAAgB,CAC5B,UAAkB,EAClB,UAAuC,EAAE,EAAA;;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,UAAU,EAAA,MAAA,CAAA,MAAA,CAAA,EACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;AACnE,oBAAA,YAAY,EAAE,OAAO,CAAC,gBAAgB,CAAC,aAAa;iBACrD,EACD,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,
EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,EAC5C,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,QAAQ,EAAE,OAAO,CAAC,QAAQ,EACvB,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEsB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,cAAc,CAAC,OAAkC,EAAA;AACtD,QAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAI;YAC7B,IAAI,EAAE,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;AAC5D,gBAAA,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;AACH,aAAA;AAED,YAAA,MAAM,GAAG,GAAG,8BAA8B,CAEtC,MAAA,CAAA,MAAA,CAAA,EAAA,aAAa,EAAE,IAAI,CAAC,cAAc,EAClC,QAAQ,EAAE,IAAI,CAAC,KAAK,EACpB,YAAY,EAAE,IAAI,CAAC,SAAS,EAC5B,SAAS,EAAE,IAAI,CAAC,UAAU,EAAA,EACvB,OAAO,CAAA,EAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;KACJ;AAED;;;;AAIG;IACI,MAAM,wBAAwB,CACnC,OAA6C,EAAA;AAE7C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,wBAAwB,CAAA,MAAA,CAAA,MAAA,CAAA,EACpD,WAAW,EAAE,OAAO,KAAA,IAAA,IAAP,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,WAAW,EAAA,EAC9B,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,qBAAqB,CAChC,kBAA0C,EAC1C,OAA0C,EAAA;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,qBAAqB,CAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,EACjC,wBAAwB,EAAE,kBAAkB,CAAC,UAAU,EACvD,sBAAsB,EAAE,kBAAkB,CAAC,UAAU,EACrD,wBAAwB,EAAE,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,uBAAuB,EACvD,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,YAAY,CACvB,gBAAyB,EACzB,OAAiC,EAAA;AAEjC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,gBAAgB,EACzD,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,EAAA,EAC9B,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AA4ND;;AAEG;AACG,MAAO,gBAAiB,SAAQ,UAAU,CAAA;AAsE9C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;;;AAIhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,
mCAAmC,KAAK,QAAQ,EACvD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;;YAE5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,iBAAiB,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KACpE;AAED;;;;;;;AAOG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,gBAAgB,CACzB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;;;AAaG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAAmC,EAAE,EAAA;;AACvD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC,EAC1C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAAA,EAC3C,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,iBAAiB,CAC5B,OAAA,GAA8C,EAAE,EAAA;;AAEhD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oCAAoC,EAAE,OAAO,CAAC,CAAC;AAC3F,QAAA,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;QAC5C,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACxB,cAAc,CAAA,EAAA,EACjB,UAAU,EAAA,CAAA,CACV,CAAC;YACH,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,C
AAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,4EAA4E;AACtF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,IAAI,CAAC,OAAA,GAAiC,EAAE,EAAA;;AACnD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,CAAA,MAAA,CAAA,MAAA,CAAA,EACtC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;;;;AAuBG;IACI,MAAM,WAAW,CACtB,IAAqB,EACrB,aAAqB,EACrB,UAAwC,EAAE,EAAA;;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,WAAW,CAAC,aAAa,EAAE,IAAI,kBACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;AACrC,iBAAA,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;AAaG;IACI,MAAM,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,KAAa,EACb,OAAA,GAA+C,EAAE,EAAA;;AAEjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAC1D,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAEhE,YAAA,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,kBAAkB,CAAC,SAAS,EAAE,CAAC,EAAA,MAAA,CAAA,MAAA,CAAA,EACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC3D,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA
,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACrC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AAmkBD;;AAEG;AACG,MAAO,eAAgB,SAAQ,UAAU,CAAA;AA8E7C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;;;AAIhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,IAAIZ,MAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAChE;AAED;;;;;;;AAOG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,eAAe,CACxB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6BG;AACI,IAAA,MAAM,KAAK,CAChB,KAAa,EACb,UAAiC,EAAE,EAAA;;QAEnC,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAEhE,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAE9E,IAAI;YACF,IAAI,CAACnB,eAAM,EAAE;AACX,gBAAA,MAAM,IAAI,KAAK,CAAC,wDAAwD,CAAC,CAAC;AAC3E,aAAA;YACD,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,KAAK,CAC5C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAC
hC,YAAY,EAAE;AACZ,oBAAA,SAAS,EAAE,KAAK;AAChB,oBAAA,UAAU,EAAE,KAAK;AACjB,oBAAA,kBAAkB,EAAE,oBAAoB,CAAC,OAAO,CAAC,sBAAsB,CAAC;AACxE,oBAAA,mBAAmB,EAAE,oBAAoB,CAAC,OAAO,CAAC,uBAAuB,CAAC;AAC3E,iBAAA,EACD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EAAA,EACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACH,YAAA,OAAO,IAAI,iBAAiB,CAAC,QAAQ,EAAE;gBACrC,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,aAAA,CAAC,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEqB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;AA0BG;IACI,MAAM,MAAM,CACjB,IAAqB,EACrB,aAAqB,EACrB,UAAkC,EAAE,EAAA;;QAEpC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,EAC3D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC3C,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;AAiBG;AAEI,IAAA,MAAM,iBAAiB,CAC5B,SAAiB,EACjB,UAA6C,EAAE,EAAA;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EAAE,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACzD,OAAO,CACV,EAAA,EAAA,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,OAAO,CAAC,UAAU,CAAC,aAAa,EAAA,CAAA,EAE1C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,OAAO;AAChD,oBAAA,qBAAqB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,eAAe;AAChE,oBAAA,iBAAiB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,WAAW;AACxD,oBAAA,uBAAuB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,iBAAiB;AACpE,oBAAA,YAAY,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,aAAa;iBACtD,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,IAAI,E
AAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,cAAc,EAAE,OAAO,CAAC,cAAc,EACnC,CAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;IACI,MAAM,UAAU,CACrB,OAAe,EACf,IAAqB,EACrB,aAAqB,EACrB,OAAA,GAAsC,EAAE,EAAA;AAExC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,EAAE,aAAa,EAAE,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,EACxE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;AACrC,iBAAA,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;AAoBG;AACI,IAAA,MAAM,iBAAiB,CAC5B,OAAe,EACf,SAAiB,EACjB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,UAA6C,EAAE,EAAA;AAE/C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,OAAO,EAAE,CAAC,EAAE,SAAS,EACxE,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,WAAW,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,SAAS,GAAG,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAClF,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC5E,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,eAAe,CAC1B,MAAgB,EAChB,UAA2C,EAAE,EAAA;;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAChD,EAAE,MAAM,EAAE,MAAM,EAAE,kBAEhB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC3C,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,
CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,YAAY,CACvB,QAAuB,EACvB,UAAwC,EAAE,EAAA;;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;AACF,YAAA,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,QAAQ,EAC3D,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AAEH,YAAA,IAAI,CAAC,GAAG,CAAC,eAAe,EAAE;AACxB,gBAAA,GAAG,CAAC,eAAe,GAAG,EAAE,CAAC;AAC1B,aAAA;AAED,YAAA,IAAI,CAAC,GAAG,CAAC,iBAAiB,EAAE;AAC1B,gBAAA,GAAG,CAAC,iBAAiB,GAAG,EAAE,CAAC;AAC5B,aAAA;AAED,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;;AAID;;;;;;;;;;;;;;AAcG;AACI,IAAA,MAAM,UAAU,CACrB,IAAmD,EACnD,UAA0C,EAAE,EAAA;AAE5C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;AACF,YAAA,IAAIrB,eAAM,EAAE;AACV,gBAAA,IAAI,MAAc,CAAC;gBACnB,IAAI,IAAI,YAAY,MAAM,EAAE;oBAC1B,MAAM,GAAG,IAAI,CAAC;AACf,iBAAA;qBAAM,IAAI,IAAI,YAAY,WAAW,EAAE;AACtC,oBAAA,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC5B,iBAAA;AAAM,qBAAA;oBACL,IAAI,GAAG,IAAuB,CAAC;AAC/B,oBAAA,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;AACrE,iBAAA;AAED,gBAAA,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,KAAa,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAC7E,MAAM,CAAC,UAAU,EACjB,cAAc,CACf,CAAC;AACH,aAAA;AAAM,iBAAA;gBACL,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACrC,gBAAA,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,KAAW,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEqB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;AAkBG;AACI,IAAA,MAAM,iBAAiB,CAC5B,WAAiD,EACjD,UAA0C,EAAE,EAAA;AAE5C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AAC5C,YAAA,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAc,EAAE,IAAY,KAAW,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;AAcG;IACK,MAAM,sBAAsB,CAClC,WAA8D,EAC9D,IAAY,EACZ,UAA0C,EAAE,EAAA;AAE5C,QAAA,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;AACtB,YAAA,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,IAAI,OAAO,CAAC,SAAS,GAAG,gCAAgC,EAAE;AACjF,YAAA,MAAM,IAAI,UAAU,CAClB,wCAAwC,gCAAgC,CAAA,CAAE,CAC3E,CAAC;AACH,SAAA;QAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;AACjE,YAAA,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,CAAC;AAC9D,SAAA;AACD,QAAA,IACE,OAAO,CAAC,iBAAiB,GAAG,CAAC;AAC7B,YAAA,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,EAC5D;AACA,YAAA,MAA
M,IAAI,UAAU,CAClB,gDAAgD,gCAAgC,CAAA,CAAE,CACnF,CAAC;AACH,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;AAC3B,YAAA,IAAI,IAAI,GAAG,gCAAgC,GAAG,qBAAqB,EAAE;AACnE,gBAAA,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,CAAA,yCAAA,CAA2C,CAAC,CAAC;AAC1E,aAAA;AACD,YAAA,IAAI,IAAI,GAAG,OAAO,CAAC,iBAAiB,EAAE;gBACpC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC,CAAC;AAC5D,gBAAA,IAAI,OAAO,CAAC,SAAS,GAAG,iCAAiC,EAAE;AACzD,oBAAA,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;AACvD,iBAAA;AACF,aAAA;AACF,SAAA;AACD,QAAA,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;AAC5B,YAAA,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;AAC9B,SAAA;AACD,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;AACF,YAAA,IAAI,IAAI,IAAI,OAAO,CAAC,iBAAiB,EAAE;AACrC,gBAAA,OAAO,MAAM,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,CAAC,CAAC;AACtE,aAAA;AAED,YAAA,MAAM,SAAS,GAAW,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,GAAG,CAAC,IAAI,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;YACzE,IAAI,SAAS,GAAG,qBAAqB,EAAE;gBACrC,MAAM,IAAI,UAAU,CAClB,CAA6D,2DAAA,CAAA;oBAC3D,CAAmC,gCAAA,EAAA,qBAAqB,CAAE,CAAA,CAC7D,CAAC;AACH,aAAA;YAED,MAAM,SAAS,GAAa,EAAE,CAAC;AAC/B,YAAA,MAAM,aAAa,GAAGD,qBAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YAEjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;YAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;AAClC,gBAAA,KAAK,CAAC,YAAY,CAAC,YAAyB;oBAC1C,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC;AAClD,oBAAA,MAAM,KAAK,GAAG,OAAO,CAAC,SAAU,GAAG,CAAC,CAAC;AACrC,oBAAA,MAAM,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,KAAK,GAAG,OAAO,CAAC,SAAU,CAAC;AACpE,oBAAA,MAAM,aAAa,GAAG,GAAG,GAAG,KAAK,CAAC;AAClC,oBAAA,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACxB,oBAAA,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,CAAC,EAAE,aAAa,EAAE;wBAC/E,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;wBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,qBAAA,CAAC,CAAC;;;oBAGH,gBAAgB,IAAI,aAAa,CAAC;oBAClC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAW,CAAC;AAClB,4BAAA,WAAW,EAAE,gBAAgB;AAC9B,yBAAA,CAAC,CAAC;AACJ,qBAAA;AACH,iBAAC,CAAC,CAAC;AACJ,aAAA;AACD,YAAA,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;YAEjB,OAAO,IAAI,CAAC,eAAe,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;AACxD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;AAYG;AACI,IAAA,MAAM,UAAU,CACrB,QAAgB,EAChB,UAA0C,EAAE,EAAA;AAE5C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,MAAM,IAAI,GAAG,CAAC,MAAM,MAAM,CAAC,QAAQ,CAAC,EAAE,IAAI,CAAC;YAC3C,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAM,EAAE,KAAK,KAAI;AAChB,gBAAA,OAAO,MACL,kBAAkB,CAAC,QAAQ,EAAE;AAC3B,oBAAA,SAAS,EAAE,IAAI;AACf,oBAAA,GAAG,EAAE,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,CAAC,GAAG,QAAQ;AAC1C,oBAAA,KAAK,EAAE,MAAM;AACd,iBAAA,CAAC,CAAC;AACP,aAAC,EACD,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAEC,OAAO,CAAA,EAAA,EACV,cAAc,EACT,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAQ,CAAC,cAAc,GACvB,kCAAkC,CAAC,cAAc,CAAC,KAG1D,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;AACI,IAAA,MAAM,YAAY,CACvB,MAAgB,EAChB,UAAA,GAAqB,+BAA+B,EACpD,cAAyB,GAAA,CAAC,EAC1B,OAAA,GAAwC,EA
AE,EAAA;AAE1C,QAAA,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;AAC5B,YAAA,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;AAC9B,SAAA;AACD,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IAAI;YACF,IAAI,QAAQ,GAAG,CAAC,CAAC;AACjB,YAAA,MAAM,aAAa,GAAGD,qBAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,SAAS,GAAa,EAAE,CAAC;AAE/B,YAAA,MAAM,SAAS,GAAG,IAAI,eAAe,CACnC,MAAM,EACN,UAAU,EACV,cAAc,EACd,OAAO,IAAI,EAAE,MAAM,KAAI;gBACrB,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACzD,gBAAA,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACxB,gBAAA,QAAQ,EAAE,CAAC;gBAEX,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE;oBAC3C,UAAU,EAAE,OAAO,CAAC,UAAU;oBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;oBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,iBAAA,CAAC,CAAC;;gBAGH,gBAAgB,IAAI,MAAM,CAAC;gBAC3B,IAAI,OAAO,CAAC,UAAU,EAAE;oBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;AACvD,iBAAA;aACF;;;;;AAKD,YAAA,IAAI,CAAC,IAAI,CAAC,CAAC,cAAc,GAAG,CAAC,IAAI,CAAC,CAAC,CACpC,CAAC;AACF,YAAA,MAAM,SAAS,CAAC,EAAE,EAAE,CAAC;YAErB,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACtC,OAAO,CACV,EAAA,EAAA,cAAc,kCACT,OAAQ,CAAC,cAAc,CACvB,EAAA,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AAoaD;;AAEG;AACG,MAAO,cAAe,SAAQ,UAAU,CAAA;AA8D5C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;;;AAIhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,eAAe,GAAG,IAAI,QAAQ,CAAC,IAA
I,CAAC,oBAAoB,CAAC,CAAC;KAChE;AAED;;;;;;;AAOG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,cAAc,CACvB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE,EAAA;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAC9C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC3C,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,iBAAiB,CAC5B,IAAY,EACZ,UAA4C,EAAE,EAAA;;AAE9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;AACF,YAAA,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;AAC5C,YAAA,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAC7B,OAAO,CACV,EAAA,EAAA,UAAU,EACV,cAAc,EAAE,cAAc,CAAC,cAAc,IAC7C,CAAC;YACH,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,4EAA4E;AACtF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;IACI,MAAM,WAAW,CACtB,IAAqB,EACrB,MAAc,EACd,KAAa,EACb,OAAA,GAAsC,EAAE,EAAA;;QAExC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,KAAK,EAAE,IAAI,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,KAAK,EAAE,aAAa,CAA
C,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,UAAkB,EAClB,KAAa,EACb,OAAA,GAA6C,EAAE,EAAA;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;AAC1D,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAClD,SAAS,EACT,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC9C,CAAC,EACD,aAAa,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC,EAAA,MAAA,CAAA,MAAA,CAAA,EAE1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC5E,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,UAAU,CACrB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,OAAA,GAAqC,EAAE,EAAA;;QAEvC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,EAAA,MAAA,CAAA,MAAA,CAAA,EAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,aAAa,CACxB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,OAAA,GAAwC,EAAE,EAAA;;QAE1C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;AAC9B,iBAAA,aAAa,iBACZ,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA
,CAAA,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAAA,EACpC,kCAAkC,CAAC,cAAc,CAAC,CACrD,CAAA;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;IACK,MAAM,qBAAqB,CACjC,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,MAAe,EACf,OAAA,GAAgD,EAAE,EAAA;;AAElD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,aAAa,CAC7C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACD;;;;;;;;;;;;;AAaG;IACY,yBAAyB,CACtC,SAAiB,CAAC,EAClB,KAAc,EACd,MAAe,EACf,OAAA,GAAgD,EAAE,EAAA;;AAElD,YAAA,IAAI,gCAAgC,CAAC;AACrC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,gCAAgC,GAAG,MAAAE,aAAA,CAAM,IAAI,CAAC,qBAAqB,CACjE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,CAAC;AACF,oBAAA,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;AAC5D,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,gCAAgC,CAAA,CAAA,CAAC;AAC9C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;AAMG;AACY,IAAA,kBAAkB,CAC/B,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,UAAgD,EAAE,EAAA;;;AAElD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAyC,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBALU,MAAM,oBAAoB,WAAA,CAAA;AAMnC,oBAAA,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;AACxD,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsEG;AACI,IAAA,cAAc,CACnB,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,UAAyC,EAAE,EAAA;QAE3C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;;AAE9C,QAAA,MAAM,IAAI,GAAG,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;QAC7D,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;gBACtC,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,CAAC,iBAAiB,EAC7E,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,OAAO,EACV,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;AASG;IACI,MAAM,iBAAiB,CAC5B,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,OAAA,GAA4C,EAAE,EAAA;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,CAAA,MAAA,CAAA,MAAA,CAAA,EAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,K
AAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAE3C,CAAA,EAAA,YAAY,EAAE,YAAY,EAC1B,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,CACrD,CAAA;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEX,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;AAaG;IACK,MAAM,yBAAyB,CACrC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD,EAAA;;AAElD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAChG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,iBAAiB,CAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,EACjC,qBAAqB,EAAE,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,UAAU,EAC1C,wBAAwB,kCACnB,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,UAAU,CAAA,EAAA,EACtB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE5C,CAAA,EAAA,YAAY,EAAE,iBAAiB,EAC/B,KAAK,EAAE,aAAa,CAAC;AACnB,oBAAA,MAAM,EAAE,MAAM;AACd,oBAAA,KAAK,EAAE,KAAK;iBACb,CAAC,EACF,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,WAAW,EAC9B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACD;;;;;;;;;;;;;;;AAeG;IACY,6BAA6B,CAC1C,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD,EAAA;;AAElD,YAAA,IAAI,gCAAgC,CAAC;AACrC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,gCAAgC,GAAG,MAAME,aAAA,CAAA,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,CAAC;AACF,oBAAA,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;AAC5D,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,gCAAgC,CAAA,CAAA,CAAC;AAC9C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;AAOG;AACY,IAAA,sBAAsB,CACnC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,OAAkD,EAAA;;;AAElD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAyC,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,6BAA6B,CACzE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBANU,MAAM,oBAAoB,WAAA,CAAA;AAOnC,oBAAA,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;AACxD,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuEG;IACI,kBAAkB,CACvB,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,UAA6C,EAAE,EAAA;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;;AAG9C,QAAA,MAAM,IAAI,GAAG,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,KAAK,EAAE,YAAY,EAC/D,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,EACV,CAAC;QACH,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;gBACtC,OAAO,IAAI,CAAC,6BAA6B,CACvC,MAAM,EACN,KAAK,EACL,YAAY,EACZ,QAAQ,CAAC,iBAAiB,EAAA,MAAA,CAAA,MAAA,CAAA,EAExB,WAAW,EAAE,QAAQ,CAAC,WAAW,EAAA,EAC9B,OAAO,CAAA,CAEb,CAAC;aACH;SACF,CAAC;KACH;AAED;;;;;;;;;AASG;IACI,MAAM,gCAAgC,CAC3C,MAAc,EACd,KAAa,EACb,eAAuB,EACvB,OAAA,GAA4C,EAAE,EAAA;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,
EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,iDAAiD,EACjD,OAAO,CACR,CAAC;QAEF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,CAAA,MAAA,CAAA,MAAA,CAAA,EAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,eAAe,EACf,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,CACrD,CAAA;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEX,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE,EAAA;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,IAAI,EAC3C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,oBAAoB,CAC/B,oBAA8C,EAC9C,cAAuB,EACvB,UAA+C,EAAE,EAAA;;QAEjD,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,oBAAoB,CAAC,oBAAoB,EACzE,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,kBAAkB,EAAE,cAAc,EAClC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;AAYG;AACI,IAAA,MAAM,oBAAoB,CAC/B,UAAkB,EAClB,UAA+C,EAAE,EAAA;;AAEjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,UAAU,EAAA,MAAA,CAAA,MAAA,CAAA,EAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF;;ACv2LD;AAOO,eAAe,aAAa,CACjC,aAA8C,EAAA;IAE9C,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAEtD,MAAM,cAAc,GAAG,MAAM,eAAe,CAC1C,aAAa,CAAC,kBAA2C,EACzD,MAAM,CACP,CAAC;;IAGF,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAEzC,IAAA,OAAO,MAAM,CAAC,QAAQ,EAAE,CAAC;AAC3B,CAAC;AAEK,SAAU,cAAc,CAAC,GAAW,EAAA;AACxC,
IAAA,OAAO,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChC;;ACzBA;AAiBA,MAAM,qBAAqB,GAAG,IAAI,CAAC;AACnC,MAAM,eAAe,GAAG,GAAG,CAAC;AAC5B,MAAM,SAAS,GAAG,CAAC,CAAC,CAAC;AAErB;;AAEG;MACU,mBAAmB,CAAA;IAO9B,WACE,CAAA,aAA8C,EAC9C,WAAyC,EAAA;AAEzC,QAAA,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,WAAW,EAAE;;AAEhD,YAAA,MAAM,IAAI,UAAU,CAAC,mEAAmE,CAAC,CAAC;AAC3F,SAAA;QAED,IAAI,CAAC,WAAW,IAAI,WAAW,CAAC,IAAI,KAAK,CAAC,EAAE;;AAE1C,YAAA,MAAM,IAAI,UAAU,CAAC,0DAA0D,CAAC,CAAC;AAClF,SAAA;AAED,QAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACnC,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,aAAa,CAAC,WAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3E,IAAI,CAAC,iBAAiB,GAAG,CAAK,EAAA,EAAA,IAAI,CAAC,qBAAqB,CAAA,EAAG,gBAAgB,CAAA,CAAE,CAAC;QAC9E,IAAI,CAAC,mBAAmB,GAAG,CAAA,EAAA,EAAK,IAAI,CAAC,qBAAqB,IAAI,CAAC;KAChE;;AAGM,IAAA,MAAM,kBAAkB,GAAA;;;QAG7B,IAAI,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,KAAK,iBAAiB,CAAC,aAAa,EAAE;AAC3E,YAAA,MAAM,IAAI,KAAK,CACb,CAAA,kDAAA,EAAqD,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,CAAA,EAAA,CAAI,CAC7F,CAAC;AACH,SAAA;QAED,MAAM,kBAAkB,GAAG,MAAM,aAAa,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAEnE,MAAM,YAAY,GAAG,kBAAkB;aACpC,KAAK,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC;AAClC,aAAA,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC;AAC7B,aAAA,KAAK,CAAC,CAAC,CAAC,CAAC;AACZ,QAAA,MAAM,gBAAgB,GAAG,YAAY,CAAC,MAAM,CAAC;;;;;QAM7C,IAAI,gBAAgB,KAAK,IAAI,CAAC,WAAW,CAAC,IAAI,IAAI,gBAAgB,KAAK,CAAC,EAAE;AACxE,YAAA,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;AAC7F,SAAA;AAED,QAAA,MAAM,wBAAwB,GAA4B,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC;QACtF,IAAI,0BAA0B,GAAW,CAAC,CAAC;QAC3C,IAAI,uBAAuB,GAAW,CAAC,CAAC;;QAGxC,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,gBAAgB,EAAE,KAAK,EAAE,EAAE;AACrD,YAAA,MAAM,WAAW,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;YACxC,MAAM,uBAAuB,GAAG,EAAsB,CAAC;AACvD,YAAA,uBAAuB,CAAC,OAAO,GAAG,IAAIjB,oBAAW,EAAE,CAAC;YAEpD,MAAM,aAAa,GAAG,WAAW,CAAC,KAAK,CAAC,CAAG,EAAA,gBAAgB,CAAE,CAAA,CAAC,CAAC;YAC/D,IAAI,uBAAuB,GAAG,KAAK,CAAC;YACpC,IAAI,qBAAqB,GAAG,KAAK,CAAC;YAClC,IAAI,aAAa,GAAG,KAAK,CAAC;YAC1B,IAAI,SAAS,GAAG,SAAS,CAAC;AAE1B,YAAA,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;gBACxC,IAAI,CAAC,uBAAuB,EAAE;;oBAE5B,IAAI,YAAY,CAAC,UAAU,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;AACvD,wBAAA,SAAS,GAAG,QAAQ,CAAC,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpE,qBAAA;;;AAID,oBAAA,IAAI,YAAY,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;wBAC7C,uBAAuB,GAAG,IAAI,CAAC;wBAE/B,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;wBACnD,uBAAuB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAA,uBAAuB,CAAC,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;AAC/E,qBAAA;AAED,oBAAA,SAAS;AACV,iBAAA;AAED,gBAAA,IAAI,YAAY,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;;oBAE9B,IAAI,CAAC,qBAAqB,EAAE;wBAC1B,qBAAqB,GAAG,IAAI,CAAC;AAC9B,qBAAA;AAED,oBAAA,SAAS;AACV,iBAAA;;gBAGD,IAAI,CAAC,qBAAqB,EAAE;oBAC1B,IAAI,YAAY,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;;wBAEtD,MAAM,IAAI,KAAK,CACb,CAAA,oCAAA,EAAuC,YAAY,CAAoC,iCAAA,EAAA,qBAAqB,CAAI,EAAA,CAAA,CACjH,CAAC;AACH,qBAAA;;oBAGD,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC;AACzD,oBAAA,uBAAuB,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC1D,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,eAAe,CAAC,eAAe,EAAE;AACjD,wBAAA,uBAAuB,CAAC,SAAS,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;wBAC9C,aAAa,GAAG,IAAI,CAAC;AACtB,qBAAA;AACF,iBAAA;AAAM,qBAAA;;AAEL,oBAAA,IAAI,CAAC,uBAAuB,CAAC,UAAU,EAAE;AACvC,wBAAA,uBAAuB,CAAC,UAAU,GAAG,EAAE,CAAC;AACzC,qBAAA;AAED,oBAAA,uBAAuB,CAAC,UAAU,IAAI,YAAY,CAAC;AACpD,iBAAA;AACF,aAAA;;;;;YAMD,IACE,SAAS,KAAK,SAAS;AACvB,gBAAA,MAAM,CAAC,SAAS,CAAC,SAAS,CAAC;AAC3B,gBAAA,SAAS,IAAI,CAAC;AACd,gBAAA,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI;A
ACjC,gBAAA,wBAAwB,CAAC,SAAS,CAAC,KAAK,SAAS,EACjD;gBACA,uBAAuB,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAE,CAAC;AACpE,gBAAA,wBAAwB,CAAC,SAAS,CAAC,GAAG,uBAAuB,CAAC;AAC/D,aAAA;AAAM,iBAAA;gBACL,MAAM,CAAC,KAAK,CACV,CAAA,aAAA,EAAgB,KAAK,CAAuE,oEAAA,EAAA,SAAS,CAAE,CAAA,CACxG,CAAC;AACH,aAAA;AAED,YAAA,IAAI,aAAa,EAAE;AACjB,gBAAA,uBAAuB,EAAE,CAAC;AAC3B,aAAA;AAAM,iBAAA;AACL,gBAAA,0BAA0B,EAAE,CAAC;AAC9B,aAAA;AACF,SAAA;QAED,OAAO;AACL,YAAA,YAAY,EAAE,wBAAwB;AACtC,YAAA,0BAA0B,EAAE,0BAA0B;AACtD,YAAA,uBAAuB,EAAE,uBAAuB;SACjD,CAAC;KACH;AACF;;ACrLD;AACA;AAEA,IAAK,eAGJ,CAAA;AAHD,CAAA,UAAK,eAAe,EAAA;AAClB,IAAA,eAAA,CAAA,eAAA,CAAA,QAAA,CAAA,GAAA,CAAA,CAAA,GAAA,QAAM,CAAA;AACN,IAAA,eAAA,CAAA,eAAA,CAAA,UAAA,CAAA,GAAA,CAAA,CAAA,GAAA,UAAQ,CAAA;AACV,CAAC,EAHI,eAAe,KAAf,eAAe,GAGnB,EAAA,CAAA,CAAA,CAAA;AAID;;AAEG;MACU,KAAK,CAAA;AAChB;;;;;AAKG;AACI,IAAA,aAAa,IAAI,CAAC,GAAW,EAAA;AAClC,QAAA,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,KAAI;AACnC,YAAA,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,QAAQ,EAAE;gBAC/E,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;AACxC,gBAAA,OAAO,EAAE,CAAC;AACX,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,CAAC,aAAa,CAAC,GAAG,EAAE,MAAK;oBAC3B,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;AACxC,oBAAA,OAAO,EAAE,CAAC;AACZ,iBAAC,CAAC,CAAC;AACJ,aAAA;AACH,SAAC,CAAC,CAAC;KACJ;AAED;;;;AAIG;AACI,IAAA,aAAa,MAAM,CAAC,GAAW,EAAA;AACpC,QAAA,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,KAAI;YACnC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,MAAM,EAAE;AAC7C,gBAAA,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;AAC3B,aAAA;AACD,YAAA,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACtB,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC,CAAC;KACJ;AAKO,IAAA,OAAO,aAAa,CAAC,GAAW,EAAE,OAAiB,EAAA;QACzD,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;YACrC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;AACjC,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACnC,SAAA;KACF;IAEO,OAAO,eAAe,CAAC,GAAW,EAAA;AACxC,QAAA,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YACvE,MAAM,OAAO,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;YAC5C,YAAY,CAAC,MAAK;AAChB,gBAAA,OAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACtB,aAAC,CAAC,CAAC;AACJ,SAAA;KACF;;AAlBc,KAAI,CAAA,IAAA,GAAuC,EAAE,CAAC;AAC9C,KAAS,CAAA,SAAA,GAAkC,EAAE;;AClD9D;AAoDA;;;AAGG;MACU,SAAS,CAAA;AAKpB,IAAA,WAAA,GAAA;QAHiB,IAAK,CAAA,KAAA,GAAW,OAAO,CAAC;AAIvC,QAAA,IAAI,CAAC,YAAY,GAAG,IAAI,iBAAiB,EAAE,CAAC;KAC7C;AAED;;;;AAIG;IACI,uBAAuB,GAAA;AAC5B,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,uBAAuB,EAAE,CAAC;KACpD;AAED;;AAEG;IACI,kBAAkB,GAAA;AACvB,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,CAAC;KAC/C;AAED;;AAEG;IACI,cAAc,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,CAAC;KAC3C;AAEO,IAAA,MAAM,qBAAqB,CACjC,UAA2B,EAC3B,sBAA2C,EAAA;QAE3C,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAE7B,IAAI;AACF,YAAA,IAAI,CAAC,YAAY,CAAC,gBAAgB,CAAC,UAAU,CAAC,CAAC;YAC/C,MAAM,sBAAsB,EAAE,CAAC;AAC/B,YAAA,IAAI,CAAC,YAAY,CAAC,iBAAiB,CAAC,UAAU,CAAC,CAAC;AACjD,SAAA;AAAS,gBAAA;YACR,MAAM,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AAChC,SAAA;KACF;AAEO,IAAA,YAAY,CAAC,SAAqC,EAAA;AACxD,QAAA,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;AACnB,YAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;AAC5B,SAAA;AACD,QAAA,IAAI,IAAI,CAAC,SAAS,KAAK,SAAS,EAAE;YAChC,MAAM,IAAI,UAAU,CAClB,CAAA,sFAAA,EAAyF,IAAI,CAAC,SAAS,CAAc,YAAA,CAAA,CACtH,CAAC;AACH,SAAA;KACF;AAqCM,IAAA,MAAM,UAAU,CACrB,eAAoC,EACpC,mBAKa,EACb,OAA2B,EAAA;AAE3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,IAAI,UAA8E,CAAC;QAEnF,IACE,OAAO,eAAe,KAAK,QAAQ;AACnC,aAAC,CAACJ,eAAM,IAAI,mBAAmB,YAAY,0BAA0B;AACnE,gBAAA,mBAAmB,YAAY
,mBAAmB;AAClD,gBAAAe,0BAAiB,CAAC,mBAAmB,CAAC,CAAC,EACzC;;YAEA,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,mBAAmB,CAAC;AAClC,SAAA;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;;AAEhD,YAAA,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;AAC1B,YAAA,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,OAAO,GAAG,mBAAwC,CAAC;AACpD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;AACH,SAAA;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;AACF,YAAA,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;YAC5B,MAAM,IAAI,CAAC,qBAAqB,CAC9B;AACE,gBAAA,GAAG,EAAE,GAAG;AACR,gBAAA,UAAU,EAAE,UAAU;aACvB,EACD,YAAW;AACT,gBAAA,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAC5E,cAAc,CACf,CAAC;AACJ,aAAC,CACF,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEM,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;IAgDM,MAAM,iBAAiB,CAC5B,eAAoC,EACpC,gBAIc,EACd,aAA+C,EAC/C,OAA4B,EAAA;AAE5B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,IAAI,UAA8E,CAAC;AACnF,QAAA,IAAI,IAAgB,CAAC;QAErB,IACE,OAAO,eAAe,KAAK,QAAQ;AACnC,aAAC,CAACrB,eAAM,IAAI,gBAAgB,YAAY,0BAA0B;AAChE,gBAAA,gBAAgB,YAAY,mBAAmB;AAC/C,gBAAAe,0BAAiB,CAAC,gBAAgB,CAAC,CAAC,EACtC;;YAEA,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,gBAGM,CAAC;YACpB,IAAI,GAAG,aAA2B,CAAC;AACpC,SAAA;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;;AAEhD,YAAA,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;AAC1B,YAAA,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,IAAI,GAAG,gBAA8B,CAAC;YACtC,OAAO,GAAG,aAAmC,CAAC;AAC/C,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;AACH,SAAA;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAE1F,IAAI;AACF,YAAA,IAAI,CAAC,YAAY,CAAC,eAAe,CAAC,CAAC;YACnC,MAAM,IAAI,CAAC,qBAAqB,CAC9B;AACE,gBAAA,GAAG,EAAE,GAAG;AACR,gBAAA,UAAU,EAAE,UAAU;aACvB,EACD,YAAW;gBACT,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,aAAa,CACnF,IAAI,EACJ,cAAc,CACf,CAAC;AACJ,aAAC,CACF,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEM,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AAED;;;AAGG;AACH,MAAM,iBAAiB,CAAA;AASrB,IAAA,WAAA,GAAA;AACE,QAAA,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;AACxB,QAAA,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;AAEf,QAAA,MAAM,QAAQ,GAAGD,qBAAY,EAAE,CAAC;;AAGhC,QAAA,IAAI,CAAC,QAAQ,GAAG,CAAS,MAAA,EAAA,QAAQ,EAAE,CAAC;;;;QAIpC,IAAI,CAAC,gBAAgB,GAAG,CAAA,EAAA,EAAK,IAAI,CAAC,QAAQ,GAAG,gBAAgB,CAAA,EAAG,eAAe,CAAC,YAAY,qBAAqB,gBAAgB,CAAA,EAAG,eAAe,CAAC,yBAAyB,UAAU,CAAC;;QAExL,IAAI,CAAC,oBAAoB,GAAG,CAAA,0BAAA,EAA6B,IAAI,CAAC,QAAQ,EAAE,CAAC;;QAEzE,IAAI,CAAC,kBAAkB,GAAG,CAAA,EAAA,EAAK,IAAI,CAAC,QAAQ,IAAI,CAAC;AAEjD,QAAA,IAAI,CAAC,WAAW,GAAG,IAAI,GAAG,EAAE,CAAC;KAC9B;AAED;;;;;;AAMG;AACI,IAAA,cAAc,CACnB,UAA8E,EAAA;AAE9E,QAAA,MAAM,gBAAgB,GAAG,UAAU,YAAY,mBAAmB,CAAC;AACnE,QAAA,MAAM,mBAAmB,GAAG,CAAC,IAAI,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3D,QAAA,MAAM,SAAS,GAA2B,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAEzE,SAAS,CAAC,CAAC,CAAC,GAAGT,8BAAqB,EAAE,CAAC;QACvC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,8BAA8B,EAAE,CAAC;QACpD,IAAI,CAAC,gBAAgB,EAAE;AACrB,YAAA,SAAS,CAAC,CAAC,CAAC,GAAGI,0BAAiB,CAAC,UAAU,CAAC;kBACxC,gBAAgB,CACdmB,wCAA+B,CAAC,UAAU,EAAE,kBAAkB,CAAC,EAC/D,UAAU,CACX;kBACD,UAAU,CAAC;AAChB,SAAA;AACD,QAAA,SAAS,CAAC,mBAAmB,GAAG,CAAC,CAAC,GAAG,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC;AAEjF,QAAA,OAAO,IAAI,
QAAQ,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;KACpC;AAEM,IAAA,sBAAsB,CAAC,OAAoB,EAAA;;QAEhD,IAAI,CAAC,IAAI,IAAI;AACX,YAAA,IAAI,CAAC,gBAAgB;AACrB,YAAA,CAAA,EAAG,eAAe,CAAC,UAAU,KAAK,IAAI,CAAC,cAAc,CAAE,CAAA;YACvD,EAAE;AACF,YAAA,CAAA,EAAG,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,kBAAkB,CAChD,OAAO,CAAC,GAAG,CACZ,CAAI,CAAA,EAAA,gBAAgB,GAAG,gBAAgB,CAAA,CAAE;AAC3C,SAAA,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;QAEzB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;AACnD,YAAA,IAAI,CAAC,IAAI,IAAI,CAAA,EAAG,MAAM,CAAC,IAAI,CAAK,EAAA,EAAA,MAAM,CAAC,KAAK,CAAG,EAAA,gBAAgB,EAAE,CAAC;AACnE,SAAA;AAED,QAAA,IAAI,CAAC,IAAI,IAAI,gBAAgB,CAAC;;;KAG/B;AAEM,IAAA,gBAAgB,CAAC,UAA2B,EAAA;AACjD,QAAA,IAAI,IAAI,CAAC,cAAc,IAAI,iBAAiB,EAAE;AAC5C,YAAA,MAAM,IAAI,UAAU,CAAC,iBAAiB,iBAAiB,CAAA,+BAAA,CAAiC,CAAC,CAAC;AAC3F,SAAA;;QAGD,MAAM,IAAI,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AACxC,QAAA,IAAI,CAAC,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;YACxB,MAAM,IAAI,UAAU,CAAC,CAAA,8BAAA,EAAiC,UAAU,CAAC,GAAG,CAAG,CAAA,CAAA,CAAC,CAAC;AAC1E,SAAA;KACF;AAEM,IAAA,iBAAiB,CAAC,UAA2B,EAAA;QAClD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;QACtD,IAAI,CAAC,cAAc,EAAE,CAAC;KACvB;;IAGM,kBAAkB,GAAA;QACvB,OAAO,CAAA,EAAG,IAAI,CAAC,IAAI,CAAA,EAAG,IAAI,CAAC,kBAAkB,CAAA,EAAG,gBAAgB,CAAA,CAAE,CAAC;KACpE;IAEM,uBAAuB,GAAA;QAC5B,OAAO,IAAI,CAAC,oBAAoB,CAAC;KAClC;IAEM,cAAc,GAAA;QACnB,OAAO,IAAI,CAAC,WAAW,CAAC;KACzB;AACF,CAAA;AAED,MAAM,0BAA2B,SAAQjC,0BAAiB,CAAA;AAQxD,IAAA,WAAA,CACE,YAA+B,EAC/B,UAAyB,EACzB,OAA6B,EAAA;AAE7B,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAXZ,QAAA,IAAA,CAAA,aAAa,GAA0B;YACtD,OAAO,EAAE,IAAIkC,oBAAW,EAAE;AAC1B,YAAA,MAAM,EAAE,GAAG;YACX,OAAO,EAAE,IAAI/B,oBAAW,EAAE;SAC3B,CAAC;AASA,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;IAEM,MAAM,WAAW,CAAC,OAAoB,EAAA;QAC3C,MAAM,IAAI,CAAC,YAAY,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC;AAExD,QAAA,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;AACF,CAAA;AAED,MAAM,iCAAiC,CAAA;AAGrC,IAAA,WAAA,CAAY,YAA+B,EAAA;AACzC,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;IAEM,MAAM,CACX,UAAyB,EACzB,OAA6B,EAAA;QAE7B,OAAO,IAAI,0BAA0B,CAAC,IAAI,CAAC,YAAY,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;KAC/E;AACF,CAAA;AAED,MAAM,uBAAwB,SAAQH,0BAAiB,CAAA;;;IAGrD,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;IAEM,MAAM,WAAW,CAAC,OAAoB,EAAA;QAC3C,IAAI,aAAa,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,IAAI,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,eAAe,CAAC,YAAY,CAAC,EAAE;AACrD,gBAAA,aAAa,GAAG,MAAM,CAAC,IAAI,CAAC;AAC7B,aAAA;AACF,SAAA;QAED,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;AACvC,SAAA;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF,CAAA;AAED,MAAM,8BAA8B,CAAA;IAC3B,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;AACpE,QAAA,OAAO,IAAI,uBAAuB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KACzD;AACF;;AC9fD;AAuDA;;;;AAIG;MACU,eAAe,CAAA;IA8B1B,WACE,CAAA,GAAW,EACX,oBAIgB;;;IAGhB,OAAgC,EAAA;AAEhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;AACjC,SAAA;aAAM,IAAI,CAAC,oBAAoB,EAAE;;YAEhC,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA;AACL,YAAA,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;AACvD,SAAA;AAED,QAAA,MAAM,oBAAoB,GAAG,IAAI,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,sBAAsB,EAAE,CAAC,CAAC;AAE9F,QAAA,MAAM,IAAI,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;AAC7B,QAAA,IAAI,IAAI,IAAI,IAAI,KAAK,GAAG,EAAE;;YAExB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,yBAAyB,GAAG,IAAI,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACpE,SAAA;KACF;AAED;;;AAGG;IACI,WAAW,GAAA;QAChB,OAAO,IAAI,SAAS,EAAE,CAAC;KACxB;AAsCM,IAAA,MAAM,WAAW,CACtB,iBAA0C,EAC1C,mBAKa;;;IAG
b,OAA2B,EAAA;AAE3B,QAAA,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;AAC9B,QAAA,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;AAC/C,YAAA,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAsC,EAAE,OAAO,CAAC,CAAC;AAC1F,aAAA;AAAM,iBAAA;gBACL,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAwC,CAAC,CAAC;AACnF,aAAA;AACF,SAAA;AACD,QAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;KAChC;AAkDM,IAAA,MAAM,kBAAkB,CAC7B,iBAA0C,EAC1C,gBAIc,EACd,aAA+C;;;IAG/C,OAA4B,EAAA;AAE5B,QAAA,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;AAC9B,QAAA,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;AAC/C,YAAA,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;AACvC,gBAAA,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAAmC,EACnC,aAA2B,EAC3B,OAAO,CACR,CAAC;AACH,aAAA;AAAM,iBAAA;gBACL,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAA8B,EAC9B,aAAmC,CACpC,CAAC;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;KAChC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCG;AACI,IAAA,MAAM,WAAW,CACtB,YAAuB,EACvB,UAA8C,EAAE,EAAA;QAEhD,IAAI,CAAC,YAAY,IAAI,YAAY,CAAC,cAAc,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE;AAC7D,YAAA,MAAM,IAAI,UAAU,CAAC,wDAAwD,CAAC,CAAC;AAChF,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;AACF,YAAA,MAAM,gBAAgB,GAAG,YAAY,CAAC,kBAAkB,EAAE,CAAC;;AAG3D,YAAA,MAAM,gBAAgB,GACpB,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC9C,cAAc,CAAC,gBAAgB,CAAC,EAChC,YAAY,CAAC,uBAAuB,EAAE,EACtC,gBAAgB,EAEX,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACP,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;;AAGJ,YAAA,MAAM,mBAAmB,GAAG,IAAI,mBAAmB,CACjD,gBAAgB,EAChB,YAAY,CAAC,cAAc,EAAE,CAC9B,CAAC;AACF,YAAA,MAAM,eAAe,GAAG,MAAM,mBAAmB,CAAC,kBAAkB,EAAE,CAAC;AAEvE,YAAA,MAAM,GAAG,GAAiC;gBACxC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,WAAW,EAAE,gBAAgB,CAAC,WAAW;gBACzC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,eAAe,EAAE,gBAAgB,CAAC,eAAe;gBACjD,OAAO,EAAE,gBAAgB,CAAC,OAAO;gBACjC,YAAY,EAAE,eAAe,CAAC,YAAY;gBAC1C,0BAA0B,EAAE,eAAe,CAAC,0BAA0B;gBACtE,uBAAuB,EAAE,eAAe,CAAC,uBAAuB;aACjE,CAAC;AAEF,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEoB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF;;ACgRD;;AAEG;AACG,MAAO,eAAgB,SAAQ,aAAa,CAAA;IAgEhD,WACE,CAAA,qBAA6B,EAC7B,mCAKgB;;;IAGhB,OAAgC,EAAA;AAEhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;AAC5B,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;AAE1D,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;AACF,oBAAA,GAAG,GAAG,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,CAAC;AAE7E,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA
;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC;wBACtE,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAC;AAC5E,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;AACrB,QAAA,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,uBAAuB,EAAE,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAClE;AApID;;AAEG;AACH,IAAA,IAAW,aAAa,GAAA;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;KAC5B;AAiID;;;;;;;;;;;;;;;AAeG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAAkC,EAAE,EAAA;AACtD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;;;AAGF,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACpC,OAAO,CAAA,EACP,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,iBAAiB,CAC5B,OAAA,GAAkC,EAAE,EAAA;;AAEpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,wBAAwB,EAAE;gBACrD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EACL,iFAAiF;AACpF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAAkC,EAAE,EAAA;AACtD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,aAAA,CAAC,CAAC;AACH,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;AACf,YAAA,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;gBACxB,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,sDAAsD;AAChE,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;AAKG;AACI,IAAA,aAAa,CAAC,QAAgB,EAAA;AACnC,QAAA,OAAO,IAAI,UAAU,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC/F;AAED;;;;AAIG;AACI,IAAA,mBAAmB,CAAC,QAAgB,EAAA;AACzC,QAAA,OAAO,IAAI,gBAAgB,CACzB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;;;;AAcG;AACI,IAAA,kBAAkB,CAAC,QAAgB,EAAA;AACxC,QAAA,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;AAIG;AACI,IAAA,iBAAiB,C
AAC,QAAgB,EAAA;AACvC,QAAA,OAAO,IAAI,cAAc,CACvB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAyC,EAAE,EAAA;AAE3C,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,+BAA+B,EAAE,OAAO,CAAC,CAAC;QACtF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,OAAO,CAAC,UAAU,CAAA,EAClB,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,MAAM,CACjB,OAAA,GAAwC,EAAE,EAAA;AAE1C,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CACvC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,EAAA,EACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAAwC,EAAE,EAAA;;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,gCAAgC,EAAE,OAAO,CAAC,CAAC;QAEvF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,iEAAiE;AAC3E,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,WAAW,CACtB,QAAmB,EACnB,UAAuC,EAAE,EAAA;AAEzC,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,UAAU,CAAC,iBAAiB,EAAE;AACxC,YAAA,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;AACH,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAA,MAAA,CAAA,MAAA,CAAA,EAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,EAAE,OAAO,CAAC,UAAU,EAAA,EACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,eAAe,CAC1B,OAAA,GAA2C,EAAE,EAAA;AAE7C,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GA
AG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QAExF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAA,MAAA,CAAA,MAAA,CAAA,EAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACtC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AAEH,YAAA,MAAM,GAAG,GAAqC;gBAC5C,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,gBAAgB,EAAE,QAAQ,CAAC,gBAAgB;gBAC3C,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,YAAY,EAAE,QAAQ,CAAC,YAAY;gBACnC,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe;AACzC,gBAAA,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,QAAQ,CAAC,OAAO;aAC1B,CAAC;AAEF,YAAA,KAAK,MAAM,UAAU,IAAI,QAAQ,EAAE;gBACjC,IAAI,YAAY,GAAQ,SAAS,CAAC;gBAClC,IAAI,UAAU,CAAC,YAAY,EAAE;AAC3B,oBAAA,YAAY,GAAG;AACb,wBAAA,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;qBACjD,CAAC;AAEF,oBAAA,IAAI,UAAU,CAAC,YAAY,CAAC,SAAS,EAAE;AACrC,wBAAA,YAAY,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;AACtE,qBAAA;AAED,oBAAA,IAAI,UAAU,CAAC,YAAY,CAAC,QAAQ,EAAE;AACpC,wBAAA,YAAY,CAAC,QAAQ,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;AACpE,qBAAA;AACF,iBAAA;AAED,gBAAA,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC;oBACzB,YAAY;oBACZ,EAAE,EAAE,UAAU,CAAC,EAAE;AAClB,iBAAA,CAAC,CAAC;AACJ,aAAA;AAED,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;AAgBG;IACI,MAAM,eAAe,CAC1B,MAAyB,EACzB,YAAiC,EACjC,UAA2C,EAAE,EAAA;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,GAAG,GAA4B,EAAE,CAAC;AACxC,YAAA,KAAK,MAAM,UAAU,IAAI,YAAY,IAAI,EAAE,EAAE;gBAC3C,GAAG,CAAC,IAAI,CAAC;AACP,oBAAA,YAAY,EAAE;AACZ,wBAAA,SAAS,EAAE,UAAU,CAAC,YAAY,CAAC,SAAS;8BACxC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC;AACzD,8BAAE,EAAE;AACN,wBAAA,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;AAChD,wBAAA,QAAQ,EAAE,UAAU,CAAC,YAAY,CAAC,QAAQ;8BACtC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;AACxD,8BAAE,EAAE;AACP,qBAAA;oBACD,EAAE,EAAE,UAAU,CAAC,EAAE;AAClB,iBAAA,CAAC,CAAC;AACJ,aAAA;AAED,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAChD,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,EACN,YAAY,EAAE,GAAG,EACjB,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;AAKG;AACI,IAAA,kBAAkB,CAAC,cAAuB,EAAA;AAC/C,QAAA,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;KAClD;AAED;;;;;;;;;;;;;;;;;;;;;AAqBG;IACI,MAAM,eAAe,CAC1B,QAAgB,EAChB,IAAqB,EACrB,aAAqB,EACrB,OAAA,GAAkC,EAAE,EAAA;AAEpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,CAAC;AAC1D,YAAA,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,IAAI,EAAE,aAAa,EAAE,cAAc,CAAC,CAAC;YACnF,OAAO;gBACL,eAAe;gBACf,QAAQ;aACT,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,UAAU,CACrB,QAAgB,EAChB,UAAsC,EAAE,EAAA;AAExC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAA
E,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,IAAI,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;YAC9C,IAAI,OAAO,CAAC,SAAS,EAAE;gBACrB,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;AACxD,aAAA;AACD,YAAA,OAAO,MAAM,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;AAChD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACK,IAAA,MAAM,mBAAmB,CAC/B,MAAe,EACf,UAA4C,EAAE,EAAA;AAE9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAC9D,MAAM,EAAA,EACH,OAAO,CACP,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AAEH,YAAA,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,EAAE,CAAC;YAChC,IAAK,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,KAAK,SAAS,EAAE;AACnD,gBAAA,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,gBAAgB,CAAE,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,CAAC,CAAC;AAClF,aAAA;AAED,YAAA,MAAM,eAAe,GAChB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,KACX,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACJ,QAAQ,CAAC,SAAS,CACrB,EAAA,EAAA,UAAU,EAAE,qCAAqC,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,EAElF,CAAA,EAAA,OAAO,kCACF,QAAQ,CAAC,OAAO,CACnB,EAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AAC5D,wBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;AACF,wBAAA,OAAO,QAAQ,CAAC;qBACjB,CAAC,MAEL,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;IACK,MAAM,wBAAwB,CACpC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE,EAAA;;AAE9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QACF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,wBAAwB,CAAC,SAAS,gCAC7E,MAAM,EAAA,EACH,OAAO,CACP,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AAEH,YAAA,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,EAAE,CAAC;YAChC,IAAK,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,KAAK,SAAS,EAAE;AACnD,gBAAA,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,gBAAgB,CAAE,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,CAAC,CAAC;AAClF,aAAA;AAED,YAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;YACnC,IAAK,QAAQ,CAAC,OAAe,CAAC,YAAY,CAAC,KAAK,SAAS,EAAE;AACzD,gBAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,GAAG,mBAAmB,CAChD,QAAQ,CAAC,OAAe,CAAC,YAAY,CAAC,CACxC,CAAC;AACH,aAAA;AAED,YAAA,MAAM,eAAe,GAChB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,KACX,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACJ,QAAQ,CAAC,SAAS,CACrB,EAAA,EAAA,UAAU,EAAE,0CAA0C,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,EAEvF,CAAA,EAAA,OAAO,kCACF,QAAQ,CAAC,OAAO,CACnB,EAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AAC5D,wBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;AACF,wBAAA,OAAO,QAAQ,CAAC;AAClB,qBAAC,CAAC,EACF,YAAY,EAAE,CAAA,EAAA,GAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,0CAAE,GAAG,CAAC,CAAC,kBAAkB,KAAI;AACtE,wBAAA,MAAM,UAAU,GAAe;AAC7B,4BAAA,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC;yBAChD,CAAC;AACF,wBAAA,OAAO,UAAU,CAA
C;qBACnB,CAAC,MAEL,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACY,IAAA,YAAY,CACzB,MAAe,EACf,OAAA,GAA4C,EAAE,EAAA;;AAE9C,YAAA,IAAI,4BAA4B,CAAC;AACjC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,4BAA4B,GAAG,MAAME,aAAA,CAAA,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;AAC/E,oBAAA,MAAM,GAAG,4BAA4B,CAAC,iBAAiB,CAAC;AACxD,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,4BAA4B,CAAA,CAAA,CAAC;AAC1C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;AAIG;IACY,SAAS,CACtB,UAA4C,EAAE,EAAA;;;AAE9C,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAiD,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAAxE,MAAM,4BAA4B,WAAA,CAAA;AAC3C,oBAAA,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,4BAA4B,CAAC,OAAO,CAAC,SAAS,CAAA,CAAA,CAAA,CAAC;AACvD,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqEG;IACI,aAAa,CAClB,UAAqC,EAAE,EAAA;QAEvC,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;AAClC,YAAA,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAClC,SAAA;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;AACtC,YAAA,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;AACrC,SAAA;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;AACrC,YAAA,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AACpC,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;AACD,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;AACzB,YAAA,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;AAC5B,SAAA;QAED,MAAM,cAAc,GACf,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACP,GAAC,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,EACnD,CAAC;;QAGF,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;QAC5C,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,EAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,cAAc,EACjB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;;;AAYG;AACY,IAAA,qBAAqB,CAClC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE,EAAA;;AAE9C,YAAA,IAAI,iCAAiC,CAAC;AACtC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,iCAAiC,GAAG,MAAAT,aAAA,CAAM,IAAI,CAAC,wBAAwB,CACrE,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,CAAC;AACF,oBAAA,MAAM,GAAG,iCAAiC,CAAC,iBAAiB,CAAC;AAC7D,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,iCAAiC,CAAA,CAAA,CAAC;AAC/C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;AAKG;AACY,IAAA,oBAAoB,CACjC,SAAiB,EACjB,OAAA,GAA4C,EAAE,EAAA;;;AAE9C,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAsD,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,qBAAqB,CAC9E,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,E
AAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAJU,MAAM,iCAAiC,WAAA,CAAA;AAKhD,oBAAA,MAAM,OAAO,GAAG,iCAAiC,CAAC,OAAO,CAAC;oBAC1D,IAAI,OAAO,CAAC,YAAY,EAAE;AACxB,wBAAA,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,YAAY,EAAE;AACzC,4BAAA,MAAA,MAAAA,aAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,IAAI,EAAE,QAAQ,EACX,EAAA,MAAM,EACV,CAAC;AACH,yBAAA;AACF,qBAAA;AACD,oBAAA,KAAK,MAAM,IAAI,IAAI,OAAO,CAAC,SAAS,EAAE;AACpC,wBAAA,MAAA,MAAAA,aAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAQ,IAAI,EAAE,MAAM,EAAK,EAAA,IAAI,EAAE,CAAC;AACjC,qBAAA;AACF,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6EG;AACI,IAAA,oBAAoB,CACzB,SAAiB,EACjB,OAAA,GAAqC,EAAE,EAAA;QAKvC,IAAI,SAAS,KAAK,EAAE,EAAE;AACpB,YAAA,MAAM,IAAI,UAAU,CAAC,iDAAiD,CAAC,CAAC;AACzE,SAAA;QAED,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;AAClC,YAAA,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAClC,SAAA;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;AACtC,YAAA,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;AACrC,SAAA;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;AACrC,YAAA,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AACpC,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;AACD,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;AACzB,YAAA,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;AAC5B,SAAA;QAED,MAAM,cAAc,GACf,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACP,GAAC,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,EACnD,CAAC;;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;QAClE,OAAO;AACL;;AAEG;AACH,YAAA,MAAM,IAAI,GAAA;AACR,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,qBAAqB,CAAC,SAAS,EAAE,QAAQ,CAAC,iBAAiB,EACrE,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,cAAc,EACjB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;;;;;;;AAgBG;IACK,MAAM,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE,EAAA;AAEpD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAA,MAAA,CAAA,MAAA,CAAA,EACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,EAAA,EAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,CAAA,EAAA,EACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,CAAA,EAAA,GAAA,IAAI,CAAC,IAAI,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1C,qBAAA;AACD,oBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAY,IAAI,CAAA,EAAA,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAG,CAAA,CAAA;iBACvD,CAAC,GACH,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEF,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,
OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;AACY,IAAA,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE,EAAA;;AAEpD,YAAA,IAAI,QAAQ,CAAC;AACb,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,QAAQ,GAAG,MAAAE,aAAA,CAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;AACtC,oBAAA,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,MAAM,MAAAA,aAAA,CAAA,QAAQ,CAAA,CAAC;AAChB,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;AAQG;AACY,IAAA,oBAAoB,CACjC,sBAA8B,EAC9B,OAAA,GAAkD,EAAE,EAAA;;;AAEpD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAA4B,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;AACtB,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA4EG;AACI,IAAA,eAAe,CACpB,sBAA8B,EAC9B,OAAA,GAA0C,EAAE,EAAA;;AAG5C,QAAA,MAAM,kBAAkB,GAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,EACpF,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,kBAAkB,EACrB,CAAC;aACJ;SACF,CAAC;KACH;IAEO,uBAAuB,GAAA;AAC7B,QAAA,IAAI,aAAa,CAAC;QAClB,IAAI;;;;;;YAOF,MAAM,SAAS,GAAGjC,mBAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAE7C,YAAA,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;;;;AAIjD,gBAAA,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,aAAA;AAAM,iBAAA,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;;;;AAIvC,gBAAA,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,aAAA;AAAM,iBAAA;;;AAGL,gBAAA,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,aAAA;;AAGD,YAAA,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAElD,IAAI,CAAC,aAAa,EAAE;AAClB,gBAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;AACvD,aAAA;AAED,YAAA,OAAO,aAAa,CAAC;AACtB,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;AAC/E,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,cAAc,CAAC,OAAuC,EAAA;AAC3D,QAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAI;YAC7B,IAAI,EAAE,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;AAC5D,gBAAA,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;AACH,aAAA;AAED,YAAA,MAAM,GAAG,GAAG,8BAA8B,iBAEtC,aAAa,EAAE,IAAI,CAAC,cAAc,IAC/B,OAAO,CAAA,EAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;KACJ;AAED;;;;;;AAMG;IACI,kBAAkB,GAAA;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACrD;AACF;;ACtnED;AACA;AAEA;;;;;;;;AAQG;MACU,qBAAqB,CAAA;AAAlC,IAAA,WAAA,GAAA;AA4GE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAa,CAAA,aAAA,GAAY,KAAK,CAAC;AAEtC;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAM,CAAA,MAAA,GAA
Y,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAqB,CAAA,qBAAA,GAAY,KAAK,CAAC;AAE9C;;AAEG;QACI,IAAe,CAAA,eAAA,GAAY,KAAK,CAAC;KA0DzC;AApOC;;;;AAIG;IACI,OAAO,KAAK,CAAC,WAAmB,EAAA;AACrC,QAAA,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;AAE1D,QAAA,KAAK,MAAM,CAAC,IAAI,WAAW,EAAE;AAC3B,YAAA,QAAQ,CAAC;AACP,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACnC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC3C,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACrC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACnD,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC7C,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,CAAA,CAAE,CAAC,CAAC;AAC9D,aAAA;AACF,SAAA;AAED,QAAA,OAAO,qBAAqB,CAAC;KAC9B;AAED;;;;;AAKG;IACI,OAAO,IAAI,CAAC,cAAyC,EAAA;AAC1D,QAAA,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;QAC1D,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;AACnC,SAAA;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;AACxB,YAAA,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;AACpC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;AAChC,YAAA,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;AAC5C,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;AACnC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;AAC1B,YAAA,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;AACtC,SAAA;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;AACxC,YAAA,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;AACpD,SAAA;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;AAClC,YAAA,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;AAC9C,SAAA;AACD,QAAA,OAAO,qBAAqB,CAAC;KAC9B;AAmED;;;;;;;;;AASG;IACI,QAAQ,GAAA;;;;QAIb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WA
AW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;AAC9B,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;AACxB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC7B;AACF;;ACjPD;AACA;AAEA;;;;;;;;AAQG;MACU,uBAAuB,CAAA;AAApC,IAAA,WAAA,GAAA;AA6BE;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAS,CAAA,SAAA,GAAY,KAAK,CAAC;AAElC;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;KAqBhC;AA9DC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,aAAqB,EAAA;AACvC,QAAA,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;AAE9D,QAAA,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;AAC7B,YAAA,QAAQ,CAAC;AACP,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,SAAS,GAAG,IAAI,CAAC;oBACzC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,0BAA0B,CAAC,CAAA,CAAE,CAAC,CAAC;AACvD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,uBAAuB,CAAC;KAChC;AAiBD;;;;;AAKG;IACI,QAAQ,GAAA;QACb,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,IAAI,CAAC,SAAS,EAAE;AAClB,YAAA,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,SAAA;AACD,QAAA,OAAO,aAAa,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC/B;AACF;;AC3ED;AACA;AAEA;;;;;;;;AAQG;MACU,kBAAkB,CAAA;AAA/B,IAAA,WAAA,GAAA;AAgCE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;KAsB/B;AAvEC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,QAAgB,EAAA;AAClC,QAAA,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEpD,QAAA,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE;AACxB,YAAA,QAAQ,CAAC;AACP,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,CAAA,CAAE,CAAC,CAAC;AAC3D,aAAA;AACF,SAAA;AAED,QAAA,OAAO,kBAAkB,CAAC;KAC3B;AAsBD;;;AAGG;IACI,QAAQ,GAAA;QACb,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;AACD,QAAA,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC1B;AACF;;ACpFD;AA6EA;;;;;;;;;;AAUG;AACa,SAAA,iCAAiC,CAC/C,yBAAoD,EACpD,mBAA+C,EAAA;AAE/C,IAAA,MAAM,OAAO,GAAG,yBAAyB,CAAC,OAAO;UAC7C,yBAAyB,CAAC,OAAO;UACjC,eAAe,CAAC;IAEpB,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,qBAAqB;QAC3D,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,aAAa;QACnD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,eAAe;QACrD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,GAAG;QACzC,OAAO,GAAG,YAAY,EACtB;AACA,QA
AA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,MAAM;QAC5C,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;AAED,IAAA,IAAI,yBAAyB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;AACvE,QAAA,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;AAC/F,KAAA;AAED,IAAA,MAAM,iBAAiB,GAAG,qBAAqB,CAAC,KAAK,CACnD,yBAAyB,CAAC,WAAW,CAAC,QAAQ,EAAE,CACjD,CAAC;AACF,IAAA,MAAM,cAAc,GAAG,kBAAkB,CAAC,KAAK,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;AAC/F,IAAA,MAAM,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACvD,yBAAyB,CAAC,aAAa,CACxC,CAAC,QAAQ,EAAE,CAAC;AAEb,IAAA,IAAI,YAAoB,CAAC;IAEzB,IAAI,OAAO,IAAI,YAAY,EAAE;AAC3B,QAAA,YAAY,GAAG;AACb,YAAA,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;AACnB,YAAA,yBAAyB,CAAC,QAAQ;kBAC9B,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;AACjE,kBAAE,EAAE;AACN,YAAA,oBAAoB,CAAC,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;AAChE,YAAA,yBAAyB,CAAC,OAAO,GAAG,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,GAAG,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,GAAG,yBAAyB,CAAC,QAAQ,GAAG,EAAE;YAC5E,OAAO;YACP,yBAAyB,CAAC,eAAe,GAAG,yBAAyB,CAAC,eAAe,GAAG,EAAE;AAC1F,YAAA,EAAE;AACH,SAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACd,KAAA;AAAM,SAAA;AACL,QAAA,YAAY,GAAG;AACb,YAAA,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;AACnB,YAAA,yBAAyB,CAAC,QAAQ;kBAC9B,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;AACjE,kBAAE,EAAE;AACN,YAAA,oBAAoB,CAAC,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;AAChE,YAAA,yBAAyB,CAAC,OAAO,GAAG,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,GAAG,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,GAAG,yBAAyB,CAAC,QAAQ,GAAG,EAAE;YAC5E,OAAO;AACP,YAAA,EAAE;AACH,SAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACd,KAAA;IAED,MAAM,SAAS,GAAW,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAE9E,OAAO,IAAI,kBAAkB,CAC3B,OAAO,EACP,SAAS,EACT,iBAAiB,CAAC,QAAQ,EAAE,EAC5B,cAAc,EACd,mBAAmB,EACnB,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,SAAS,EACnC,yBAAyB,CAAC,OAAO,EACjC,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,yBAAyB,CAAC,eAAe,CAC1C,CAAC;AACJ;;ACqJA;;;AAGG;AACG,MAAO,iBAAkB,SAAQ,aAAa,CAAA;IAuGlD,WACE,CAAA,GAAW,EACX,oBAIgB;;;IAGhB,OAAgC,EAAA;AAEhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;AACjC,SAAA;AAAM,aAAA,IACL,CAACC,eAAM,IAAI,oBAAoB,YAAY,0BAA0B;AACrE,YAAA,oBAAoB,YAAY,mBAAmB;YACnDe,0BAAiB,CAAC,oBAAoB,CAAC,EACvC;AACA,YAAA,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;AACvD,SAAA;AAAM,aAAA;;YAEL,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,cAAc,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAC9D;AA3HD;;;;;;;;;;;AAWG;IACI,OAAO,oBAAoB,CAChC,gBAAwB;;;IAGxB,OAAgC,EAAA;AAEhC,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;AACtE,QAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,YAAA,IAAIf,eAAM,EAAE;AACV,gBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;AAEF,gBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;oBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,iBAAA;gBAED,MAAM,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;gBAC3D,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,aAAA;AACF,SAAA;AAAM,aAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;YAClD,MAAM,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AACjE,YAAA,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,GAAG,GAAG,GAAG,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;AAC9F,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,SAAA;KACF;AAiFD;;;;;;;;;;;AAWG;AACI,IAAA,kBAAkB,CA
AC,aAAqB,EAAA;AAC7C,QAAA,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EAC5D,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;AAMG;AACI,IAAA,MAAM,eAAe,CAC1B,aAAqB,EACrB,UAAkC,EAAE,EAAA;AAKpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAC/D,MAAM,uBAAuB,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC7E,OAAO;gBACL,eAAe;gBACf,uBAAuB;aACxB,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,eAAe,CAC1B,aAAqB,EACrB,UAAwC,EAAE,EAAA;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;AAC/D,YAAA,OAAO,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;AACrD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,iBAAiB,CAC5B,oBAA4B,EAC5B,uBAA+B,EAC/B,UAA2C,EAAE,EAAA;AAK7C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAC7C,OAAO,CAAC,wBAAwB,IAAI,oBAAoB,CACzD,CAAC;;YAEF,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;AAChF,YAAA,MAAM,yBAAyB,GAAG,MAAM,gBAAgB,CAAC,OAAO,iBAC9D,oBAAoB;gBACpB,uBAAuB,EAAA,EACpB,cAAc,CAAA,CACjB,CAAC;AACH,YAAA,OAAO,EAAE,eAAe,EAAE,yBAAyB,EAAE,CAAC;AACvD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;;;IAGK,MAAM,eAAe,CAC3B,mBAA2B,EAC3B,wBAAgC,EAChC,UAAyC,EAAE,EAAA;;AAK3C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,wBAAwB,CAAC,CAAC;;YAE1E,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;YAChF,MAAM,uBAAuB,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,mBAAmB,EAC5E,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,cAAc,KACjB,aAAa,EAAE,MAAA,OAAO,CAAC,eAAe,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,OAAO,IAC/C,CAAC;AACH,YAAA,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,CAAC;AACrD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,aAAa,CACxB,UAAiC,EACjC,UAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,CAAC,UAAU,EACvD,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,W
AAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAAwC,EAAE,EAAA;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,cAAc,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;AAaG;AACK,IAAA,MAAM,qBAAqB,CACjC,MAAe,EACf,UAA+C,EAAE,EAAA;AAEjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAEhG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,qBAAqB,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,EACH,EAAA,OAAO,KACV,OAAO,EAAE,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,OAAO,EAAA,CAAA,EAC/E,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;AAiBG;IACK,MAAM,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE,EAAA;AAElD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QAEF,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,WAAW,CAAA,MAAA,CAAA,MAAA,CAAA,EACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,EAAA,EAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,CAAA,EAAA,EACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,CAAA,EAAA,GAAA,IAAI,CAAC,IAAI,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1C,qBAAA;AACD,oBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAY,IAAI,CAAA,EAAA,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAG,CAAA,CAAA;iBACvD,CAAC,GACH,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;AACY,IAAA,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE,EAAA;;AAElD,YAAA,IAAI,QAAQ,CAAC;AACb,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,QAAQ,GAAG,MAAAE,aAAA,CAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAA
Q,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;AACtC,oBAAA,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,MAAM,MAAAA,aAAA,CAAA,QAAQ,CAAA,CAAC;AAChB,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;AAQG;AACY,IAAA,oBAAoB,CACjC,sBAA8B,EAC9B,OAAA,GAAgD,EAAE,EAAA;;;AAElD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAA4B,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;AACtB,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8EG;AACI,IAAA,eAAe,CACpB,sBAA8B,EAC9B,OAAA,GAAwC,EAAE,EAAA;;AAG1C,QAAA,MAAM,kBAAkB,GAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,EACpF,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,kBAAkB,EACrB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;;AAWG;AACY,IAAA,YAAY,CACzB,MAAe,EACf,OAAA,GAA+C,EAAE,EAAA;;AAEjD,YAAA,IAAI,6BAA6B,CAAC;AAClC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,6BAA6B,GAAG,MAAMT,aAAA,CAAA,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;AAClF,oBAAA,6BAA6B,CAAC,cAAc;AAC1C,wBAAA,6BAA6B,CAAC,cAAc,IAAI,EAAE,CAAC;AACrD,oBAAA,MAAM,GAAG,6BAA6B,CAAC,iBAAiB,CAAC;AACzD,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,6BAA6B,CAAA,CAAA,CAAC;AAC3C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;AAIG;IACY,SAAS,CACtB,UAA+C,EAAE,EAAA;;;AAEjD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAA4B,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAAnD,MAAM,OAAO,WAAA,CAAA;oBACtB,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,OAAO,CAAC,cAAc,CAAA,CAAA,CAAA,CAAC;AAC/B,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyEG;IACI,cAAc,CACnB,UAAwC,EAAE,EAAA;AAE1C,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;AACzB,YAAA,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;AAC5B,SAAA;QAED,MAAM,OAAO,GAAgC,EAAE,CAAC;QAChD,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,aAAa,EAAE;AACzB,YAAA,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AACxB,SAAA;;QAGD,MAAM,kBAAkB,mCACnB,OAAO,CAAA,GACN,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,GAAG,EAAE,EAC1C,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC;QAChD,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,EAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,kBAAkB,EACrB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;AAUG;IACI,MAAM,oBAAoB,CAC/B,QAAc,EACd,SAAe,EACf,UAA8C,EAAE,EAAA;AAEhD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAC/F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,oBAAoB,CAC7D;AACE,gBA
AA,QAAQ,EAAE,oBAAoB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC/C,gBAAA,SAAS,EAAE,oBAAoB,CAAC,SAAS,EAAE,KAAK,CAAC;aAClD,EAEC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;AAEF,YAAA,MAAM,iBAAiB,GAAG;gBACxB,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,cAAc,EAAE,QAAQ,CAAC,cAAc;AACvC,gBAAA,cAAc,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC;AACjD,gBAAA,eAAe,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC;gBACnD,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,KAAK,EAAE,QAAQ,CAAC,KAAK;aACtB,CAAC;AAEF,YAAA,MAAM,GAAG,GACP,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe,EACzC,OAAO,EAAE,QAAQ,CAAC,OAAO,EACzB,IAAI,EAAE,QAAQ,CAAC,IAAI,EACnB,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC1B,EAAA,iBAAiB,CACrB,CAAC;AAEF,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEX,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;IACI,kBAAkB,GAAA;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACrD;AAED;;;;;;;;;;;;;AAaG;AACI,IAAA,qBAAqB,CAC1B,SAAgB,EAChB,WAAA,GAAqC,qBAAqB,CAAC,KAAK,CAAC,GAAG,CAAC,EACrE,aAAA,GAAwB,KAAK,EAC7B,UAA+C,EAAE,EAAA;QAEjD,IAAI,EAAE,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;AAC5D,YAAA,MAAM,UAAU,CACd,+FAA+F,CAChG,CAAC;AACH,SAAA;QAED,IAAI,SAAS,KAAK,SAAS,EAAE;AAC3B,YAAA,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;AACvB,YAAA,SAAS,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC;AACnD,SAAA;AAED,QAAA,MAAM,GAAG,GAAG,iCAAiC,CAAA,MAAA,CAAA,MAAA,CAAA,EAEzC,WAAW;YACX,SAAS;YACT,aAAa,EACb,QAAQ,EAAE,kBAAkB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAC/C,EAAA,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;QAEb,OAAO,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;KACxC;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/storage-blob/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 00000000..0e425423 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/LICENSE.txt b/node_modules/@azure/storage-blob/node_modules/tslib/LICENSE.txt new file mode 100644 index 00000000..bfe6430c --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/README.md b/node_modules/@azure/storage-blob/node_modules/tslib/README.md new file mode 100644 index 00000000..72ff8e79 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. 
For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/modules/index.js b/node_modules/@azure/storage-blob/node_modules/tslib/modules/index.js new file mode 100644 index 00000000..aaac8bf9 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/modules/package.json b/node_modules/@azure/storage-blob/node_modules/tslib/modules/package.json new file mode 100644 index 00000000..aafa0e4b --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/package.json b/node_modules/@azure/storage-blob/node_modules/tslib/package.json new file mode 100644 index 00000000..0ec2c634 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + 
"homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.d.ts b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.d.ts new file mode 100644 index 00000000..b8e49f08 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. 
+ * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. 
+ * @returns A synchronous iterator yielding `__await` instances on every odd invocation
+ * and returning the awaited `IteratorResult` passed to `next` every even invocation.
+ */
+export declare function __asyncDelegator(o: any): any;
+
+/**
+ * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object.
+ *
+ * @param o The object.
+ * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`.
+ */
+export declare function __asyncValues(o: any): any;
+
+/**
+ * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays.
+ *
+ * @param cooked The cooked possibly-sparse array.
+ * @param raw The raw string content.
+ */
+export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray;
+
+/**
+ * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS.
+ *
+ * ```js
+ * import Default, { Named, Other } from "mod";
+ * // or
+ * import { default as Default, Named, Other } from "mod";
+ * ```
+ *
+ * @param mod The CommonJS module exports object.
+ */
+export declare function __importStar<T>(mod: T): T;
+
+/**
+ * Used to shim default imports in ECMAScript Modules transpiled to CommonJS.
+ *
+ * ```js
+ * import Default from "mod";
+ * ```
+ *
+ * @param mod The CommonJS module exports object.
+ */
+export declare function __importDefault<T>(mod: T): T | { default: T };
+
+/**
+ * Emulates reading a private instance field.
+ *
+ * @param receiver The instance from which to read the private field.
+ * @param state A WeakMap containing the private field value for an instance.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, get(o: T): V | undefined },
+    kind?: "f"
+): V;
+
+/**
+ * Emulates reading a private static field.
+ *
+ * @param receiver The object from which to read the private static field.
+ * @param state The class constructor containing the definition of the static field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates evaluating a private instance "get" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private "get" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates evaluating a private static "get" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "get" accessor.
+ * @param state The class constructor containing the definition of the static "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates reading a private instance method.
+ *
+ * @param receiver The instance from which to read a private method.
+ * @param state A WeakSet used to verify an instance supports the private method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private instance method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates reading a private static method.
+ *
+ * @param receiver The object from which to read the private static method.
+ * @param state The class constructor containing the definition of the static method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private static method.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: T,
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates writing to a private instance field.
+ *
+ * @param receiver The instance on which to set a private field value.
+ * @param state A WeakMap used to store the private field value for an instance.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, set(o: T, value: V): unknown },
+    value: V,
+    kind?: "f"
+): V;
+
+/**
+ * Emulates writing to a private static field.
+ *
+ * @param receiver The object on which to set the private static field.
+ * @param state The class constructor containing the definition of the private static field.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    value: V,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates writing to a private instance "set" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private instance "set" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "set" accessor.
+ * @param value The value to store in the private accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "set" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    value: V,
+    kind: "a",
+    f: (v: V) => void
+): V;
+
+/**
+ * Emulates writing to a private static "set" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "set" accessor.
+ * @param state The class constructor containing the definition of the static "set" accessor.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "set" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    value: V,
+    kind: "a",
+    f: (v: V) => void
+): V;
+
+/**
+ * Checks for the existence of a private field/method/accessor.
+ *
+ * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member.
+ * @param receiver The object for which to test the presence of the private member.
+ */
+export declare function __classPrivateFieldIn(
+    state: (new (...args: any[]) => unknown) | { has(o: any): boolean },
+    receiver: unknown,
+): boolean;
+
+/**
+ * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`.
+ *
+ * @param object The local `exports` object.
+ * @param target The object to re-export from.
+ * @param key The property key of `target` to re-export.
+ * @param objectKey The property key to re-export as. Defaults to `key`.
+ */
+export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void;
diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.html b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.html
new file mode 100644
index 00000000..b122e41b
--- /dev/null
+++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.html
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.js b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.js
new file mode 100644
index 00000000..e6d77771
--- /dev/null
+++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.js
@@ -0,0 +1,248 @@
+/******************************************************************************
+Copyright (c) Microsoft Corporation.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+***************************************************************************** */
+/* global Reflect, Promise */
+
+var extendStatics = function(d, b) {
+    extendStatics = Object.setPrototypeOf ||
+        ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+        function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+    return extendStatics(d, b);
+};
+
+export function __extends(d, b) {
+    if (typeof b !== "function" && b !== null)
+        throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+    extendStatics(d, b);
+    function __() { this.constructor = d; }
+    d.prototype = b === null ?
Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.html b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.html new file mode 100644 index 00000000..44c9ba51 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.js b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.js new file mode 100644 index 00000000..2b7885c1 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/storage-blob/package.json b/node_modules/@azure/storage-blob/package.json new file mode 100644 index 00000000..5ff3c690 --- /dev/null +++ b/node_modules/@azure/storage-blob/package.json @@ -0,0 +1,187 @@ +{ + "name": "@azure/storage-blob", + "sdk-type": "client", + "version": "12.10.0", + "description": "Microsoft Azure Storage SDK for JavaScript - Blob", + "main": "./dist/index.js", + "module": "./dist-esm/storage-blob/src/index.js", + "browser": { + "./dist-esm/storage-blob/src/index.js": "./dist-esm/storage-blob/src/index.browser.js", + "./dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js": "./dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js", + "./dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js": "./dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js", + "./dist-esm/storage-blob/src/utils/utils.node.js": "./dist-esm/storage-blob/src/utils/utils.browser.js", + "./dist-esm/storage-blob/test/utils/index.js": "./dist-esm/storage-blob/test/utils/index.browser.js", + "./dist-esm/storage-blob/src/BatchUtils.js": "./dist-esm/storage-blob/src/BatchUtils.browser.js", + "./dist-esm/storage-blob/src/BlobDownloadResponse.js": "./dist-esm/storage-blob/src/BlobDownloadResponse.browser.js", + "./dist-esm/storage-blob/src/BlobQueryResponse.js": "./dist-esm/storage-blob/src/BlobQueryResponse.browser.js", + "./dist-esm/storage-common/src/BufferScheduler.js": "./dist-esm/storage-common/src/BufferScheduler.browser.js", + "./dist-esm/storage-common/src/index.js": "./dist-esm/storage-common/src/index.browser.js", + "fs": false, + "os": false, + "process": false + }, + "react-native": { + "./dist/index.js": "./dist-esm/storage-blob/src/index.js" + }, + "types": "./types/latest/storage-blob.d.ts", + "typesVersions": { + "<3.6": { + "*": [ + "./types/3.1/storage-blob.d.ts" + ] + } + }, 
+ "engines": { + "node": ">=12.0.0" + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:browser": "tsc -p . && dev-tool run bundle", + "build:node": "tsc -p . && dev-tool run bundle", + "build:test": "tsc -p . && dev-tool run bundle", + "build:types": "downlevel-dts types/latest types/3.1", + "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", + "build:samples": "echo Obsolete;", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* types temp statistics.html coverage coverage-browser .nyc_output *.tgz *.log test*.xml TEST*.xml", + "clean:samples": "rimraf samples/v12/javascript/node_modules samples/v12/typescript/node_modules samples/v12/typescript/dist samples/v12/typescript/package-lock.json samples/v12/javascript/package-lock.json", + "extract-api": "tsc -p . && api-extractor run --local", + "execute:samples": "dev-tool samples run samples-dev", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "karma start --single-run", + "integration-test:node": "nyc mocha -r esm --require source-map-support/register --reporter ../../../common/tools/mocha-multi-reporter.js --full-trace -t 300000 \"dist-esm/storage-blob/test/*.spec.js\" \"dist-esm/storage-blob/test/node/*.spec.js\"", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "generate:client": "autorest --typescript ./swagger/README.md", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser", + "test:node": "npm run clean && npm run build:test && npm run unit-test:node", + "test": "npm run clean && npm run build:test && npm run unit-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm --require ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace \"test/{,!(browser)/**/}*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "emulator-tests": "cross-env STORAGE_CONNECTION_STRING=UseDevelopmentStorage=true && npm run test:node" + }, + "files": [ + "BreakingChanges.md", + "dist/", + "dist-esm/storage-blob/src/", + "dist-esm/storage-internal-avro/src/", + "dist-esm/storage-common/src/", + "types/latest/storage-blob.d.ts", + "types/3.1/storage-blob.d.ts", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "storage", + "blob", + "cloud", + "node.js", + "typescript", + "javascript", + "browser" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/", + "sideEffects": false, + "//metadata": { + "constantPaths": [ + { + "path": "src/generated/src/storageClientContext.ts", + "prefix": "packageVersion" + }, + { + "path": "src/utils/constants.ts", + 
"prefix": "SDK_VERSION" + }, + { + "path": "swagger/README.md", + "prefix": "package-version" + } + ] + }, + "//sampleConfiguration": { + "skip": [ + "advancedRequestOptions.js", + "anonymousAuth.js", + "azureAdAuth.js", + "customPipeline.js", + "customizedClientHeaders.js", + "listBlobsByHierarchy.js", + "listBlobs.js", + "listContainers.js", + "snapshots.js", + "sharedKeyAuth.js" + ], + "productName": "Azure Storage Blob", + "productSlugs": [ + "azure", + "azure-storage" + ], + "requiredResources": { + "Azure Storage Account": "https://docs.microsoft.com/azure/storage/common/storage-account-overview" + } + }, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^2.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/identity": "^2.0.1", + "@azure/test-utils": "^1.0.0", + "@azure-tools/test-recorder": "^1.0.0", + "@azure/test-utils-perf": "^1.0.0", + "@microsoft/api-extractor": "7.18.11", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "@types/node-fetch": "^2.5.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "dotenv": "^8.2.0", + "downlevel-dts": "^0.8.0", + "es6-promise": "^4.2.5", + "eslint": "^7.15.0", + "esm": "^3.2.18", + "inherits": "^2.0.3", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-json-preprocessor": "^0.3.3", + "karma-json-to-file-reporter": "^1.0.1", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "puppeteer": "^13.5.1", + "rimraf": "^3.0.0", + "source-map-support": "^0.5.9", + "ts-node": "^10.0.0", + "typescript": "~4.2.0", + "util": "^0.12.1" + } +} diff --git a/node_modules/@azure/storage-blob/types/3.1/storage-blob.d.ts b/node_modules/@azure/storage-blob/types/3.1/storage-blob.d.ts new file mode 100644 index 00000000..19b0109a --- /dev/null +++ b/node_modules/@azure/storage-blob/types/3.1/storage-blob.d.ts @@ -0,0 +1,9345 @@ +/// +import { AbortSignalLike } from '@azure/abort-controller'; +import { AzureLogger } from '@azure/logger'; +import { BaseRequestPolicy } from '@azure/core-http'; +import * as coreHttp from '@azure/core-http'; +import { deserializationPolicy } from '@azure/core-http'; +import { HttpHeaders } from '@azure/core-http'; +import { HttpOperationResponse } from '@azure/core-http'; +import { HttpRequestBody } from '@azure/core-http'; +import { HttpResponse } from '@azure/core-http'; +import { HttpClient as IHttpClient } from '@azure/core-http'; +import { KeepAliveOptions } from '@azure/core-http'; +import { OperationTracingOptions } from '@azure/core-tracing'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; +import { PollerLike } from '@azure/core-lro'; +import { PollOperationState } from '@azure/core-lro'; +import { ProxyOptions } from '@azure/core-http'; +import { Readable } from 'stream'; +import { RequestPolicy } from '@azure/core-http'; +import { RequestPolicyFactory } from '@azure/core-http'; +import { RequestPolicyOptions } from 
'@azure/core-http'; +import { RestError } from '@azure/core-http'; +import { ServiceClientOptions } from '@azure/core-http'; +import { TokenCredential } from '@azure/core-http'; +import { TransferProgressEvent } from '@azure/core-http'; +import { UserAgentOptions } from '@azure/core-http'; +import { WebResource } from '@azure/core-http'; +/** An Access policy */ +export declare interface AccessPolicy { + /** the date-time the policy is active */ + startsOn?: string; + /** the date-time the policy expires */ + expiresOn?: string; + /** the permissions for the acl policy */ + permissions?: string; +} +/** Defines values for AccessTier. */ +export declare type AccessTier = "P4" | "P6" | "P10" | "P15" | "P20" | "P30" | "P40" | "P50" | "P60" | "P70" | "P80" | "Hot" | "Cool" | "Archive"; +/** Defines values for AccountKind. */ +export declare type AccountKind = "Storage" | "BlobStorage" | "StorageV2" | "FileStorage" | "BlockBlobStorage"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class AccountSASPermissions { + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions: string): AccountSASPermissions; + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions; + /** + * Permission to read resources and list queues and tables granted. + */ + read: boolean; + /** + * Permission to write resources granted. + */ + write: boolean; + /** + * Permission to create blobs and files granted. + */ + delete: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add: boolean; + /** + * Permission to create blobs and files granted. + */ + create: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update: boolean; + /** + * Permission to get and delete messages granted. + */ + process: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Permission to filter blobs. + */ + filter: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} +/** + * A type that looks like an account SAS permission. 
+ * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface AccountSASPermissionsLike { + /** + * Permission to read resources and list queues and tables granted. + */ + read?: boolean; + /** + * Permission to write resources granted. + */ + write?: boolean; + /** + * Permission to delete blobs and files granted. + */ + delete?: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion?: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list?: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add?: boolean; + /** + * Permission to create blobs and files granted. + */ + create?: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update?: boolean; + /** + * Permission to get and delete messages granted. + */ + process?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Permission to filter blobs. + */ + filter?: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +export declare class AccountSASResourceTypes { + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes: string): AccountSASResourceTypes; + /** + * Permission to access service level APIs granted. + */ + service: boolean; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container: boolean; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object: boolean; + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +export declare class AccountSASServices { + /** + * Creates an {@link AccountSASServices} from the specified services string. 
This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services: string): AccountSASServices; + /** + * Permission to access blob resources granted. + */ + blob: boolean; + /** + * Permission to access file resources granted. + */ + file: boolean; + /** + * Permission to access queue resources granted. + */ + queue: boolean; + /** + * Permission to access table resources granted. + */ + table: boolean; + /** + * Converts the given services to a string. + * + */ + toString(): string; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once + * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation + * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters} + * exist because the former is mutable and a logical representation while the latter is immutable and used to generate + * actual REST requests. + * + * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1 + * for more conceptual information on SAS + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * for descriptions of the parameters, including which are required + */ +export declare interface AccountSASSignatureValues { + /** + * If not provided, this defaults to the service version targeted by this version of the library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * The time after which the SAS will no longer work. + */ + expiresOn: Date; + /** + * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help + * constructing the permissions string. + */ + permissions: AccountSASPermissions; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to + * construct this value. + */ + services: string; + /** + * The values that indicate the resource types accessible with this SAS. Please refer + * to {@link AccountSASResourceTypes} to construct this value. + */ + resourceTypes: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +export declare class AnonymousCredential extends Credential_2 { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): AnonymousCredentialPolicy; +} +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +export declare class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. 
+ * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); +} +/** Defines headers for AppendBlob_appendBlockFromUrl operation. */ +export declare interface AppendBlobAppendBlockFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation. + */ +export declare interface AppendBlobAppendBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. 
+ * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} +/** Contains response data for the appendBlockFromUrl operation. */ +export declare type AppendBlobAppendBlockFromUrlResponse = AppendBlobAppendBlockFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockFromUrlHeaders; + }; +}; +/** Defines headers for AppendBlob_appendBlock operation. */ +export declare interface AppendBlobAppendBlockHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. 
*/ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link AppendBlobClient.appendBlock} operation. + */ +export declare interface AppendBlobAppendBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Callback to receive events on the progress of append block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the appendBlock operation. */ +export declare type AppendBlobAppendBlockResponse = AppendBlobAppendBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockHeaders; + }; +}; +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +export declare class AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. + */ + private appendBlobContext; + /** + * + * Creates an instance of AppendBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. 
] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. 
+ * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): AppendBlobClient; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + create(options?: AppendBlobCreateOptions): Promise; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + createIfNotExists(options?: AppendBlobCreateIfNotExistsOptions): Promise; + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + seal(options?: AppendBlobSealOptions): Promise; + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + appendBlock(body: HttpRequestBody, contentLength: number, options?: AppendBlobAppendBlockOptions): Promise; + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + appendBlockFromURL(sourceURL: string, sourceOffset: number, count: number, options?: AppendBlobAppendBlockFromURLOptions): Promise; +} +/** Defines headers for AppendBlob_create operation. */ +export declare interface AppendBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. 
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link AppendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * HTTP headers to set when creating append blobs. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. 
+ */ + legalHold?: boolean; +} +/** + * Contains response data for the {@link appendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} +/** + * Options to configure {@link AppendBlobClient.create} operation. + */ +export declare interface AppendBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating append blobs. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when creating append blobs. A common header + * to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the create operation. */ +export declare type AppendBlobCreateResponse = AppendBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobCreateHeaders; + }; +}; +/** + * Conditions to add to the creation of this append blob. + */ +export declare interface AppendBlobRequestConditions extends BlobRequestConditions, AppendPositionAccessConditions { +} +/** + * Options to configure {@link AppendBlobClient.seal} operation. + */ +export declare interface AppendBlobSealOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet. + */ + conditions?: AppendBlobRequestConditions; +} +/** Parameter group */ +export declare interface AppendPositionAccessConditions { + /** Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). 
*/ + maxSize?: number; + /** Optional conditional header, used only for the Append Block operation. A number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + appendPosition?: number; +} +/** Defines values for ArchiveStatus. */ +export declare type ArchiveStatus = "rehydrate-pending-to-hot" | "rehydrate-pending-to-cool"; +export { BaseRequestPolicy }; +/** + * A request associated with a batch operation. + */ +export declare interface BatchSubRequest { + /** + * The URL of the resource to request operation. + */ + url: string; + /** + * The credential used for sub request. + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. + * You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; +} +/** + * The response data associated with a single request within a batch operation. + */ +export declare interface BatchSubResponse { + /** + * The status code of the sub operation. + */ + status: number; + /** + * The status message of the sub operation. + */ + statusMessage: string; + /** + * The error code of the sub operation, if the sub operation failed. + */ + errorCode?: string; + /** + * The HTTP response headers. + */ + headers: HttpHeaders; + /** + * The body as text. + */ + bodyAsText?: string; + /** + * The batch sub request corresponding to the sub response. + */ + _request: BatchSubRequest; +} +/** Defines headers for Blob_abortCopyFromURL operation. */ +export declare interface BlobAbortCopyFromURLHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.abortCopyFromURL} operation. + */ +export declare interface BlobAbortCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} +/** Contains response data for the abortCopyFromURL operation. */ +export declare type BlobAbortCopyFromURLResponse = BlobAbortCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobAbortCopyFromURLHeaders; + }; +}; +/** + * Options to configure Blob - Acquire Lease operation. 
+ */ +export declare interface BlobAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +export declare class BlobBatch { + private batchRequest; + private readonly batch; + private batchType; + constructor(); + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType(): string; + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody(): string; + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests(): Map; + private addSubRequestInternal; + private setBatchType; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlob(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param options - + */ + deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. 
+ * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobAccessTier(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param tier - + * @param options - + */ + setBlobAccessTier(blobClient: BlobClient, tier: AccessTier, options?: BlobSetTierOptions): Promise; +} +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +export declare class BlobBatchClient { + private serviceOrContainerContext; + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. 
+ */ + createBatch(): BlobBatch; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operations will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resources to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlobs(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation(subrequest) will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs to delete. + * @param options - + */ + deleteBlobs(blobClients: BlobClient[], options?: BlobDeleteOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobsAccessTier(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. 
A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs which should have a new tier set. + * @param tier - + * @param options - + */ + setBlobsAccessTier(blobClients: BlobClient[], tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + submitBatch(batchRequest: BlobBatch, options?: BlobBatchSubmitBatchOptionalParams): Promise; +} +/** + * Contains response data for the {@link deleteBlobs} operation. + */ +export declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse; +/** + * Contains response data for the {@link setBlobsAccessTier} operation. + */ +export declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse; +/** + * Options to configure the Service - Submit Batch Optional Params. + */ +export declare interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel { +} +/** + * Contains response data for blob batch operations. + */ +export declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse & ServiceSubmitBatchHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions { + /** + * The amount of time in milliseconds the poller should wait between + * calls to the service to determine the status of the Blob copy. + * Defaults to 15 seconds. + */ + intervalInMs?: number; + /** + * Callback to receive the state of the copy progress. + */ + onProgress?: (state: BlobBeginCopyFromUrlPollState) => void; + /** + * Serialized poller state that can be used to resume polling from. + * This may be useful when starting a copy on one process or thread + * and you wish to continue polling on another process or thread. 
+ * + * To get serialized poller state, call `poller.toString()` on an existing + * poller. + */ + resumeFrom?: string; +} +/** + * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}. + * + * This state is passed into the user-specified `onProgress` callback + * whenever copy progress is detected. + */ +export declare interface BlobBeginCopyFromUrlPollState extends PollOperationState { + /** + * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}. + */ + readonly blobClient: CopyPollerBlobClient; + /** + * The copyId that identifies the in-progress blob copy. + */ + copyId?: string; + /** + * the progress of the blob copy as reported by the service. + */ + copyProgress?: string; + /** + * The source URL provided in {@link BlobClient.beginCopyFromURL}. + */ + copySource: string; + /** + * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call. + * This is exposed for the poller and should not be modified directly. + */ + readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions; +} +/** + * Contains response data for the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse { +} +/** + * Options to configure Blob - Break Lease operation. + */ +export declare interface BlobBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Options to configure Blob - Change Lease operation. + */ +export declare interface BlobChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +export declare class BlobClient extends StorageClient { + /** + * blobContext provided by protocol layer. + */ + private blobContext; + private _name; + private _containerName; + private _versionId?; + private _snapshot?; + /* + * The name of the blob. + */ + readonly name: string; + /* + * The name of the storage container the blob is associated with. + */ + readonly containerName: string; + /** + * + * Creates an instance of BlobClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. 
+ * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot: string): BlobClient; + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId: string): BlobClient; + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient(): AppendBlobClient; + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient(): BlockBlobClient; + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient(): PageBlobClient; + /** + * Reads or downloads a blob from the system, including its metadata and properties. 
+ * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + download(offset?: number, count?: number, options?: BlobDownloadOptions): Promise; + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + exists(options?: BlobExistsOptions): Promise; + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + getProperties(options?: BlobGetPropertiesOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. 
+ */ + delete(options?: BlobDeleteOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + deleteIfExists(options?: BlobDeleteOptions): Promise; + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. + */ + undelete(options?: BlobUndeleteOptions): Promise; + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + setHTTPHeaders(blobHTTPHeaders?: BlobHTTPHeaders, options?: BlobSetHTTPHeadersOptions): Promise; + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + setMetadata(metadata?: Metadata, options?: BlobSetMetadataOptions): Promise; + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. + * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + setTags(tags: Tags, options?: BlobSetTagsOptions): Promise; + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + getTags(options?: BlobGetTagsOptions): Promise; + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + createSnapshot(options?: BlobCreateSnapshotOptions): Promise; + /** + * Asynchronously copies a blob to a destination within the storage account. 
+ * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + beginCopyFromURL(copySource: string, options?: BlobBeginCopyFromURLOptions): Promise, BlobBeginCopyFromURLResponse>>; + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. + */ + abortCopyFromURL(copyId: string, options?: BlobAbortCopyFromURLOptions): Promise; + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. 
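+ *
+ * Example usage (an illustrative sketch, assuming an existing `blobClient` and a
+ * `sourceUrl` that the service can read, e.g. one carrying a SAS token):
+ *
+ * ```js
+ * // The returned promise resolves only after the copy has completed.
+ * const copyResponse = await blobClient.syncCopyFromURL(sourceUrl);
+ * console.log("Copy status:", copyResponse.copyStatus);
+ * ```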
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url
+ *
+ * @param copySource - The source URL to copy from. A Shared Access Signature (SAS) may be needed for authentication.
+ * @param options -
+ */
+ syncCopyFromURL(copySource: string, options?: BlobSyncCopyFromURLOptions): Promise<BlobCopyFromURLResponse>;
+ /**
+ * Sets the tier on a blob. The operation is allowed on a page blob in a premium
+ * storage account and on a block blob in a blob storage account (locally redundant
+ * storage only). A premium page blob's tier determines the allowed size, IOPS,
+ * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive
+ * storage type. This operation does not update the blob's ETag.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier
+ *
+ * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive.
+ * @param options - Optional options to the Blob Set Tier operation.
+ */
+ setAccessTier(tier: BlockBlobTier | PremiumPageBlobTier | string, options?: BlobSetTierOptions): Promise<BlobSetTierResponse>;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob in parallel to a buffer.
+ * Offset and count are optional, downloads the entire blob if they are not provided.
+ *
+ * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two
+ * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,
+ * consider {@link downloadToFile}.
+ *
+ * @param offset - From which position of the block blob to download (in bytes)
+ * @param count - How much data (in bytes) to be downloaded. Will download to the end when passing undefined
+ * @param options - BlobDownloadToBufferOptions
+ */
+ downloadToBuffer(offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise<Buffer>;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob in parallel to a buffer.
+ * Offset and count are optional, downloads the entire blob if they are not provided.
+ *
+ * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two
+ * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,
+ * consider {@link downloadToFile}.
+ *
+ * @param buffer - Buffer to be filled; must have a length larger than count
+ * @param offset - From which position of the block blob to download (in bytes)
+ * @param count - How much data (in bytes) to be downloaded. Will download to the end when passing undefined
+ * @param options - BlobDownloadToBufferOptions
+ */
+ downloadToBuffer(buffer: Buffer, offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise<Buffer>;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob to a local file.
+ * Fails if the given file path already exists.
+ * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.
+ *
+ * @param filePath -
+ * @param offset - From which position of the block blob to download.
+ * @param count - How much data to be downloaded. Will download to the end when passing undefined.
+ * @param options - Options to the Blob download operation.
+ * @returns The response data for blob download operation,
+ * but with readableStreamBody set to undefined since its
+ * content is already read and written into a local file
+ * at the specified path.
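+ *
+ * Example usage (an illustrative sketch, assuming an existing `blobClient` and a
+ * writable local path of your choice):
+ *
+ * ```js
+ * // Download the entire blob to ./downloaded.bin and inspect the returned properties.
+ * const response = await blobClient.downloadToFile("./downloaded.bin");
+ * console.log("Downloaded blob content type:", response.contentType);
+ * ```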
+ */
+ downloadToFile(filePath: string, offset?: number, count?: number, options?: BlobDownloadOptions): Promise<BlobDownloadResponseParsed>;
+ private getBlobAndContainerNamesFromUrl;
+ /**
+ * Asynchronously copies a blob to a destination within the storage account.
+ * In version 2012-02-12 and later, the source for a Copy Blob operation can be
+ * a committed blob in any Azure storage account.
+ * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
+ * an Azure file in any Azure storage account.
+ * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
+ * operation to copy from another storage account.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
+ *
+ * @param copySource - url to the source Azure Blob/File.
+ * @param options - Optional options to the Blob Start Copy From URL operation.
+ */
+ private startCopyFromURL;
+ /**
+ * Only available for BlobClient constructed with a shared key credential.
+ *
+ * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties
+ * and parameters passed in. The SAS is signed by the shared key credential of the client.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+ *
+ * @param options - Optional parameters.
+ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+ */
+ generateSasUrl(options: BlobGenerateSasUrlOptions): Promise<string>;
+ /**
+ * Delete the immutability policy on the blob.
+ *
+ * @param options - Optional options to delete immutability policy on the blob.
+ */
+ deleteImmutabilityPolicy(options?: BlobDeleteImmutabilityPolicyOptions): Promise<BlobDeleteImmutabilityPolicyResponse>;
+ /**
+ * Set immutability policy on the blob.
+ *
+ * @param options - Optional options to set immutability policy on the blob.
+ */
+ setImmutabilityPolicy(immutabilityPolicy: BlobImmutabilityPolicy, options?: BlobSetImmutabilityPolicyOptions): Promise<BlobSetImmutabilityPolicyResponse>;
+ /**
+ * Set legal hold on the blob.
+ *
+ * @param options - Optional options to set legal hold on the blob.
+ */
+ setLegalHold(legalHoldEnabled: boolean, options?: BlobSetLegalHoldOptions): Promise<BlobSetLegalHoldResponse>;
+}
+/** Defines headers for Blob_copyFromURL operation. */
+export declare interface BlobCopyFromURLHeaders {
+ /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */
+ etag?: string;
+ /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */
+ lastModified?: Date;
+ /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
+ clientRequestId?: string;
+ /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
+ requestId?: string;
+ /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
+ version?: string;
+ /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob.
*/ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: SyncCopyStatusType; + /** This response header is returned so that the client can check for the integrity of the copied content. This header is only returned if the source content MD5 was specified. */ + contentMD5?: Uint8Array; + /** This response header is returned so that the client can check for the integrity of the copied content. */ + xMsContentCrc64?: Uint8Array; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the copyFromURL operation. */ +export declare type BlobCopyFromURLResponse = BlobCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCopyFromURLHeaders; + }; +}; +/** Defines values for BlobCopySourceTags. */ +export declare type BlobCopySourceTags = "REPLACE" | "COPY"; +/** Defines headers for Blob_createSnapshot operation. */ +export declare interface BlobCreateSnapshotHeaders { + /** Uniquely identifies the snapshot and indicates the snapshot version. It may be used in subsequent requests to access the snapshot */ + snapshot?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** True if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. For a snapshot request, this header is set to true when metadata was provided in the request and encrypted with a customer-provided key. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.createSnapshot} operation. + */ +export declare interface BlobCreateSnapshotOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. 
+ * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet when creating blob snapshots. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the createSnapshot operation. */ +export declare type BlobCreateSnapshotResponse = BlobCreateSnapshotHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCreateSnapshotHeaders; + }; +}; +/** Defines headers for Blob_delete operation. */ +export declare interface BlobDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the {@link BlobClient.deleteIfExists} operation. + */ +export declare interface BlobDeleteIfExistsResponse extends BlobDeleteResponse { + /** + * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place. + */ + succeeded: boolean; +} +/** Defines headers for Blob_deleteImmutabilityPolicy operation. */ +export declare interface BlobDeleteImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} +/** + * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation. + */ +export declare interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the deleteImmutabilityPolicy operation. */ +export declare type BlobDeleteImmutabilityPolicyResponse = BlobDeleteImmutabilityPolicyHeaders & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteImmutabilityPolicyHeaders; + }; +}; +/** + * Options to configure the {@link BlobClient.delete} operation. + */ +export declare interface BlobDeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting blobs. + */ + conditions?: BlobRequestConditions; + /** + * Specifies options to delete blobs that have associated snapshots. + * - `include`: Delete the base blob and all of its snapshots. + * - `only`: Delete only the blob's snapshots and not the blob itself. + */ + deleteSnapshots?: DeleteSnapshotsOptionType; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the delete operation. */ +export declare type BlobDeleteResponse = BlobDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteHeaders; + }; +}; +/** Defines headers for Blob_download operation. */ +export declare interface BlobDownloadHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. 
For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. 
*/ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** The number of tags associated with the blob */ + tagCount?: number; + /** If this blob has been sealed */ + isSealed?: boolean; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} +/** Optional parameters. */ +export declare interface BlobDownloadOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + leaseAccessConditions?: LeaseAccessConditions; + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** Parameter group */ + cpkInfo?: CpkInfo; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. 
*/ + snapshot?: string; + /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */ + versionId?: string; + /** Return only the bytes of the blob in the specified range. */ + range?: string; + /** When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. */ + rangeGetContentMD5?: boolean; + /** When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 MB in size. */ + rangeGetContentCRC64?: boolean; +} +/** + * Options to configure the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve. + */ + snapshot?: string; + /** + * When this is set to true and download range of blob, the service returns the MD5 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentMD5?: boolean; + /** + * When this is set to true and download range of blob, the service returns the CRC64 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentCrc64?: boolean; + /** + * Conditions to meet when downloading blobs. + */ + conditions?: BlobRequestConditions; + /** + * Call back to receive events on the progress of download operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original body download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional `FileClient.download()` request will be made + * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached. + * + * Default value is 5, please set a larger value when loading large files in poor network. + */ + maxRetryRequests?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the download operation. */ +export declare type BlobDownloadResponseModel = BlobDownloadHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDownloadHeaders; + }; +}; +/** + * Contains response data for the {@link BlobClient.download} operation. 
+ */ +export declare interface BlobDownloadResponseParsed extends BlobDownloadResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} +/** + * Option interface for the {@link BlobClient.downloadToBuffer} operation. + */ +export declare interface BlobDownloadToBufferOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * blockSize is the data every request trying to download. + * Must be greater than or equal to 0. + * If set to 0 or undefined, blockSize will automatically calculated according to the blob size. + */ + blockSize?: number; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original block download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional FileClient.download() request will be made + * from the broken point, until the requested block has been successfully downloaded or + * maxRetryRequestsPerBlock is reached. + * + * Default value is 5, please set a larger value when in poor network. + */ + maxRetryRequestsPerBlock?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel download. + */ + concurrency?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** + * Options to configure the {@link BlobClient.exists} operation. + */ +export declare interface BlobExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Conditions to meet. + */ + conditions?: BlobRequestConditions; +} +/** + * An interface representing BlobFlatListSegment. + */ +export declare interface BlobFlatListSegment { + blobItems: BlobItem[]; +} +export declare interface BlobFlatListSegmentModel { + blobItems: BlobItemInternal[]; +} +/** + * Options to configure {@link BlobClient.generateSasUrl} operation. + */ +export declare interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: BlobSASPermissions; +} +/** Defines headers for Blob_getProperties operation. */ +export declare interface BlobGetPropertiesHeaders { + /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** Returns the date and time the blob was created. */ + createdOn?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. 
Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Included if the blob is incremental copy blob. */ + isIncrementalCopy?: boolean; + /** Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful incremental copy snapshot for this blob. */ + destinationSnapshot?: string; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** The size of the blob in bytes. For a page blob, this header returns the value of the x-ms-blob-content-length header that is stored with the blob. */ + contentLength?: number; + /** The content type specified for the blob. 
The default content type is 'application/octet-stream' */ + contentType?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The tier of page blob on a premium storage account or tier of block blob on blob storage LRS accounts. For a list of allowed premium page blob tiers, see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/premium-storage#features. For blob storage LRS accounts, valid values are Hot/Cool/Archive. */ + accessTier?: string; + /** For page blobs on a premium storage account only. 
If the access tier is not explicitly set on the blob, the tier is inferred based on its content length and this header will be returned with true value. */ + accessTierInferred?: boolean; + /** For blob storage LRS accounts, valid values are rehydrate-pending-to-hot/rehydrate-pending-to-cool. If the blob is being rehydrated and is not complete then this header is returned indicating that rehydrate is pending and also tells the destination tier. */ + archiveStatus?: string; + /** The time the tier was changed on the object. This is only returned if the tier on the block blob was ever set. */ + accessTierChangedOn?: Date; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** The number of tags associated with the blob */ + tagCount?: number; + /** The time this blob will expire. */ + expiresOn?: Date; + /** If this blob has been sealed */ + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. */ + rehydratePriority?: RehydratePriority; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting blob properties. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** + * Contains response data for the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} +/** Contains response data for the getProperties operation. */ +export declare type BlobGetPropertiesResponseModel = BlobGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobGetPropertiesHeaders; + }; +}; +/** Defines headers for Blob_getTags operation. */ +export declare interface BlobGetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.getTags} operation. + */ +export declare interface BlobGetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} +/** + * Contains response data for the {@link BlobClient.getTags} operation. + */ +export declare type BlobGetTagsResponse = { + tags: Tags; +} & BlobGetTagsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: BlobGetTagsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: BlobTags; + }; +}; +/** + * An interface representing BlobHierarchyListSegment. + */ +export declare interface BlobHierarchyListSegment { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItem[]; +} +export declare interface BlobHierarchyListSegmentModel { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItemInternal[]; +} +/** Parameter group */ +export declare interface BlobHTTPHeaders { + /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */ + blobCacheControl?: string; + /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */ + blobContentType?: string; + /** Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. */ + blobContentMD5?: Uint8Array; + /** Optional. Sets the blob's content encoding. If specified, this property is stored with the blob and returned with a read request. */ + blobContentEncoding?: string; + /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */ + blobContentLanguage?: string; + /** Optional. Sets the blob's Content-Disposition header. */ + blobContentDisposition?: string; +} +/** + * Describe immutable policy for blob. + */ +export declare interface BlobImmutabilityPolicy { + /** + * Specifies the date time when the blobs immutability policy is set to expire. + */ + expiriesOn?: Date; + /** + * Specifies the immutability policy mode to set on the blob. + */ + policyMode?: BlobImmutabilityPolicyMode; +} +/** Defines values for BlobImmutabilityPolicyMode. 
*/ +export declare type BlobImmutabilityPolicyMode = "Mutable" | "Unlocked" | "Locked"; +/** + * An Azure Storage blob + */ +export declare interface BlobItem { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + properties: BlobProperties; + metadata?: { + [propertyName: string]: string; + }; + tags?: Tags; + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + hasVersionsOnly?: boolean; +} +/** An Azure Storage blob */ +export declare interface BlobItemInternal { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + /** Properties of a blob */ + properties: BlobProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; + /** Blob tags */ + blobTags?: BlobTags; + /** Dictionary of */ + objectReplicationMetadata?: { + [propertyName: string]: string; + }; + /** Inactive root blobs which have any versions would have such tag with value true. */ + hasVersionsOnly?: boolean; +} +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +export declare class BlobLeaseClient { + private _leaseId; + private _url; + private _containerOrBlobOperation; + private _isContainer; + /* + * Gets the lease Id. + * + * @readonly + */ + readonly leaseId: string; + /* + * Gets the url. + * + * @readonly + */ + readonly url: string; + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client: ContainerClient | BlobClient, leaseId?: string); + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + acquireLease(duration: number, options?: LeaseOperationOptions): Promise; + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + changeLease(proposedLeaseId: string, options?: LeaseOperationOptions): Promise; + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + releaseLease(options?: LeaseOperationOptions): Promise; + /** + * To renew the lease. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + renewLease(options?: LeaseOperationOptions): Promise; + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + breakLease(breakPeriod: number, options?: LeaseOperationOptions): Promise; +} +export declare interface BlobPrefix { + name: string; +} +/** Properties of a blob */ +export declare interface BlobProperties { + createdOn?: Date; + lastModified: Date; + etag: string; + /** Size in bytes */ + contentLength?: number; + contentType?: string; + contentEncoding?: string; + contentLanguage?: string; + contentMD5?: Uint8Array; + contentDisposition?: string; + cacheControl?: string; + blobSequenceNumber?: number; + blobType?: BlobType; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + copyId?: string; + copyStatus?: CopyStatusType; + copySource?: string; + copyProgress?: string; + copyCompletedOn?: Date; + copyStatusDescription?: string; + serverEncrypted?: boolean; + incrementalCopy?: boolean; + destinationSnapshot?: string; + deletedOn?: Date; + remainingRetentionDays?: number; + accessTier?: AccessTier; + accessTierInferred?: boolean; + archiveStatus?: ArchiveStatus; + customerProvidedKeySha256?: string; + /** The name of the encryption scope under which the blob is encrypted. */ + encryptionScope?: string; + accessTierChangedOn?: Date; + tagCount?: number; + expiresOn?: Date; + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. */ + rehydratePriority?: RehydratePriority; + lastAccessedOn?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; +} +/** + * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}. + */ +export declare interface BlobQueryArrowConfiguration { + /** + * Kind. + */ + kind: "arrow"; + /** + * List of {@link BlobQueryArrowField} describing the schema of the data. + */ + schema: BlobQueryArrowField[]; +} +/** + * Describe a field in {@link BlobQueryArrowConfiguration}. + */ +export declare interface BlobQueryArrowField { + /** + * The type of the field. + */ + type: BlobQueryArrowFieldType; + /** + * The name of the field. + */ + name?: string; + /** + * The precision of the field. Required if type is "decimal". + */ + precision?: number; + /** + * The scale of the field. Required if type is is "decimal". + */ + scale?: number; +} +/** + * The type of a {@link BlobQueryArrowField}. 
+ */ +export declare type BlobQueryArrowFieldType = "int64" | "bool" | "timestamp[ms]" | "string" | "double" | "decimal"; +/** + * Options to query blob with CSV format. + */ +export declare interface BlobQueryCsvTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a CSV format blob. + */ + kind: "csv"; + /** + * Column separator. Default is ",". + */ + columnSeparator?: string; + /** + * Field quote. + */ + fieldQuote?: string; + /** + * Escape character. + */ + escapeCharacter?: string; + /** + * Has headers. Default is false. + */ + hasHeaders?: boolean; +} +/** + * Blob query error type. + */ +export declare interface BlobQueryError { + /** + * Whether error is fatal. Fatal error will stop query. + */ + isFatal: boolean; + /** + * Error name. + */ + name: string; + /** + * Position in bytes of the query. + */ + position: number; + /** + * Error description. + */ + description: string; +} +/** Defines headers for Blob_query operation. */ +export declare interface BlobQueryHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. 
This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletionTime?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. 
*/ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} +/** + * Options to query blob with JSON format. + */ +export declare interface BlobQueryJsonTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a JSON format blob. + */ + kind: "json"; +} +/** + * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}. + */ +export declare interface BlobQueryParquetConfiguration { + /** + * Kind. + */ + kind: "parquet"; +} +/** Contains response data for the query operation. */ +export declare type BlobQueryResponseModel = BlobQueryHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobQueryHeaders; + }; +}; +/** + * Options to configure Blob - Release Lease operation. + */ +export declare interface BlobReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Options to configure Blob - Renew Lease operation. + */ +export declare interface BlobRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * standard HTTP conditional headers, tags condition and lease condition + */ +export declare interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions { +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class BlobSASPermissions { + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): BlobSASPermissions; + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString(): string; +} +/** + * A type that looks like a Blob SAS permission. + * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface BlobSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. + */ + write?: boolean; + /** + * Specifies Delete access granted. + */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs. + */ +export declare interface BlobSASSignatureValues { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. 
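+ *
+ * For illustration (a sketch, not from the upstream docs), an expiring, read-only set of
+ * values for a blob-level SAS might look like:
+ *
+ * ```ts
+ * const sasValues: BlobSASSignatureValues = {
+ *   containerName: "my-container",              // hypothetical names
+ *   blobName: "hello.txt",
+ *   permissions: BlobSASPermissions.parse("r"), // read-only
+ *   startsOn: new Date(),
+ *   expiresOn: new Date(Date.now() + 60 * 60 * 1000) // valid for one hour
+ * };
+ * ```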
+ */ + expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource + * being accessed for help constructing the permissions string. + */ + permissions?: BlobSASPermissions | ContainerSASPermissions; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * The name of the container the SAS user may access. + */ + containerName: string; + /** + * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided. + */ + blobName?: string; + /** + * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09. + */ + snapshotTime?: string; + /** + * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10. + */ + versionId?: string; + /** + * Optional. The name of the access policy on the container this SAS references if any. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. + */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; + /** + * Optional. Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user + * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will + * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission + * check for the user specified in this value will be performed. This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to + * correlate SAS generation with storage resource access. This is only used for User Delegation SAS. + */ + correlationId?: string; +} +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +export declare class BlobServiceClient extends StorageClient { + /** + * serviceContext provided by protocol layer. + */ + private serviceContext; + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. 
] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString: string, options?: StoragePipelineOptions): BlobServiceClient; + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + * + * Example using DefaultAzureCredential from `@azure/identity`: + * + * ```js + * const account = ""; + * + * const defaultAzureCredential = new DefaultAzureCredential(); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * defaultAzureCredential + * ); + * ``` + * + * Example using an account name/key: + * + * ```js + * const account = "" + * const sharedKeyCredential = new StorageSharedKeyCredential(account, ""); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * sharedKeyCredential + * ); + * ``` + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName: string): ContainerClient; + /** + * Create a Blob container. + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + createContainer(containerName: string, options?: ContainerCreateOptions): Promise<{ + containerClient: ContainerClient; + containerCreateResponse: ContainerCreateResponse; + }>; + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. 
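+ *
+ * Example usage (an illustrative sketch, not from the upstream docs; it assumes the
+ * `blobServiceClient` created in the constructor examples above, and the container and
+ * blob names are hypothetical):
+ *
+ * ```ts
+ * // Create a container, write a single blob into it, then delete the container again.
+ * const { containerClient } = await blobServiceClient.createContainer("my-container");
+ * await containerClient.getBlockBlobClient("hello.txt").upload("Hello", 5);
+ * await blobServiceClient.deleteContainer("my-container");
+ * ```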
+ */ + deleteContainer(containerName: string, options?: ContainerDeleteMethodOptions): Promise; + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + undeleteContainer(deletedContainerName: string, deletedContainerVersion: string, options?: ServiceUndeleteContainerOptions): Promise<{ + containerClient: ContainerClient; + containerUndeleteResponse: ContainerUndeleteResponse; + }>; + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + private renameContainer; + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + getProperties(options?: ServiceGetPropertiesOptions): Promise; + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + setProperties(properties: BlobServiceProperties, options?: ServiceSetPropertiesOptions): Promise; + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + getStatistics(options?: ServiceGetStatisticsOptions): Promise; + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + getAccountInfo(options?: ServiceGetAccountInfoOptions): Promise; + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + private listContainersSegment; + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
+ */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ServiceFindBlobByTagsOptions): PagedAsyncIterableIterator; + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + private listSegments; + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. 
+ */ + private listItems; + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options?: ServiceListContainersOptions): PagedAsyncIterableIterator; + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + getUserDelegationKey(startsOn: Date, expiresOn: Date, options?: ServiceGetUserDelegationKeyOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient(): BlobBatchClient; + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. 
The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn?: Date, permissions?: AccountSASPermissions, resourceTypes?: string, options?: ServiceGenerateAccountSasUrlOptions): string; +} +/** Storage Service Properties. */ +export declare interface BlobServiceProperties { + /** Azure Analytics Logging settings. */ + blobAnalyticsLogging?: Logging; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + hourMetrics?: Metrics; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + minuteMetrics?: Metrics; + /** The set of CORS rules. */ + cors?: CorsRule[]; + /** The default version to use for requests to the Blob service if an incoming request's version is not specified. Possible values include version 2008-10-27 and all more recent versions */ + defaultServiceVersion?: string; + /** the retention policy which determines how long the associated data should persist */ + deleteRetentionPolicy?: RetentionPolicy; + /** The properties that enable an account to host a static website */ + staticWebsite?: StaticWebsite; +} +/** Stats for the storage service. */ +export declare interface BlobServiceStatistics { + /** Geo-Replication information for the Secondary Storage Service */ + geoReplication?: GeoReplication; +} +/** Defines headers for Blob_setHttpHeaders operation. */ +export declare interface BlobSetHTTPHeadersHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setHTTPHeaders} operation. + */ +export declare interface BlobSetHTTPHeadersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob HTTP headers. 
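+ *
+ * For example (an illustrative sketch, not from the upstream docs), the update can be made
+ * conditional on the blob's current ETag; here `blobClient` and `etag` are assumed to come
+ * from earlier code, e.g. `const { etag } = await blobClient.getProperties();`:
+ *
+ * ```ts
+ * // Only set the headers if the blob has not changed since its ETag was read.
+ * await blobClient.setHTTPHeaders(
+ *   { blobContentType: "text/plain" },
+ *   { conditions: { ifMatch: etag } }
+ * );
+ * ```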
+ */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the setHttpHeaders operation. */ +export declare type BlobSetHTTPHeadersResponse = BlobSetHTTPHeadersHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetHTTPHeadersHeaders; + }; +}; +/** Defines headers for Blob_setImmutabilityPolicy operation. */ +export declare interface BlobSetImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates the time the immutability policy will expire. */ + immutabilityPolicyExpiry?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; +} +/** + * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation. + */ +export declare interface BlobSetImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + modifiedAccessCondition?: ModificationConditions; +} +/** Contains response data for the setImmutabilityPolicy operation. */ +export declare type BlobSetImmutabilityPolicyResponse = BlobSetImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetImmutabilityPolicyHeaders; + }; +}; +/** Defines headers for Blob_setLegalHold operation. */ +export declare interface BlobSetLegalHoldHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates if the blob has a legal hold. */ + legalHold?: boolean; +} +/** + * Options for setting legal hold {@link BlobClient.setLegalHold} operation. + */ +export declare interface BlobSetLegalHoldOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the setLegalHold operation. */ +export declare type BlobSetLegalHoldResponse = BlobSetLegalHoldHeaders & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetLegalHoldHeaders; + }; +}; +/** Defines headers for Blob_setMetadata operation. */ +export declare interface BlobSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setMetadata} operation. + */ +export declare interface BlobSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob metadata. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the setMetadata operation. */ +export declare type BlobSetMetadataResponse = BlobSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetMetadataHeaders; + }; +}; +/** Defines headers for Blob_setTags operation. 
*/ +export declare interface BlobSetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setTags} operation. + */ +export declare interface BlobSetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} +/** Contains response data for the setTags operation. */ +export declare type BlobSetTagsResponse = BlobSetTagsHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTagsHeaders; + }; +}; +/** Defines headers for Blob_setTier operation. */ +export declare interface BlobSetTierHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and newer. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setAccessTier} operation. + */ +export declare interface BlobSetTierOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; +} +/** Contains response data for the setTier operation. */ +export declare type BlobSetTierResponse = BlobSetTierHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTierHeaders; + }; +}; +/** Defines headers for Blob_startCopyFromURL operation. */ +export declare interface BlobStartCopyFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobStartCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the blob that are being copied. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | PremiumPageBlobTier | string; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Overrides the sealed state of the destination blob. Default true. + */ + sealBlob?: boolean; +} +/** Contains response data for the startCopyFromURL operation. */ +export declare type BlobStartCopyFromURLResponse = BlobStartCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. 
*/ + parsedHeaders: BlobStartCopyFromURLHeaders; + }; +}; +/** + * Options to configure the {@link BlobClient.syncCopyFromURL} operation. + */ +export declare interface BlobSyncCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Default 'REPLACE'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} +export declare interface BlobTag { + key: string; + value: string; +} +/** Blob tags */ +export declare interface BlobTags { + blobTagSet: BlobTag[]; +} +/** Defines values for BlobType. */ +export declare type BlobType = "BlockBlob" | "PageBlob" | "AppendBlob"; +/** Defines headers for Blob_undelete operation. */ +export declare interface BlobUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.undelete} operation. + */ +export declare interface BlobUndeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the undelete operation. */ +export declare type BlobUndeleteResponse = BlobUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobUndeleteHeaders; + }; +}; +/** + * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and + * {@link BlockBlobClient.uploadBrowserDate}. + */ +export declare type BlobUploadCommonResponse = BlockBlobUploadHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse; +}; +/** Represents a single block in a block blob. It describes the block's ID and size. */ +export declare interface Block { + /** The base64 encoded block ID. */ + name: string; + /** The block size in bytes. */ + size: number; +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +export declare class BlockBlobClient extends BlobClient { + /** + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API + * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient. + */ + private _blobContext; + /** + * blockBlobContext provided by protocol layer. + */ + private blockBlobContext; + /** + * + * Creates an instance of BlockBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". 
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): BlockBlobClient; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + query(query: string, options?: BlockBlobQueryOptions): Promise; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. 
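+ *
+ * A minimal sketch of that staged workflow (illustrative only, not from the upstream docs;
+ * `blockBlobClient` is assumed to exist, and block IDs must be base64-encoded strings of
+ * equal length):
+ *
+ * ```ts
+ * // Stage two blocks, then commit them as the blob's new content.
+ * const blockIds = [
+ *   Buffer.from("block-000").toString("base64"),
+ *   Buffer.from("block-001").toString("base64")
+ * ];
+ * await blockBlobClient.stageBlock(blockIds[0], "hello ", 6);
+ * await blockBlobClient.stageBlock(blockIds[1], "world", 5);
+ * await blockBlobClient.commitBlockList(blockIds);
+ * ```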
+ * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + upload(body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise; + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + syncUploadFromURL(sourceURL: string, options?: BlockBlobSyncUploadFromURLOptions): Promise; + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + stageBlock(blockId: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobStageBlockOptions): Promise; + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. 
Here are some examples of source object URLs:
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
+ * @param offset - From which position of the blob to download, greater than or equal to 0
+ * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined
+ * @param options - Options to the Block Blob Stage Block From URL operation.
+ * @returns Response data for the Block Blob Stage Block From URL operation.
+ */
+ stageBlockFromURL(blockId: string, sourceURL: string, offset?: number, count?: number, options?: BlockBlobStageBlockFromURLOptions): Promise;
+ /**
+ * Writes a blob by specifying the list of block IDs that make up the blob.
+ * In order to be written as part of a blob, a block must have been successfully written
+ * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to
+ * update a blob by uploading only those blocks that have changed, then committing the new and existing
+ * blocks together. Any blocks not specified in the block list are permanently deleted.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list
+ *
+ * @param blocks - Array of 64-byte values that are base64-encoded
+ * @param options - Options to the Block Blob Commit Block List operation.
+ * @returns Response data for the Block Blob Commit Block List operation.
+ */
+ commitBlockList(blocks: string[], options?: BlockBlobCommitBlockListOptions): Promise;
+ /**
+ * Returns the list of blocks that have been uploaded as part of a block blob
+ * using the specified block list filter.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list
+ *
+ * @param listType - Specifies whether to return the list of committed blocks,
+ * the list of uncommitted blocks, or both lists together.
+ * @param options - Options to the Block Blob Get Block List operation.
+ * @returns Response data for the Block Blob Get Block List operation.
+ */
+ getBlockList(listType: BlockListType, options?: BlockBlobGetBlockListOptions): Promise;
+ /**
+ * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.
+ *
+ * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
+ * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
+ * to commit the block list.
+ *
+ * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
+ * `blobContentType`, enabling the browser to provide
+ * functionality based on file type.
+ *
+ * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView
+ * @param options -
+ */
+ uploadData(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise;
+ /**
+ * ONLY AVAILABLE IN BROWSERS.
+ *
+ * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.
+ *
+ * When the buffer length is less than or equal to 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call
+ * {@link commitBlockList} to commit the block list.
+ *
+ * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
+ * `blobContentType`, enabling the browser to provide
+ * functionality based on file type.
+ *
+ * @deprecated Use {@link uploadData} instead.
+ *
+ * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView
+ * @param options - Options to upload browser data.
+ * @returns Response data for the Blob Upload operation.
+ */
+ uploadBrowserData(browserData: Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise;
+ /**
+ *
+ * Uploads data to block blob. Requires a bodyFactory as the data source,
+ * which needs to return a {@link HttpRequestBody} object with the offset and size provided.
+ *
+ * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
+ * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
+ * to commit the block list.
+ *
+ * @param bodyFactory -
+ * @param size - size of the data to upload.
+ * @param options - Options to Upload to Block Blob operation.
+ * @returns Response data for the Blob Upload operation.
+ */
+ private uploadSeekableInternal;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Uploads a local file in blocks to a block blob.
+ *
+ * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
+ * to commit the block list.
+ *
+ * @param filePath - Full path of local file
+ * @param options - Options to Upload to Block Blob operation.
+ * @returns Response data for the Blob Upload operation.
+ */
+ uploadFile(filePath: string, options?: BlockBlobParallelUploadOptions): Promise;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Uploads a Node.js Readable stream into block blob.
+ *
+ * PERFORMANCE IMPROVEMENT TIPS:
+ * * Set the input stream's highWaterMark to the same value as the bufferSize
+ * parameter, which avoids Buffer.concat() operations.
+ *
+ * @param stream - Node.js Readable stream
+ * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB
+ * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,
+ * positive correlation with max uploading concurrency. Default value is 5
+ * @param options - Options to Upload Stream to Block Blob operation.
+ * @returns Response data for the Blob Upload operation.
+ */
+ uploadStream(stream: Readable, bufferSize?: number, maxConcurrency?: number, options?: BlockBlobUploadStreamOptions): Promise;
+}
+/** Defines headers for BlockBlob_commitBlockList operation. */
+export declare interface BlockBlobCommitBlockListHeaders {
+ /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */
+ etag?: string;
+ /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */
+ lastModified?: Date;
+ /** This header is returned so that the client can check for message content integrity.
This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.commitBlockList} operation. + */ +export declare interface BlockBlobCommitBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when committing the block list. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when committing block list. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when committing block list. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. 
+ * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the commitBlockList operation. */ +export declare type BlockBlobCommitBlockListResponse = BlockBlobCommitBlockListHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobCommitBlockListHeaders; + }; +}; +/** Defines headers for BlockBlob_getBlockList operation. */ +export declare interface BlockBlobGetBlockListHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The media type of the body of the response. For Get Block List this is 'application/xml' */ + contentType?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.getBlockList} operation. + */ +export declare interface BlockBlobGetBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; +} +/** Contains response data for the getBlockList operation. */ +export declare type BlockBlobGetBlockListResponse = BlockBlobGetBlockListHeaders & BlockList & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlockList; + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobGetBlockListHeaders; + }; +}; +/** + * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}. + */ +export declare interface BlockBlobParallelUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Destination block blob size in bytes. + */ + blockSize?: number; + /** + * Blob size threshold in bytes to start concurrency uploading. 
+ * Default value is 256MB, blob size less than this option will + * be uploaded via one I/O operation without concurrency. + * You can customize a value less equal than the default value. + */ + maxSingleShotSize?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Blob HTTP Headers. A common header to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel uploading. Must be greater than or equal to 0. + */ + concurrency?: number; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} +/** Defines headers for BlockBlob_putBlobFromUrl operation. */ +export declare interface BlockBlobPutBlobFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the putBlobFromUrl operation. */ +export declare type BlockBlobPutBlobFromUrlResponse = BlockBlobPutBlobFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobPutBlobFromUrlHeaders; + }; +}; +/** + * Options to configure {@link BlockBlobClient.query} operation. + */ +export declare interface BlockBlobQueryOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Configurations for the query input. + */ + inputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryParquetConfiguration; + /** + * Configurations for the query output. + */ + outputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryArrowConfiguration; + /** + * Callback to receive events on the progress of query operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback to receive error events during the query operaiton. + */ + onError?: (error: BlobQueryError) => void; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Defines headers for BlockBlob_stageBlockFromURL operation. */ +export declare interface BlockBlobStageBlockFromURLHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation. 
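+ *
+ * A rough sketch only (the `blockId`, `sourceBlobURL` and lease id below are placeholders,
+ * and the lease condition is merely one of the options this interface accepts):
+ *
+ * ```js
+ * await blockBlobClient.stageBlockFromURL(blockId, sourceBlobURL, 0, 1024 * 1024, {
+ *   conditions: { leaseId: "<lease-id>" }
+ * });
+ * ```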
+ */ +export declare interface BlockBlobStageBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Specifies the bytes of the source Blob/File to upload. + * If not specified, the entire content is uploaded as a single block. + */ + range?: Range_2; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} +/** Contains response data for the stageBlockFromURL operation. */ +export declare type BlockBlobStageBlockFromURLResponse = BlockBlobStageBlockFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockFromURLHeaders; + }; +}; +/** Defines headers for BlockBlob_stageBlock operation. */ +export declare interface BlockBlobStageBlockHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This header is returned so that the client can check for message content integrity. 
The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.stageBlock} operation. + */ +export declare interface BlockBlobStageBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * Callback to receive events on the progress of stage block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the stageBlock operation. */ +export declare type BlockBlobStageBlockResponse = BlockBlobStageBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockHeaders; + }; +}; +/** + * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation. + */ +export declare interface BlockBlobSyncUploadFromURLOptions extends CommonOptions { + /** + * Server timeout in seconds. + * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations + */ + timeoutInSeconds?: number; + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. 
+ * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value + * pairs are specified, the operation will copy the metadata from the source blob or file to the + * destination blob. If one or more name-value pairs are specified, the destination blob is + * created with the specified metadata, and metadata is not copied from the source blob or file. + * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules + * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + * information. + */ + metadata?: Metadata; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Optional, default is true. Indicates if properties from the source blob should be copied. + */ + copySourceBlobProperties?: boolean; + /** + * HTTP headers to set when uploading to a block blob. + * + * A common header to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Conditions to meet for the destination Azure Blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Conditions to meet for the source Azure Blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +export declare enum BlockBlobTier { + /** + * Optimized for storing data that is accessed frequently. + */ + Hot = "Hot", + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + Cool = "Cool", + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + Archive = "Archive" +} +/** Defines headers for BlockBlob_upload operation. */ +export declare interface BlockBlobUploadHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. 
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.upload} operation. + */ +export declare interface BlockBlobUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when uploading to a block blob. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when uploading to a block blob. + */ + metadata?: Metadata; + /** + * Callback to receive events on the progress of upload operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the upload operation. */ +export declare type BlockBlobUploadResponse = BlockBlobUploadHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobUploadHeaders; + }; +}; +/** + * Option interface for the {@link BlockBlobClient.uploadStream} operation. + */ +export declare interface BlockBlobUploadStreamOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Blob HTTP Headers. + * + * A common header to set is `blobContentType`, enabling the + * browser to provide functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} +export declare interface BlockList { + committedBlocks?: Block[]; + uncommittedBlocks?: Block[]; +} +/** Defines values for BlockListType. */ +export declare type BlockListType = "committed" | "uncommitted" | "all"; +declare interface ClearRange { + start: number; + end: number; +} +/** + * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}. + */ +export declare interface CommonGenerateSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The name of the access policy on the container this SAS references if any. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. 
+ */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; +} +/** + * An interface for options common to every remote operation. + */ +export declare interface CommonOptions { + /** + * Options to configure spans created when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} +/** + * Options to configure Container - Acquire Lease operation. + */ +export declare interface ContainerAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** Optional parameters. */ +export declare interface ContainerBreakLeaseOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */ + breakPeriod?: number; +} +/** + * Options to configure Container - Break Lease operation. + */ +export declare interface ContainerBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Options to configure Container - Change Lease operation. + */ +export declare interface ContainerChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +export declare class ContainerClient extends StorageClient { + /** + * containerContext provided by protocol layer. + */ + private containerContext; + private _containerName; + /* + * The name of the container. 
+ */ + readonly containerName: string; + /** + * + * Creates an instance of ContainerClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - Options to Container Create operation. + * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + create(options?: ContainerCreateOptions): Promise; + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. 
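+ *
+ * A minimal sketch (assumes a `containerClient` obtained elsewhere, for example from
+ * `blobServiceClient.getContainerClient(...)`):
+ *
+ * ```js
+ * const res = await containerClient.createIfNotExists();
+ * console.log(res.succeeded ? "Container created" : "Container already existed");
+ * ```
+ *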
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - + */ + createIfNotExists(options?: ContainerCreateOptions): Promise; + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + exists(options?: ContainerExistsOptions): Promise; + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName: string): BlobClient; + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName: string): AppendBlobClient; + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName: string): BlockBlobClient; + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName: string): PageBlobClient; + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + getProperties(options?: ContainerGetPropertiesOptions): Promise; + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + delete(options?: ContainerDeleteMethodOptions): Promise; + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + deleteIfExists(options?: ContainerDeleteMethodOptions): Promise; + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. 
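+ *
+ * For illustration (the metadata key and value below are arbitrary):
+ *
+ * ```js
+ * await containerClient.setMetadata({ project: "demo" });
+ * // Calling it without arguments removes the existing container metadata.
+ * await containerClient.setMetadata();
+ * ```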
+ */ + setMetadata(metadata?: Metadata, options?: ContainerSetMetadataOptions): Promise; + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. + */ + getAccessPolicy(options?: ContainerGetAccessPolicyOptions): Promise; + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + setAccessPolicy(access?: PublicAccessType, containerAcl?: SignedIdentifier[], options?: ContainerSetAccessPolicyOptions): Promise; + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. 
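+ *
+ * Example usage (a sketch; the blob name and content below are arbitrary):
+ *
+ * ```js
+ * const content = "Hello world!";
+ * const { blockBlobClient, response } = await containerClient.uploadBlockBlob(
+ *   "greeting.txt", content, content.length
+ * );
+ * console.log("Uploaded block blob, requestId:", response.requestId);
+ * ```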
+ */
+ uploadBlockBlob(blobName: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise<{
+ blockBlobClient: BlockBlobClient;
+ response: BlockBlobUploadResponse;
+ }>;
+ /**
+ * Marks the specified blob or snapshot for deletion. The blob is later deleted
+ * during garbage collection. Note that in order to delete a blob, you must delete
+ * all of its snapshots. You can delete both at the same time with the Delete
+ * Blob operation.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+ *
+ * @param blobName -
+ * @param options - Options to Blob Delete operation.
+ * @returns Block blob deletion response data.
+ */
+ deleteBlob(blobName: string, options?: ContainerDeleteBlobOptions): Promise;
+ /**
+ * listBlobFlatSegment returns a single segment of blobs starting from the
+ * specified Marker. Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call listBlobFlatSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
+ *
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param options - Options to Container List Blob Flat Segment operation.
+ */
+ private listBlobFlatSegment;
+ /**
+ * listBlobHierarchySegment returns a single segment of blobs starting from
+ * the specified Marker. Use an empty Marker to start enumeration from the
+ * beginning. After getting a segment, process it, and then call listBlobHierarchySegment
+ * again (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
+ *
+ * @param delimiter - The character or string used to define the virtual hierarchy
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param options - Options to Container List Blob Hierarchy Segment operation.
+ */
+ private listBlobHierarchySegment;
+ /**
+ * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse
+ *
+ * @param marker - A string value that identifies the portion of
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the ContinuationToken value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The ContinuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param options - Options to list blobs operation.
+ */
+ private listSegments;
+ /**
+ * Returns an AsyncIterableIterator of {@link BlobItem} objects
+ *
+ * @param options - Options to list blobs operation.
+ */
+ private listItems;
+ /**
+ * Returns an async iterable iterator to list all the blobs
+ * under the specified account.
+ *
+ * .byPage() returns an async iterable iterator to list the blobs in pages.
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options?: ContainerListBlobsOptions): PagedAsyncIterableIterator; + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listHierarchySegments; + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + private listItemsByHierarchy; + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter: string, options?: ContainerListBlobsOptions): PagedAsyncIterableIterator<({ + kind: "prefix"; + } & BlobPrefix) | ({ + kind: "blob"; + } & BlobItem), ContainerListBlobHierarchySegmentResponse>; + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. 
+ * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ContainerFindBlobByTagsOptions): PagedAsyncIterableIterator; + private getContainerNameFromUrl; + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient(): BlobBatchClient; +} +/** Defines headers for Container_create operation. */ +export declare interface ContainerCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the {@link ContainerClient.createIfNotExists} operation. + */ +export declare interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse { + /** + * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists. + */ + succeeded: boolean; +} +/** + * Options to configure {@link ContainerClient.create} operation. + */ +export declare interface ContainerCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the container. + */ + metadata?: Metadata; + /** + * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include: + * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account. + * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request. + */ + access?: PublicAccessType; + /** + * Container encryption scope info. + */ + containerEncryptionScope?: ContainerEncryptionScope; +} +/** Contains response data for the create operation. */ +export declare type ContainerCreateResponse = ContainerCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerCreateHeaders; + }; +}; +/** + * Options to configure the {@link ContainerClient.deleteBlob} operation. + */ +export declare interface ContainerDeleteBlobOptions extends BlobDeleteOptions { + /** + * An opaque DateTime value that, when present, specifies the version + * of the blob to delete. It's for service version 2019-10-10 and newer. + */ + versionId?: string; +} +/** Defines headers for Container_delete operation. */ +export declare interface ContainerDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the {@link ContainerClient.deleteIfExists} operation. + */ +export declare interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse { + /** + * Indicate whether the container is successfully deleted. Is false if the container does not exist in the first place. + */ + succeeded: boolean; +} +/** + * Options to configure {@link ContainerClient.delete} operation. + */ +export declare interface ContainerDeleteMethodOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting the container. + */ + conditions?: ContainerRequestConditions; +} +/** Contains response data for the delete operation. */ +export declare type ContainerDeleteResponse = ContainerDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerDeleteHeaders; + }; +}; +/** Parameter group */ +export declare interface ContainerEncryptionScope { + /** Optional. Version 2019-07-07 and later. Specifies the default encryption scope to set on the container and use for all future writes. */ + defaultEncryptionScope?: string; + /** Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. */ + preventEncryptionScopeOverride?: boolean; +} +/** + * Options to configure {@link ContainerClient.exists} operation. + */ +export declare interface ContainerExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Defines headers for Container_filterBlobs operation. */ +export declare interface ContainerFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ContainerFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; +} +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment & ContainerFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; +/** + * Options to configure {@link ContainerClient.generateSasUrl} operation. + */ +export declare interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: ContainerSASPermissions; +} +/** Defines headers for Container_getAccessPolicy operation. */ +export declare interface ContainerGetAccessPolicyHeaders { + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.getAccessPolicy} operation. + */ +export declare interface ContainerGetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} +/** + * Contains response data for the {@link ContainerClient.getAccessPolicy} operation. + */ +export declare type ContainerGetAccessPolicyResponse = { + signedIdentifiers: SignedIdentifier[]; +} & ContainerGetAccessPolicyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerGetAccessPolicyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: SignedIdentifierModel[]; + }; +}; +/** Defines headers for Container_getProperties operation. 
*/ +export declare interface ContainerGetPropertiesHeaders { + metadata?: { + [propertyName: string]: string; + }; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** Indicates whether the container has an immutability policy set on it. */ + hasImmutabilityPolicy?: boolean; + /** Indicates whether the container has a legal hold. */ + hasLegalHold?: boolean; + /** The default encryption scope for the container. */ + defaultEncryptionScope?: string; + /** Indicates whether the container's default encryption scope can be overriden. */ + denyEncryptionScopeOverride?: boolean; + /** Indicates whether version level worm is enabled on a container. */ + isImmutableStorageWithVersioningEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.getProperties} operation. + */ +export declare interface ContainerGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} +/** Contains response data for the getProperties operation. */ +export declare type ContainerGetPropertiesResponse = ContainerGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerGetPropertiesHeaders; + }; +}; +/** An Azure Storage container */ +export declare interface ContainerItem { + name: string; + deleted?: boolean; + version?: string; + /** Properties of a container */ + properties: ContainerProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; +} +/** Defines headers for Container_listBlobFlatSegment operation. */ +export declare interface ContainerListBlobFlatSegmentHeaders { + /** The media type of the body of the response. 
For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the listBlobFlatSegment operation. + */ +export declare type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse & ContainerListBlobFlatSegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobFlatSegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsFlatSegmentResponseModel; + }; +}; +/** Defines headers for Container_listBlobHierarchySegment operation. */ +export declare interface ContainerListBlobHierarchySegmentHeaders { + /** The media type of the body of the response. For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the listBlobHierarchySegment operation. + */ +export declare type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse & ContainerListBlobHierarchySegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobHierarchySegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsHierarchySegmentResponseModel; + }; +}; +/** + * Options to configure Container - List Blobs operations. + * + * See: + * - {@link ContainerClient.listBlobsFlat} + * - {@link ContainerClient.listBlobsByHierarchy} + */ +export declare interface ContainerListBlobsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response. 
+ */ + includeCopy?: boolean; + /** + * Specifies whether soft deleted blobs should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether blob metadata be returned in the response. + */ + includeMetadata?: boolean; + /** + * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response. + */ + includeSnapshots?: boolean; + /** + * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response. + */ + includeVersions?: boolean; + /** + * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response. + */ + includeUncommitedBlobs?: boolean; + /** + * Specifies whether blob tags be returned in the response. + */ + includeTags?: boolean; + /** + * Specifies whether deleted blob with versions be returned in the response. + */ + includeDeletedWithVersions?: boolean; + /** + * Specifies whether blob immutability policy be returned in the response. + */ + includeImmutabilityPolicy?: boolean; + /** + * Specifies whether blob legal hold be returned in the response. + */ + includeLegalHold?: boolean; +} +/** Properties of a container */ +export declare interface ContainerProperties { + lastModified: Date; + etag: string; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + publicAccess?: PublicAccessType; + hasImmutabilityPolicy?: boolean; + hasLegalHold?: boolean; + defaultEncryptionScope?: string; + preventEncryptionScopeOverride?: boolean; + deletedOn?: Date; + remainingRetentionDays?: number; + /** Indicates if version level worm is enabled on this container. */ + isImmutableStorageWithVersioningEnabled?: boolean; +} +/** + * Options to configure Container - Release Lease operation. + */ +export declare interface ContainerReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** Defines headers for Container_rename operation. */ +export declare interface ContainerRenameHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the rename operation. */ +export declare type ContainerRenameResponse = ContainerRenameHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerRenameHeaders; + }; +}; +/** + * Options to configure Container - Renew Lease operation. 
+ */ +export declare interface ContainerRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Conditions to meet for the container. + */ +export declare interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions { +} +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class ContainerSASPermissions { + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): ContainerSASPermissions; + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specifies List access granted. + */ + list: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString(): string; +} +/** + * A type that looks like a Container SAS permission. + * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface ContainerSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. + */ + write?: boolean; + /** + * Specifies Delete access granted. 
+ */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specifies List access granted. + */ + list?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags?: boolean; +} +/** Defines headers for Container_setAccessPolicy operation. */ +export declare interface ContainerSetAccessPolicyHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.setAccessPolicy} operation. + */ +export declare interface ContainerSetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting the access policy. + */ + conditions?: ContainerRequestConditions; +} +/** Contains response data for the setAccessPolicy operation. */ +export declare type ContainerSetAccessPolicyResponse = ContainerSetAccessPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetAccessPolicyHeaders; + }; +}; +/** Defines headers for Container_setMetadata operation. */ +export declare interface ContainerSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.setMetadata} operation. + */ +export declare interface ContainerSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: ContainerRequestConditions; +} +/** Contains response data for the setMetadata operation. */ +export declare type ContainerSetMetadataResponse = ContainerSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetMetadataHeaders; + }; +}; +/** Defines headers for Container_restore operation. */ +export declare interface ContainerUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the restore operation. */ +export declare type ContainerUndeleteResponse = ContainerUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerUndeleteHeaders; + }; +}; +/** + * Defines the operations from a {@link BlobClient} that are needed for the poller + * returned by {@link BlobClient.beginCopyFromURL} to work. + */ +export declare type CopyPollerBlobClient = Pick & { + startCopyFromURL(copySource: string, options?: BlobStartCopyFromURLOptions): Promise; +}; +/** Defines values for CopyStatusType. */ +export declare type CopyStatusType = "pending" | "success" | "aborted" | "failed"; +/** CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain */ +export declare interface CorsRule { + /** The origin domains that are permitted to make a request against the storage service via CORS. The origin domain is the domain from which the request originates. Note that the origin must be an exact case-sensitive match with the origin that the user age sends to the service. 
You can also use the wildcard character '*' to allow all origin domains to make requests via CORS. */ + allowedOrigins: string; + /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. (comma separated) */ + allowedMethods: string; + /** the request headers that the origin domain may specify on the CORS request. */ + allowedHeaders: string; + /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer */ + exposedHeaders: string; + /** The maximum amount time that a browser should cache the preflight OPTIONS request. */ + maxAgeInSeconds: number; +} +/** Parameter group */ +export declare interface CpkInfo { + /** Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. */ + encryptionKey?: string; + /** The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. */ + encryptionKeySha256?: string; + /** The algorithm used to produce the encryption key hash. Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is provided. */ + encryptionAlgorithm?: EncryptionAlgorithmType; +} +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +declare abstract class Credential_2 implements RequestPolicyFactory { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy; +} +export { Credential_2 as Credential }; +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +export declare abstract class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; +} +/** + * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy. + */ +export declare type CredentialPolicyCreator = (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => CredentialPolicy; +/** Defines values for DeleteSnapshotsOptionType. */ +export declare type DeleteSnapshotsOptionType = "include" | "only"; +export { deserializationPolicy }; +/** Defines values for EncryptionAlgorithmType. */ +export declare type EncryptionAlgorithmType = "AES256"; +/** + * Blob info from a {@link BlobServiceClient.findBlobsByTags} + */ +export declare interface FilterBlobItem { + /** + * Blob Name. + */ + name: string; + /** + * Container Name. + */ + containerName: string; + /** + * Blob Tags. + */ + tags?: Tags; + /** + * Tag value. + * + * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags. + */ + tagValue: string; +} +/** Blob info from a Filter Blobs API call */ +export declare interface FilterBlobItemModel { + name: string; + containerName: string; + /** Blob tags */ + tags?: BlobTags; +} +/** + * Segment response of {@link BlobServiceClient.findBlobsByTags} operation. 
+ */ +export declare interface FilterBlobSegment { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItem[]; + continuationToken?: string; +} +/** The result of a Filter Blobs API call */ +export declare interface FilterBlobSegmentModel { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItemModel[]; + continuationToken?: string; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateAccountSASQueryParameters(accountSASSignatureValues: AccountSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * Fill in the required details before running the following snippets. + * + * Example usage: + * + * ```js + * // Generate service level SAS for a container + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using an identifier: + * + * ```js + * // Generate service level SAS for a container with identifier + * // startsOn & permissions are optional when identifier is provided + * const identifier = "unique-id"; + * await containerClient.setAccessPolicy(undefined, [ + * { + * accessPolicy: { + * expiresOn: new Date(new Date().valueOf() + 86400), // Date type + * permissions: ContainerSASPermissions.parse("racwdl").toString(), + * startsOn: new Date() // Date type + * }, + * id: identifier + * } + * ]); + * + * const containerSAS = generateBlobSASQueryParameters( + * { + * containerName, // Required + * identifier // Required + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using a blob name: + * + * ```js + * // Generate service level SAS for a blob + * const blobSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * blobName, // Required + * permissions: BlobSASPermissions.parse("racwd"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type + * cacheControl: "cache-control-override", // Optional + * contentDisposition: "content-disposition-override", // Optional + * contentEncoding: "content-encoding-override", // Optional + * contentLanguage: "content-language-override", // Optional + * contentType: "content-type-override", // Optional + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required. + * + * Example usage: + * + * ```js + * // Generate user delegation SAS for a container + * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn); + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn, // Optional. Date type + * expiresOn, // Required. Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2018-11-09" // Must greater than or equal to 2018-11-09 to generate user delegation SAS + * }, + * userDelegationKey, // UserDelegationKey + * accountName + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @param accountName - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, userDelegationKey: UserDelegationKey, accountName: string): SASQueryParameters; +/** Geo-Replication information for the Secondary Storage Service */ +export declare interface GeoReplication { + /** The status of the secondary location */ + status: GeoReplicationStatusType; + /** A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. */ + lastSyncOn: Date; +} +/** Defines values for GeoReplicationStatusType. */ +export declare type GeoReplicationStatusType = "live" | "bootstrap" | "unavailable"; +/** + * Represents authentication information in Authorization, ProxyAuthorization, + * WWW-Authenticate, and Proxy-Authenticate header values. + */ +export declare interface HttpAuthorization { + /** + * The scheme to use for authorization. + */ + scheme: string; + /** + * the credentials containing the authentication information of the user agent for the resource being requested. 
+ */ + value: string; +} +export { HttpHeaders }; +export { HttpOperationResponse }; +export { HttpRequestBody }; +export { IHttpClient }; +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +export declare function isPipelineLike(pipeline: unknown): pipeline is PipelineLike; +/** + * The details for a specific lease. + */ +export declare interface Lease { + /** + * The ETag contains a value that you can use to + * perform operations conditionally. If the request version is 2011-08-18 or + * newer, the ETag value will be in quotes. + */ + etag?: string; + /** + * Returns the date and time the container was + * last modified. Any operation that modifies the blob, including an update + * of the blob's metadata or properties, changes the last-modified time of + * the blob. + */ + lastModified?: Date; + /** + * Uniquely identifies a container's lease + */ + leaseId?: string; + /** + * Approximate time remaining in the lease + * period, in seconds. + */ + leaseTime?: number; + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + */ + requestId?: string; + /** + * Indicates the version of the Blob service used + * to execute the request. This header is returned for requests made against + * version 2009-09-19 and above. + */ + version?: string; + /** + * UTC date/time value generated by the service that + * indicates the time at which the response was initiated + */ + date?: Date; + /** + * Error code if any associated with the response that returned + * the Lease information. + */ + errorCode?: string; +} +/** Parameter group */ +export declare interface LeaseAccessConditions { + /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */ + leaseId?: string; +} +/** Defines values for LeaseDurationType. */ +export declare type LeaseDurationType = "infinite" | "fixed"; +/** + * Configures lease operations. + */ +export declare interface LeaseOperationOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Contains the response data for operations that create, modify, or delete a lease. + * + * See {@link BlobLeaseClient}. + */ +export declare type LeaseOperationResponse = Lease & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: Lease; + }; +}; +/** Defines values for LeaseStateType. */ +export declare type LeaseStateType = "available" | "leased" | "expired" | "breaking" | "broken"; +/** Defines values for LeaseStatusType. 
*/ +export declare type LeaseStatusType = "locked" | "unlocked"; +/** + * An enumeration of blobs + */ +export declare interface ListBlobsFlatSegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegment; + continuationToken?: string; +} +/** An enumeration of blobs */ +export declare interface ListBlobsFlatSegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegmentModel; + continuationToken?: string; +} +/** + * An enumeration of blobs + */ +export declare interface ListBlobsHierarchySegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegment; + continuationToken?: string; +} +/** An enumeration of blobs */ +export declare interface ListBlobsHierarchySegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegmentModel; + continuationToken?: string; +} +/** An enumeration of containers */ +export declare interface ListContainersSegmentResponse { + serviceEndpoint: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + containerItems: ContainerItem[]; + continuationToken?: string; +} +/** + * The `@azure/logger` configuration for this package. + */ +export declare const logger: AzureLogger; +/** Azure Analytics Logging settings. */ +export declare interface Logging { + /** The version of Storage Analytics to configure. */ + version: string; + /** Indicates whether all delete requests should be logged. */ + deleteProperty: boolean; + /** Indicates whether all read requests should be logged. */ + read: boolean; + /** Indicates whether all write requests should be logged. */ + write: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy: RetentionPolicy; +} +/** + * Specifies HTTP options for conditional requests based on ETag matching. + */ +export declare interface MatchConditions { + /** + * Specify an ETag value to operate only on blobs with a matching value. + */ + ifMatch?: string; + /** + * Specify an ETag value to operate only on blobs without a matching value. + */ + ifNoneMatch?: string; +} +/** + * A map of name-value pairs to associate with the resource. + */ +export declare interface Metadata { + /** + * A name-value pair. + */ + [propertyName: string]: string; +} +/** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ +export declare interface Metrics { + /** The version of Storage Analytics to configure. */ + version?: string; + /** Indicates whether metrics are enabled for the Blob service. */ + enabled: boolean; + /** Indicates whether metrics should generate summary statistics for called API operations. */ + includeAPIs?: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy?: RetentionPolicy; +} +/** + * Specifies HTTP options for conditional requests based on modification time. + */ +export declare interface ModificationConditions { + /** + * Specify this header value to operate only on a blob if it has been modified since the + * specified date/time. 
+ */ + ifModifiedSince?: Date; + /** + * Specify this header value to operate only on a blob if it has not been modified since the + * specified date/time. + */ + ifUnmodifiedSince?: Date; +} +/** + * standard HTTP conditional headers and tags condition. + */ +export declare interface ModifiedAccessConditions extends MatchConditions, ModificationConditions, TagConditions { +} +/** Parameter group */ +export declare interface ModifiedAccessConditionsModel { + /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */ + ifModifiedSince?: Date; + /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */ + ifUnmodifiedSince?: Date; + /** Specify an ETag value to operate only on blobs with a matching value. */ + ifMatch?: string; + /** Specify an ETag value to operate only on blobs without a matching value. */ + ifNoneMatch?: string; + /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */ + ifTags?: string; +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +export declare function newPipeline(credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, pipelineOptions?: StoragePipelineOptions): Pipeline; +/** + * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}. + * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the + * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses + * (e.g. {@link BlobProperties.ObjectReplicationDestinationPolicyId}. + */ +export declare interface ObjectReplicationPolicy { + /** + * The Object Replication Policy ID. + */ + policyId: string; + /** + * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID. + */ + rules: ObjectReplicationRule[]; +} +/** + * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob. + * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}. + */ +export declare interface ObjectReplicationRule { + /** + * The Object Replication Rule ID. + */ + ruleId: string; + /** + * The Replication Status + */ + replicationStatus: ObjectReplicationStatus; +} +/** + * Specifies the Replication Status of a blob. This is used when a storage account has + * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}. + */ +export declare type ObjectReplicationStatus = "complete" | "failed"; +/** Defines headers for PageBlob_clearPages operation. */ +export declare interface PageBlobClearPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.clearPages} operation. + */ +export declare interface PageBlobClearPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when clearing pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the clearPages operation. */ +export declare type PageBlobClearPagesResponse = PageBlobClearPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobClearPagesHeaders; + }; +}; +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +export declare class PageBlobClient extends BlobClient { + /** + * pageBlobsContext provided by protocol layer. + */ + private pageBlobContext; + /** + * + * Creates an instance of PageBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. 
+ */
+ constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions);
+ /**
+ * Creates an instance of PageBlobClient.
+ * This method accepts an encoded URL or non-encoded URL pointing to a blob.
+ * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.
+ * If a blob name includes ? or %, blob name must be encoded in the URL.
+ *
+ * @param url - A Client string pointing to Azure Storage page blob, such as
+ * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". You can append a SAS
+ * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString".
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
+ * @param options - Optional. Options to configure the HTTP pipeline.
+ */
+ constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions);
+ /**
+ * Creates an instance of PageBlobClient.
+ *
+ * @param url - A URL string pointing to Azure Storage page blob, such as
+ * "https://myaccount.blob.core.windows.net/mycontainer/pageblob".
+ * You can append a SAS if using AnonymousCredential, such as
+ * "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString".
+ * This method accepts an encoded URL or non-encoded URL pointing to a blob.
+ * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.
+ * However, if a blob name includes ? or %, blob name must be encoded in the URL.
+ * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25".
+ * @param pipeline - Call newPipeline() to create a default
+ * pipeline, or provide a customized pipeline.
+ */
+ constructor(url: string, pipeline: PipelineLike);
+ /**
+ * Creates a new PageBlobClient object identical to the source but with the
+ * specified snapshot timestamp.
+ * Providing "" will remove the snapshot and return a Client to the base blob.
+ *
+ * @param snapshot - The snapshot timestamp.
+ * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.
+ */
+ withSnapshot(snapshot: string): PageBlobClient;
+ /**
+ * Creates a page blob of the specified length. Call uploadPages to upload data
+ * to a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param size - size of the page blob.
+ * @param options - Options to the Page Blob Create operation.
+ * @returns Response data for the Page Blob Create operation.
+ */
+ create(size: number, options?: PageBlobCreateOptions): Promise<PageBlobCreateResponse>;
+ /**
+ * Creates a page blob of the specified length. Call uploadPages to upload data
+ * to a page blob. If the blob with the same name already exists, the content
+ * of the existing blob will remain unchanged.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param size - size of the page blob.
+ * @param options -
+ */
+ createIfNotExists(size: number, options?: PageBlobCreateIfNotExistsOptions): Promise<PageBlobCreateIfNotExistsResponse>;
+ /**
+ * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + uploadPages(body: HttpRequestBody, offset: number, count: number, options?: PageBlobUploadPagesOptions): Promise; + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + uploadPagesFromURL(sourceURL: string, sourceOffset: number, destOffset: number, count: number, options?: PageBlobUploadPagesFromURLOptions): Promise; + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. + */ + clearPages(offset?: number, count?: number, options?: PageBlobClearPagesOptions): Promise; + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + getPageRanges(offset?: number, count?: number, options?: PageBlobGetPageRangesOptions): Promise; + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + private listPageRangesSegment; + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. 
+ * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + private listPageRangeItemSegments; + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + private listPageRangeItems; + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. + * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset?: number, count?: number, options?: PageBlobListPageRangesOptions): PagedAsyncIterableIterator; + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. 
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
+ */
+ getPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobGetPageRangesDiffOptions): Promise<PageBlobGetPageRangesDiffResponse>;
+ /**
+ * getPageRangesDiffSegment returns a single segment of page ranges starting from the
+ * specified Marker for difference between previous snapshot and the target page blob.
+ * Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call getPageRangesDiffSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+ * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ */
+ private listPageRangesDiffSegment;
+ /**
+ * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel}
+ *
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+ * @param marker - A string value that identifies the portion of
+ * the get of page ranges to be returned with the next getting operation. The
+ * operation returns the ContinuationToken value within the response body if the
+ * getting operation did not return all page ranges remaining within the current page.
+ * The ContinuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of get
+ * items. The marker value is opaque to the client.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ */
+ private listPageRangeDiffItemSegments;
+ /**
+ * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ */
+ private listPageRangeDiffItems;
+ /**
+ * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRangesDiff(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobListPageRangesDiffOptions): PagedAsyncIterableIterator; + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + getPageRangesDiffForManagedDisks(offset: number, count: number, prevSnapshotUrl: string, options?: PageBlobGetPageRangesDiffOptions): Promise; + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + resize(size: number, options?: PageBlobResizeOptions): Promise; + /** + * Sets a page blob's sequence number. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + updateSequenceNumber(sequenceNumberAction: SequenceNumberActionType, sequenceNumber?: number, options?: PageBlobUpdateSequenceNumberOptions): Promise; + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + startCopyIncremental(copySource: string, options?: PageBlobStartCopyIncrementalOptions): Promise; +} +/** Defines headers for PageBlob_copyIncremental operation. */ +export declare interface PageBlobCopyIncrementalHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the copyIncremental operation. */ +export declare type PageBlobCopyIncrementalResponse = PageBlobCopyIncrementalHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobCopyIncrementalHeaders; + }; +}; +/** Defines headers for PageBlob_create operation. 
*/ +export declare interface PageBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: PremiumPageBlobTier | string; +} +/** + * Contains response data for the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} +/** + * Options to configure the {@link PageBlobClient.create} operation. + */ +export declare interface PageBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating a page blob. + */ + conditions?: BlobRequestConditions; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: PremiumPageBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the create operation. */ +export declare type PageBlobCreateResponse = PageBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobCreateHeaders; + }; +}; +/** Defines headers for PageBlob_getPageRangesDiff operation. */ +export declare interface PageBlobGetPageRangesDiffHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.getRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; + /** + * (unused) + */ + range?: string; +} +/** + * Contains response data for the {@link BlobClient.getPageRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffResponse extends PageList, PageBlobGetPageRangesDiffHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} +/** Contains response data for the getPageRangesDiff operation. */ +export declare type PageBlobGetPageRangesDiffResponseModel = PageBlobGetPageRangesDiffHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + }; +}; +/** Defines headers for PageBlob_getPageRanges operation. */ +export declare interface PageBlobGetPageRangesHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} +/** + * Contains response data for the {@link BlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} +/** Contains response data for the getPageRanges operation. */ +export declare type PageBlobGetPageRangesResponseModel = PageBlobGetPageRangesHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesHeaders; + }; +}; +/** + * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation. + */ +export declare interface PageBlobListPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; +} +/** + * Options to configure the {@link PageBlobClient.listPageRanges} operation. + */ +export declare interface PageBlobListPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} +/** + * Conditions to add to the creation of this page blob. + */ +export declare interface PageBlobRequestConditions extends BlobRequestConditions, SequenceNumberAccessConditions { +} +/** Defines headers for PageBlob_resize operation. */ +export declare interface PageBlobResizeHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. 
This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link PageBlobClient.resize} operation. + */ +export declare interface PageBlobResizeOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when resizing a page blob. + */ + conditions?: BlobRequestConditions; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the resize operation. */ +export declare type PageBlobResizeResponse = PageBlobResizeHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobResizeHeaders; + }; +}; +/** + * Options to configure {@link PageBlobClient.startCopyIncremental} operation. + */ +export declare interface PageBlobStartCopyIncrementalOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when starting a copy incremental operation. + */ + conditions?: ModifiedAccessConditions; +} +/** Defines headers for PageBlob_updateSequenceNumber operation. */ +export declare interface PageBlobUpdateSequenceNumberHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link PageBlobClient.updateSequenceNumber} operation. + */ +export declare interface PageBlobUpdateSequenceNumberOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: BlobRequestConditions; +} +/** Contains response data for the updateSequenceNumber operation. */ +export declare type PageBlobUpdateSequenceNumberResponse = PageBlobUpdateSequenceNumberHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUpdateSequenceNumberHeaders; + }; +}; +/** Defines headers for PageBlob_uploadPagesFromURL operation. */ +export declare interface PageBlobUploadPagesFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation. + */ +export declare interface PageBlobUploadPagesFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. 
+ * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: PageBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} +/** Contains response data for the uploadPagesFromURL operation. */ +export declare type PageBlobUploadPagesFromURLResponse = PageBlobUploadPagesFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesFromURLHeaders; + }; +}; +/** Defines headers for PageBlob_uploadPages operation. */ +export declare interface PageBlobUploadPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the pages. This header is only returned when the pages were encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.uploadPages} operation. + */ +export declare interface PageBlobUploadPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Callback to receive events on the progress of upload pages operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the uploadPages operation. */ +export declare type PageBlobUploadPagesResponse = PageBlobUploadPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesHeaders; + }; +}; +/** + * List of page ranges for a blob. + */ +export declare interface PageList { + /** + * Valid non-overlapping page ranges. + */ + pageRange?: Range_2[]; + /** + * Present if the prevSnapshot parameter was specified and there were cleared + * pages between the previous snapshot and the target snapshot. 
+ */ + clearRange?: Range_2[]; +} +/** the list of pages */ +declare interface PageList_2 { + pageRange?: PageRange[]; + clearRange?: ClearRange[]; + continuationToken?: string; +} +declare interface PageRange { + start: number; + end: number; +} +export declare interface PageRangeInfo { + start: number; + end: number; + isClear: boolean; +} +/** + * The multipart/mixed response which contains the response for each subrequest. + */ +export declare interface ParsedBatchResponse { + /** + * The parsed sub responses. + */ + subResponses: BatchSubResponse[]; + /** + * The succeeded executed sub responses' count; + */ + subResponsesSucceededCount: number; + /** + * The failed executed sub responses' count; + */ + subResponsesFailedCount: number; +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare class Pipeline implements PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories: RequestPolicyFactory[], options?: PipelineOptions); + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} +/** + * An interface for the {@link Pipeline} class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare interface PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} +/** + * Option interface for Pipeline constructor. + */ +export declare interface PipelineOptions { + /** + * Optional. Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; +} +export { PollerLike }; +export { PollOperationState }; +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +export declare enum PremiumPageBlobTier { + /** + * P4 Tier. + */ + P4 = "P4", + /** + * P6 Tier. + */ + P6 = "P6", + /** + * P10 Tier. + */ + P10 = "P10", + /** + * P15 Tier. + */ + P15 = "P15", + /** + * P20 Tier. + */ + P20 = "P20", + /** + * P30 Tier. 
+ */ + P30 = "P30", + /** + * P40 Tier. + */ + P40 = "P40", + /** + * P50 Tier. + */ + P50 = "P50", + /** + * P60 Tier. + */ + P60 = "P60", + /** + * P70 Tier. + */ + P70 = "P70", + /** + * P80 Tier. + */ + P80 = "P80" +} +/** Defines values for PublicAccessType. */ +export declare type PublicAccessType = "container" | "blob"; +/** + * Range for Blob Service Operations. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations + */ +declare interface Range_2 { + /** + * StartByte, larger than or equal 0. + */ + offset: number; + /** + * Optional. Count of bytes, larger than 0. + * If not provided, will return bytes from offset to the end. + */ + count?: number; +} +export { Range_2 as Range }; +/** Defines values for RehydratePriority. */ +export declare type RehydratePriority = "High" | "Standard"; +export { RequestPolicy }; +export { RequestPolicyFactory }; +export { RequestPolicyOptions }; +export { RestError }; +/** the retention policy which determines how long the associated data should persist */ +export declare interface RetentionPolicy { + /** Indicates whether a retention policy is enabled for the storage service */ + enabled: boolean; + /** Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted */ + days?: number; +} +/** + * Allowed IP range for a SAS. + */ +export declare interface SasIPRange { + /** + * Starting IP address in the IP range. + * If end IP doesn't provide, start IP will the only IP allowed. + */ + start: string; + /** + * Optional. IP address that ends the IP range. + * If not provided, start IP will the only IP allowed. + */ + end?: string; +} +/** + * Protocols for generated SAS. + */ +export declare enum SASProtocol { + /** + * Protocol that allows HTTPS only + */ + Https = "https", + /** + * Protocol that allows both HTTPS and HTTP + */ + HttpsAndHttp = "https,http" +} +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. + */ +export declare class SASQueryParameters { + /** + * The storage API version. + */ + readonly version: string; + /** + * Optional. The allowed HTTP protocol(s). + */ + readonly protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + readonly startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + readonly expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + readonly permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + readonly services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. 
+ */ + readonly resourceTypes?: string; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + readonly identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + readonly encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + readonly resource?: string; + /** + * The signature for the SAS token. + */ + readonly signature: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + readonly cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + readonly contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + readonly contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + readonly contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + readonly contentType?: string; + /** + * Inner value of getter ipRange. + */ + private readonly ipRangeInner?; + /** + * The Azure Active Directory object ID in GUID format. + * Property of user delegation key. + */ + private readonly signedOid?; + /** + * The Azure Active Directory tenant ID in GUID format. + * Property of user delegation key. + */ + private readonly signedTenantId?; + /** + * The date-time the key is active. + * Property of user delegation key. + */ + private readonly signedStartsOn?; + /** + * The date-time the key expires. + * Property of user delegation key. + */ + private readonly signedExpiresOn?; + /** + * Abbreviation of the Azure Storage service that accepts the user delegation key. + * Property of user delegation key. + */ + private readonly signedService?; + /** + * The service version that created the user delegation key. + * Property of user delegation key. + */ + private readonly signedVersion?; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This is only used for User Delegation SAS. + */ + readonly preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + readonly correlationId?: string; + /* + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + readonly ipRange: SasIPRange | undefined; + /** + * Creates an instance of SASQueryParameters. 
+ * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param permissions - Representing the storage permissions + * @param services - Representing the storage services being accessed (only for Account SAS) + * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS) + * @param protocol - Representing the allowed HTTP protocol(s) + * @param startsOn - Representing the start time for this SAS token + * @param expiresOn - Representing the expiry time for this SAS token + * @param ipRange - Representing the range of valid IP addresses for this SAS token + * @param identifier - Representing the signed identifier (only for Service SAS) + * @param resource - Representing the storage container or blob (only for Service SAS) + * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS) + * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS) + * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS) + * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS) + * @param contentType - Representing the content-type header (only for Blob/File Service SAS) + * @param userDelegationKey - Representing the user delegation key properties + * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS) + * @param correlationId - Representing the correlation ID (only for User Delegation SAS) + * @param encryptionScope - + */ + constructor(version: string, signature: string, permissions?: string, services?: string, resourceTypes?: string, protocol?: SASProtocol, startsOn?: Date, expiresOn?: Date, ipRange?: SasIPRange, identifier?: string, resource?: string, cacheControl?: string, contentDisposition?: string, contentEncoding?: string, contentLanguage?: string, contentType?: string, userDelegationKey?: UserDelegationKey, preauthorizedAgentObjectId?: string, correlationId?: string, encryptionScope?: string); + /** + * Creates an instance of SASQueryParameters. + * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param options - Optional. Options to construct the SASQueryParameters. + */ + constructor(version: string, signature: string, options?: SASQueryParametersOptions); + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. + * + */ + toString(): string; + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + private tryAppendQueryParameter; +} +/** + * Options to construct {@link SASQueryParameters}. + */ +export declare interface SASQueryParametersOptions { + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + resourceTypes?: string; + /** + * Optional. 
The allowed HTTP protocol(s). + */ + protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + resource?: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + contentType?: string; + /** + * User delegation key properties. + */ + userDelegationKey?: UserDelegationKey; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}. + * This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + correlationId?: string; +} +/** Parameter group */ +export declare interface SequenceNumberAccessConditions { + /** Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. */ + ifSequenceNumberLessThanOrEqualTo?: number; + /** Specify this header value to operate only on a blob if it has a sequence number less than the specified. */ + ifSequenceNumberLessThan?: number; + /** Specify this header value to operate only on a blob if it has the specified sequence number. */ + ifSequenceNumberEqualTo?: number; +} +/** Defines values for SequenceNumberActionType. */ +export declare type SequenceNumberActionType = "max" | "update" | "increment"; +/** Defines headers for Service_filterBlobs operation. */ +export declare interface ServiceFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
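// Illustrative sketch (not part of the vendored declaration file): a SASQueryParameters
// instance like the one described above is normally produced by
// generateBlobSASQueryParameters (assumed from elsewhere in @azure/storage-blob) and then
// appended to a blob URL via toString(). Account name, key and resource names are placeholders.
import {
  BlobSASPermissions,
  generateBlobSASQueryParameters,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");
const sas = generateBlobSASQueryParameters(
  {
    containerName: "my-container",
    blobName: "report.txt",
    permissions: BlobSASPermissions.parse("r"), // read-only
    expiresOn: new Date(Date.now() + 60 * 60 * 1000), // one hour from now
  },
  credential
);
const blobUrlWithSas = `https://myaccount.blob.core.windows.net/my-container/report.txt?${sas.toString()}`;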
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ServiceFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment & ServiceFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; +/** + * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation. + */ +export declare interface ServiceGenerateAccountSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} +/** Defines headers for Service_getAccountInfo operation. */ +export declare interface ServiceGetAccountInfoHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Identifies the sku name of the account */ + skuName?: SkuName; + /** Identifies the account kind */ + accountKind?: AccountKind; + /** Version 2019-07-07 and newer. Indicates if the account has a hierarchical namespace enabled. */ + isHierarchicalNamespaceEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.getAccountInfo} operation. + */ +export declare interface ServiceGetAccountInfoOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the getAccountInfo operation. */ +export declare type ServiceGetAccountInfoResponse = ServiceGetAccountInfoHeaders & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetAccountInfoHeaders; + }; +}; +/** Defines headers for Service_getProperties operation. */ +export declare interface ServiceGetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.getProperties} operation. + */ +export declare interface ServiceGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the getProperties operation. */ +export declare type ServiceGetPropertiesResponse = ServiceGetPropertiesHeaders & BlobServiceProperties & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceProperties; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetPropertiesHeaders; + }; +}; +/** Defines headers for Service_getStatistics operation. */ +export declare interface ServiceGetStatisticsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.getStatistics} operation. + */ +export declare interface ServiceGetStatisticsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the getStatistics operation. */ +export declare type ServiceGetStatisticsResponse = ServiceGetStatisticsHeaders & BlobServiceStatistics & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceStatistics; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetStatisticsHeaders; + }; +}; +/** Defines headers for Service_getUserDelegationKey operation. 
*/ +export declare interface ServiceGetUserDelegationKeyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the Service - Get User Delegation Key. + */ +export declare interface ServiceGetUserDelegationKeyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** + * Contains response data for the {@link getUserDelegationKey} operation. + */ +export declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey & ServiceGetUserDelegationKeyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceGetUserDelegationKeyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: UserDelegationKeyModel; + }; +}; +/** + * Options to configure the {@link BlobServiceClient.listContainers} operation. + */ +export declare interface ServiceListContainersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether the container's metadata + * should be returned as part of the response body. + */ + includeMetadata?: boolean; + /** + * Specifies whether soft deleted containers should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether system containers should be included in the response. + */ + includeSystem?: boolean; +} +/** Defines headers for Service_listContainersSegment operation. */ +export declare interface ServiceListContainersSegmentHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the listContainersSegment operation. */ +export declare type ServiceListContainersSegmentResponse = ServiceListContainersSegmentHeaders & ListContainersSegmentResponse & { + /** The underlying HTTP response. 
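// Illustrative sketch (not part of the vendored declaration file): passing the
// ServiceListContainersOptions declared above to BlobServiceClient.listContainers
// (BlobServiceClient is assumed from elsewhere in @azure/storage-blob; the connection
// string is a placeholder).
import { BlobServiceClient } from "@azure/storage-blob";

async function listMyContainers(connectionString: string): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(connectionString);
  // prefix and includeMetadata are fields of ServiceListContainersOptions
  for await (const container of service.listContainers({ prefix: "my-", includeMetadata: true })) {
    console.log(container.name);
  }
}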
*/ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: ListContainersSegmentResponse; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceListContainersSegmentHeaders; + }; +}; +/** + * Options to configure {@link BlobServiceClient.renameContainer} operation. + */ +export declare interface ServiceRenameContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Condition to meet for the source container. + */ + sourceCondition?: LeaseAccessConditions; +} +/** Defines headers for Service_setProperties operation. */ +export declare interface ServiceSetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.setProperties} operation. + */ +export declare interface ServiceSetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the setProperties operation. */ +export declare type ServiceSetPropertiesResponse = ServiceSetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSetPropertiesHeaders; + }; +}; +/** Defines headers for Service_submitBatch operation. */ +export declare interface ServiceSubmitBatchHeaders { + /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */ + contentType?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** Error Code */ + errorCode?: string; +} +/** Optional parameters. */ +export declare interface ServiceSubmitBatchOptionalParamsModel extends coreHttp.OperationOptions { + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; +} +/** Contains response data for the submitBatch operation. 
*/ +export declare type ServiceSubmitBatchResponseModel = ServiceSubmitBatchHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; +/** + * Options to configure {@link BlobServiceClient.undeleteContainer} operation. + */ +export declare interface ServiceUndeleteContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies the new name of the restored container. + * Will use its original name if this is not specified. + * @deprecated Restore container to a different name is not supported by service anymore. + */ + destinationContainerName?: string; +} +/** + * Signed identifier. + */ +export declare interface SignedIdentifier { + /** + * a unique id + */ + id: string; + /** + * Access Policy + */ + accessPolicy: { + /** + * Optional. The date-time the policy is active + */ + startsOn?: Date; + /** + * Optional. The date-time the policy expires + */ + expiresOn?: Date; + /** + * The permissions for the acl policy + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + */ + permissions?: string; + }; +} +/** signed identifier */ +export declare interface SignedIdentifierModel { + /** a unique id */ + id: string; + /** An Access policy */ + accessPolicy: AccessPolicy; +} +/** Defines values for SkuName. */ +export declare type SkuName = "Standard_LRS" | "Standard_GRS" | "Standard_RAGRS" | "Standard_ZRS" | "Premium_LRS"; +/** The properties that enable an account to host a static website */ +export declare interface StaticWebsite { + /** Indicates whether this account is hosting a static website */ + enabled: boolean; + /** The default name of the index page under each directory */ + indexDocument?: string; + /** The absolute path of the custom 404 page */ + errorDocument404Path?: string; + /** Absolute path of the default index page */ + defaultIndexDocumentPath?: string; +} +/** + * Defines the known cloud audiences for Storage. + */ +export declare enum StorageBlobAudience { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageOAuthScopes = "https://storage.azure.com/.default", + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + DiskComputeOAuthScopes = "https://disk.compute.azure.com/.default" +} +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +export declare class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. 
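// Illustrative sketch (not part of the vendored declaration file): the SignedIdentifier
// shape declared above is what ContainerClient.setAccessPolicy (assumed from elsewhere in
// @azure/storage-blob) accepts as a stored access policy. Names and dates are placeholders.
import { BlobServiceClient } from "@azure/storage-blob";

async function addReadOnlyPolicy(service: BlobServiceClient): Promise<void> {
  const container = service.getContainerClient("my-container");
  await container.setAccessPolicy(undefined, [
    {
      id: "read-only-policy",
      accessPolicy: {
        startsOn: new Date(),
        expiresOn: new Date(Date.now() + 24 * 60 * 60 * 1000), // one day
        permissions: "r",
      },
    },
  ]);
}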
+ * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; +} +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +export declare class StorageBrowserPolicyFactory implements RequestPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy; +} +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +declare abstract class StorageClient { + /** + * Encoded URL string value. + */ + readonly url: string; + readonly accountName: string; + /* Excluded from this release type: pipeline */ + /** + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; + /** + * StorageClient is a reference to protocol layer operations entry, which is + * generated by AutoRest generator. + */ + protected readonly storageClientContext: StorageClientContext; + /** + */ + protected readonly isHttps: boolean; + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + protected constructor(url: string, pipeline: PipelineLike); +} +declare class StorageClientContext extends coreHttp.ServiceClient { + url: string; + version: string; + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url: string, options?: StorageClientOptionalParams); +} +/** Optional parameters. */ +declare interface StorageClientOptionalParams extends coreHttp.ServiceClientOptions { + /** Specifies the version of the operation to use for this request. */ + version?: string; + /** Overrides client endpoint. */ + endpoint?: string; +} +/** + * The OAuth scope to use with Azure Storage. + */ +export declare const StorageOAuthScopes: string | string[]; +/** + * Options interface for the {@link newPipeline} function. + */ +export declare interface StoragePipelineOptions { + /** + * Options to configure a proxy for outgoing requests. + */ + proxyOptions?: ProxyOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; + /** + * Configures the built-in retry policy behavior. + */ + retryOptions?: StorageRetryOptions; + /** + * Keep alive configurations. Default keep-alive is enabled. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; + /** + * The audience used to retrieve an AAD token. + */ + audience?: string | string[]; +} +/** + * Storage Blob retry options interface. + */ +export declare interface StorageRetryOptions { + /** + * Optional. StorageRetryPolicyType, default is exponential retry policy. 
+     */
+    readonly retryPolicyType?: StorageRetryPolicyType;
+    /**
+     * Optional. Max try number of attempts, default is 4.
+     * A value of 1 means 1 try and no retries.
+     * A value smaller than 1 means default retry number of attempts.
+     */
+    readonly maxTries?: number;
+    /**
+     * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request.
+     * A value of zero or undefined means no default timeout on SDK client, Azure
+     * Storage server's default timeout policy will be used.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations
+     */
+    readonly tryTimeoutInMs?: number;
+    /**
+     * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms).
+     * The delay increases (exponentially or linearly) with each retry up to a maximum specified by
+     * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs.
+     */
+    readonly retryDelayInMs?: number;
+    /**
+     * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms).
+     * If you specify 0, then you must also specify 0 for retryDelayInMs.
+     */
+    readonly maxRetryDelayInMs?: number;
+    /**
+     * If a secondaryHost is specified, retries will be tried against this host. If secondaryHost is undefined
+     * (the default) then operations are not retried against another host.
+     *
+     * NOTE: Before setting this field, make sure you understand the issues around
+     * reading stale and potentially-inconsistent data at
+     * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs}
+     */
+    readonly secondaryHost?: string;
+}
+/**
+ * Retry policy with exponential retry and linear retry implemented.
+ */
+export declare class StorageRetryPolicy extends BaseRequestPolicy {
+    /**
+     * RetryOptions.
+     */
+    private readonly retryOptions;
+    /**
+     * Creates an instance of RetryPolicy.
+     *
+     * @param nextPolicy -
+     * @param options -
+     * @param retryOptions -
+     */
+    constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryOptions?: StorageRetryOptions);
+    /**
+     * Sends request.
+     *
+     * @param request -
+     */
+    sendRequest(request: WebResource): Promise<HttpOperationResponse>;
+    /**
+     * Decide and perform next retry. Won't mutate request parameter.
+     *
+     * @param request -
+     * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then
+     *                          the resource was not found. This may be due to replication delay. So, in this
+     *                          case, we'll never try the secondary again for this operation.
+     * @param attempt - How many retries has been attempted to performed, starting from 1, which includes
+     *                  the attempt will be performed by this method call.
+     */
+    protected attemptSendRequest(request: WebResource, secondaryHas404: boolean, attempt: number): Promise<HttpOperationResponse>;
+    /**
+     * Decide whether to retry according to last HTTP response and retry counters.
+     *
+     * @param isPrimaryRetry -
+     * @param attempt -
+     * @param response -
+     * @param err -
+     */
+    protected shouldRetry(isPrimaryRetry: boolean, attempt: number, response?: HttpOperationResponse, err?: RestError): boolean;
+    /**
+     * Delay a calculated time between retries.
+     *
+     * @param isPrimaryRetry -
+     * @param attempt -
+     * @param abortSignal -
+     */
+    private delay;
+}
+/**
+ * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.
+ */ +export declare class StorageRetryPolicyFactory implements RequestPolicyFactory { + private retryOptions?; + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions?: StorageRetryOptions); + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy; +} +/** + * RetryPolicy types. + */ +export declare enum StorageRetryPolicyType { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + EXPONENTIAL = 0, + /** + * Linear retry. Retry time delay grows linearly. + */ + FIXED = 1 +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +export declare class StorageSharedKeyCredential extends Credential_2 { + /** + * Azure Storage account name; readonly. + */ + readonly accountName: string; + /** + * Azure Storage account key; readonly. + */ + private readonly accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName: string, accountKey: string); + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageSharedKeyCredentialPolicy; + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign: string): string; +} +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +export declare class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + private readonly factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, factory: StorageSharedKeyCredential); + /** + * Signs request. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + private getHeaderValueToSign; + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + private getCanonicalizedHeadersString; + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + private getCanonicalizedResourceString; +} +/** Defines values for SyncCopyStatusType. 
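// Illustrative sketch (not part of the vendored declaration file): wiring the
// StorageRetryOptions and StorageSharedKeyCredential declared above into a client through
// newPipeline and BlobServiceClient (both assumed from elsewhere in @azure/storage-blob).
// Account name, key and URL are placeholders.
import {
  BlobServiceClient,
  newPipeline,
  StorageRetryPolicyType,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");
const pipeline = newPipeline(credential, {
  retryOptions: {
    retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,
    maxTries: 3,
    tryTimeoutInMs: 30 * 1000,
  },
});
const service = new BlobServiceClient("https://myaccount.blob.core.windows.net", pipeline);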
*/
+export declare type SyncCopyStatusType = "success";
+/**
+ * Specifies HTTP options for conditional requests based on blob tags.
+ */
+export declare interface TagConditions {
+    /**
+     * Optional SQL statement to apply to the tags of the blob.
+     */
+    tagConditions?: string;
+}
+/**
+ * Blob tags.
+ */
+export declare type Tags = Record<string, string>;
+/**
+ * A user delegation key.
+ */
+export declare interface UserDelegationKey {
+    /**
+     * The Azure Active Directory object ID in GUID format.
+     */
+    signedObjectId: string;
+    /**
+     * The Azure Active Directory tenant ID in GUID format.
+     */
+    signedTenantId: string;
+    /**
+     * The date-time the key is active.
+     */
+    signedStartsOn: Date;
+    /**
+     * The date-time the key expires.
+     */
+    signedExpiresOn: Date;
+    /**
+     * Abbreviation of the Azure Storage service that accepts the key.
+     */
+    signedService: string;
+    /**
+     * The service version that created the key.
+     */
+    signedVersion: string;
+    /**
+     * The key as a base64 string.
+     */
+    value: string;
+}
+/** A user delegation key */
+export declare interface UserDelegationKeyModel {
+    /** The Azure Active Directory object ID in GUID format. */
+    signedObjectId: string;
+    /** The Azure Active Directory tenant ID in GUID format */
+    signedTenantId: string;
+    /** The date-time the key is active */
+    signedStartsOn: string;
+    /** The date-time the key expires */
+    signedExpiresOn: string;
+    /** Abbreviation of the Azure Storage service that accepts the key */
+    signedService: string;
+    /** The service version that created the key */
+    signedVersion: string;
+    /** The key as a base64 string */
+    value: string;
+}
+export { WebResource };
+export {};
diff --git a/node_modules/@azure/storage-blob/types/latest/storage-blob.d.ts b/node_modules/@azure/storage-blob/types/latest/storage-blob.d.ts
new file mode 100644
index 00000000..74f594b0
--- /dev/null
+++ b/node_modules/@azure/storage-blob/types/latest/storage-blob.d.ts
@@ -0,0 +1,9716 @@
+/// <reference types="node" />
+
+import { AbortSignalLike } from '@azure/abort-controller';
+import { AzureLogger } from '@azure/logger';
+import { BaseRequestPolicy } from '@azure/core-http';
+import * as coreHttp from '@azure/core-http';
+import { deserializationPolicy } from '@azure/core-http';
+import { HttpHeaders } from '@azure/core-http';
+import { HttpOperationResponse } from '@azure/core-http';
+import { HttpRequestBody } from '@azure/core-http';
+import { HttpResponse } from '@azure/core-http';
+import { HttpClient as IHttpClient } from '@azure/core-http';
+import { KeepAliveOptions } from '@azure/core-http';
+import { OperationTracingOptions } from '@azure/core-tracing';
+import { PagedAsyncIterableIterator } from '@azure/core-paging';
+import { PollerLike } from '@azure/core-lro';
+import { PollOperationState } from '@azure/core-lro';
+import { ProxyOptions } from '@azure/core-http';
+import { Readable } from 'stream';
+import { RequestPolicy } from '@azure/core-http';
+import { RequestPolicyFactory } from '@azure/core-http';
+import { RequestPolicyOptions } from '@azure/core-http';
+import { RestError } from '@azure/core-http';
+import { ServiceClientOptions } from '@azure/core-http';
+import { TokenCredential } from '@azure/core-http';
+import { TransferProgressEvent } from '@azure/core-http';
+import { UserAgentOptions } from '@azure/core-http';
+import { WebResource } from '@azure/core-http';
+
+/** An Access policy */
+export declare interface AccessPolicy {
+    /** the date-time the policy is active */
+    startsOn?: string;
+    /** the date-time the policy expires */
+    expiresOn?: string;
+ /** the permissions for the acl policy */ + permissions?: string; +} + +/** Defines values for AccessTier. */ +export declare type AccessTier = "P4" | "P6" | "P10" | "P15" | "P20" | "P30" | "P40" | "P50" | "P60" | "P70" | "P80" | "Hot" | "Cool" | "Archive"; + +/** Defines values for AccountKind. */ +export declare type AccountKind = "Storage" | "BlobStorage" | "StorageV2" | "FileStorage" | "BlockBlobStorage"; + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class AccountSASPermissions { + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions: string): AccountSASPermissions; + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions; + /** + * Permission to read resources and list queues and tables granted. + */ + read: boolean; + /** + * Permission to write resources granted. + */ + write: boolean; + /** + * Permission to create blobs and files granted. + */ + delete: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add: boolean; + /** + * Permission to create blobs and files granted. + */ + create: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update: boolean; + /** + * Permission to get and delete messages granted. + */ + process: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Permission to filter blobs. + */ + filter: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} + +/** + * A type that looks like an account SAS permission. + * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface AccountSASPermissionsLike { + /** + * Permission to read resources and list queues and tables granted. + */ + read?: boolean; + /** + * Permission to write resources granted. + */ + write?: boolean; + /** + * Permission to delete blobs and files granted. + */ + delete?: boolean; + /** + * Permission to delete versions granted. 
+ */ + deleteVersion?: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list?: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add?: boolean; + /** + * Permission to create blobs and files granted. + */ + create?: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update?: boolean; + /** + * Permission to get and delete messages granted. + */ + process?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Permission to filter blobs. + */ + filter?: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +export declare class AccountSASResourceTypes { + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes: string): AccountSASResourceTypes; + /** + * Permission to access service level APIs granted. + */ + service: boolean; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container: boolean; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object: boolean; + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +export declare class AccountSASServices { + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services: string): AccountSASServices; + /** + * Permission to access blob resources granted. + */ + blob: boolean; + /** + * Permission to access file resources granted. + */ + file: boolean; + /** + * Permission to access queue resources granted. + */ + queue: boolean; + /** + * Permission to access table resources granted. 
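// Illustrative sketch (not part of the vendored declaration file): combining the
// AccountSASPermissions, AccountSASResourceTypes and AccountSASServices helpers declared
// above into an account SAS. generateAccountSASQueryParameters and the
// AccountSASSignatureValues shape it takes are assumed from elsewhere in this file;
// account name and key are placeholders.
import {
  AccountSASPermissions,
  AccountSASResourceTypes,
  AccountSASServices,
  generateAccountSASQueryParameters,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");
const accountSas = generateAccountSASQueryParameters(
  {
    permissions: AccountSASPermissions.parse("rl"), // read + list
    services: AccountSASServices.parse("b").toString(), // blob service only
    resourceTypes: AccountSASResourceTypes.parse("sco").toString(), // service, container, object
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),
  },
  credential
).toString();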
+ */ + table: boolean; + /** + * Converts the given services to a string. + * + */ + toString(): string; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once + * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation + * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters} + * exist because the former is mutable and a logical representation while the latter is immutable and used to generate + * actual REST requests. + * + * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1 + * for more conceptual information on SAS + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * for descriptions of the parameters, including which are required + */ +export declare interface AccountSASSignatureValues { + /** + * If not provided, this defaults to the service version targeted by this version of the library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * The time after which the SAS will no longer work. + */ + expiresOn: Date; + /** + * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help + * constructing the permissions string. + */ + permissions: AccountSASPermissions; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to + * construct this value. + */ + services: string; + /** + * The values that indicate the resource types accessible with this SAS. Please refer + * to {@link AccountSASResourceTypes} to construct this value. + */ + resourceTypes: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} + +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +export declare class AnonymousCredential extends Credential_2 { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): AnonymousCredentialPolicy; +} + +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +export declare class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); +} + +/** Defines headers for AppendBlob_appendBlockFromUrl operation. */ +export declare interface AppendBlobAppendBlockFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. 
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation. + */ +export declare interface AppendBlobAppendBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. 
+ */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} + +/** Contains response data for the appendBlockFromUrl operation. */ +export declare type AppendBlobAppendBlockFromUrlResponse = AppendBlobAppendBlockFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockFromUrlHeaders; + }; +}; + +/** Defines headers for AppendBlob_appendBlock operation. */ +export declare interface AppendBlobAppendBlockHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link AppendBlobClient.appendBlock} operation. + */ +export declare interface AppendBlobAppendBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Callback to receive events on the progress of append block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the appendBlock operation. */ +export declare type AppendBlobAppendBlockResponse = AppendBlobAppendBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockHeaders; + }; +}; + +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +export declare class AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. + */ + private appendBlobContext; + /** + * + * Creates an instance of AppendBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. 
+ * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): AppendBlobClient; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + create(options?: AppendBlobCreateOptions): Promise; + /** + * Creates a 0-length append blob. 
Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + createIfNotExists(options?: AppendBlobCreateIfNotExistsOptions): Promise; + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + seal(options?: AppendBlobSealOptions): Promise; + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + appendBlock(body: HttpRequestBody, contentLength: number, options?: AppendBlobAppendBlockOptions): Promise; + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + appendBlockFromURL(sourceURL: string, sourceOffset: number, count: number, options?: AppendBlobAppendBlockFromURLOptions): Promise; +} + +/** Defines headers for AppendBlob_create operation. */ +export declare interface AppendBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link AppendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * HTTP headers to set when creating append blobs. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; +} + +/** + * Contains response data for the {@link appendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} + +/** + * Options to configure {@link AppendBlobClient.create} operation. + */ +export declare interface AppendBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating append blobs. 
+ */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when creating append blobs. A common header + * to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the create operation. */ +export declare type AppendBlobCreateResponse = AppendBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobCreateHeaders; + }; +}; + +/** + * Conditions to add to the creation of this append blob. + */ +export declare interface AppendBlobRequestConditions extends BlobRequestConditions, AppendPositionAccessConditions { +} + +/** + * Options to configure {@link AppendBlobClient.seal} operation. + */ +export declare interface AppendBlobSealOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet. + */ + conditions?: AppendBlobRequestConditions; +} + +/** Parameter group */ +export declare interface AppendPositionAccessConditions { + /** Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + maxSize?: number; + /** Optional conditional header, used only for the Append Block operation. A number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + appendPosition?: number; +} + +/** Defines values for ArchiveStatus. */ +export declare type ArchiveStatus = "rehydrate-pending-to-hot" | "rehydrate-pending-to-cool"; + +export { BaseRequestPolicy } + +/** + * A request associated with a batch operation. + */ +export declare interface BatchSubRequest { + /** + * The URL of the resource to request operation. + */ + url: string; + /** + * The credential used for sub request. 
+ * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. + * You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; +} + +/** + * The response data associated with a single request within a batch operation. + */ +export declare interface BatchSubResponse { + /** + * The status code of the sub operation. + */ + status: number; + /** + * The status message of the sub operation. + */ + statusMessage: string; + /** + * The error code of the sub operation, if the sub operation failed. + */ + errorCode?: string; + /** + * The HTTP response headers. + */ + headers: HttpHeaders; + /** + * The body as text. + */ + bodyAsText?: string; + /** + * The batch sub request corresponding to the sub response. + */ + _request: BatchSubRequest; +} + +/** Defines headers for Blob_abortCopyFromURL operation. */ +export declare interface BlobAbortCopyFromURLHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.abortCopyFromURL} operation. + */ +export declare interface BlobAbortCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} + +/** Contains response data for the abortCopyFromURL operation. */ +export declare type BlobAbortCopyFromURLResponse = BlobAbortCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobAbortCopyFromURLHeaders; + }; +}; + +/** + * Options to configure Blob - Acquire Lease operation. + */ +export declare interface BlobAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +export declare class BlobBatch { + private batchRequest; + private readonly batch; + private batchType; + constructor(); + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. 
+ * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType(): string; + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody(): string; + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests(): Map; + private addSubRequestInternal; + private setBatchType; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlob(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param options - + */ + deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
+ * @param tier - + * @param options - + */ + setBlobAccessTier(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param tier - + * @param options - + */ + setBlobAccessTier(blobClient: BlobClient, tier: AccessTier, options?: BlobSetTierOptions): Promise; +} + +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +export declare class BlobBatchClient { + private serviceOrContainerContext; + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch(): BlobBatch; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operations will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resources to delete. 
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlobs(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation(subrequest) will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs to delete. + * @param options - + */ + deleteBlobs(blobClients: BlobClient[], options?: BlobDeleteOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobsAccessTier(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs which should have a new tier set. 
+ * @param tier - + * @param options - + */ + setBlobsAccessTier(blobClients: BlobClient[], tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + submitBatch(batchRequest: BlobBatch, options?: BlobBatchSubmitBatchOptionalParams): Promise; +} + +/** + * Contains response data for the {@link deleteBlobs} operation. + */ +export declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse; + +/** + * Contains response data for the {@link setBlobsAccessTier} operation. + */ +export declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse; + +/** + * Options to configure the Service - Submit Batch Optional Params. + */ +export declare interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel { +} + +/** + * Contains response data for blob batch operations. + */ +export declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse & ServiceSubmitBatchHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; + +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions { + /** + * The amount of time in milliseconds the poller should wait between + * calls to the service to determine the status of the Blob copy. + * Defaults to 15 seconds. + */ + intervalInMs?: number; + /** + * Callback to receive the state of the copy progress. + */ + onProgress?: (state: BlobBeginCopyFromUrlPollState) => void; + /** + * Serialized poller state that can be used to resume polling from. + * This may be useful when starting a copy on one process or thread + * and you wish to continue polling on another process or thread. + * + * To get serialized poller state, call `poller.toString()` on an existing + * poller. + */ + resumeFrom?: string; +} + +/** + * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}. + * + * This state is passed into the user-specified `onProgress` callback + * whenever copy progress is detected. + */ +export declare interface BlobBeginCopyFromUrlPollState extends PollOperationState { + /** + * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}. 
+ */ + readonly blobClient: CopyPollerBlobClient; + /** + * The copyId that identifies the in-progress blob copy. + */ + copyId?: string; + /** + * the progress of the blob copy as reported by the service. + */ + copyProgress?: string; + /** + * The source URL provided in {@link BlobClient.beginCopyFromURL}. + */ + copySource: string; + /** + * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call. + * This is exposed for the poller and should not be modified directly. + */ + readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions; +} + +/** + * Contains response data for the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse { +} + +/** + * Options to configure Blob - Break Lease operation. + */ +export declare interface BlobBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Options to configure Blob - Change Lease operation. + */ +export declare interface BlobChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +export declare class BlobClient extends StorageClient { + /** + * blobContext provided by protocol layer. + */ + private blobContext; + private _name; + private _containerName; + private _versionId?; + private _snapshot?; + /** + * The name of the blob. + */ + get name(): string; + /** + * The name of the storage container the blob is associated with. + */ + get containerName(): string; + /** + * + * Creates an instance of BlobClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. 
+ * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot: string): BlobClient; + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId: string): BlobClient; + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient(): AppendBlobClient; + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient(): BlockBlobClient; + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient(): PageBlobClient; + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. 
+ * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + download(offset?: number, count?: number, options?: BlobDownloadOptions): Promise; + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + exists(options?: BlobExistsOptions): Promise; + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + getProperties(options?: BlobGetPropertiesOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + delete(options?: BlobDeleteOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. 
+ */ + deleteIfExists(options?: BlobDeleteOptions): Promise; + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. + */ + undelete(options?: BlobUndeleteOptions): Promise; + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + setHTTPHeaders(blobHTTPHeaders?: BlobHTTPHeaders, options?: BlobSetHTTPHeadersOptions): Promise; + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + setMetadata(metadata?: Metadata, options?: BlobSetMetadataOptions): Promise; + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. + * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + setTags(tags: Tags, options?: BlobSetTagsOptions): Promise; + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + getTags(options?: BlobGetTagsOptions): Promise; + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + createSnapshot(options?: BlobCreateSnapshotOptions): Promise; + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. 
+ * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + beginCopyFromURL(copySource: string, options?: BlobBeginCopyFromURLOptions): Promise, BlobBeginCopyFromURLResponse>>; + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. + */ + abortCopyFromURL(copyId: string, options?: BlobAbortCopyFromURLOptions): Promise; + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + syncCopyFromURL(copySource: string, options?: BlobSyncCopyFromURLOptions): Promise; + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. + */ + setAccessTier(tier: BlockBlobTier | PremiumPageBlobTier | string, options?: BlobSetTierOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob in parallel to a buffer. + * Offset and count are optional, downloads the entire blob if they are not provided. + * + * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two + * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size, + * consider {@link downloadToFile}. + * + * @param offset - From which position of the block blob to download(in bytes) + * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined + * @param options - BlobDownloadToBufferOptions + */ + downloadToBuffer(offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob in parallel to a buffer. + * Offset and count are optional, downloads the entire blob if they are not provided. + * + * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two + * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size, + * consider {@link downloadToFile}. + * + * @param buffer - Buffer to be fill, must have length larger than count + * @param offset - From which position of the block blob to download(in bytes) + * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined + * @param options - BlobDownloadToBufferOptions + */ + downloadToBuffer(buffer: Buffer, offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + downloadToFile(filePath: string, offset?: number, count?: number, options?: BlobDownloadOptions): Promise; + private getBlobAndContainerNamesFromUrl; + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. 
+ * @param options - Optional options to the Blob Start Copy From URL operation.
+ */
+ private startCopyFromURL;
+ /**
+ * Only available for BlobClient constructed with a shared key credential.
+ *
+ * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties
+ * and parameters passed in. The SAS is signed by the shared key credential of the client.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+ *
+ * @param options - Optional parameters.
+ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+ */
+ generateSasUrl(options: BlobGenerateSasUrlOptions): Promise<string>;
+ /**
+ * Delete the immutability policy on the blob.
+ *
+ * @param options - Optional options to delete immutability policy on the blob.
+ */
+ deleteImmutabilityPolicy(options?: BlobDeleteImmutabilityPolicyOptions): Promise<BlobDeleteImmutabilityPolicyResponse>;
+ /**
+ * Set immutability policy on the blob.
+ *
+ * @param options - Optional options to set immutability policy on the blob.
+ */
+ setImmutabilityPolicy(immutabilityPolicy: BlobImmutabilityPolicy, options?: BlobSetImmutabilityPolicyOptions): Promise<BlobSetImmutabilityPolicyResponse>;
+ /**
+ * Set legal hold on the blob.
+ *
+ * @param options - Optional options to set legal hold on the blob.
+ */
+ setLegalHold(legalHoldEnabled: boolean, options?: BlobSetLegalHoldOptions): Promise<BlobSetLegalHoldResponse>;
+}
+
+/** Defines headers for Blob_copyFromURL operation. */
+export declare interface BlobCopyFromURLHeaders {
+ /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */
+ etag?: string;
+ /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */
+ lastModified?: Date;
+ /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
+ clientRequestId?: string;
+ /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
+ requestId?: string;
+ /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
+ version?: string;
+ /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */
+ versionId?: string;
+ /** UTC date/time value generated by the service that indicates the time at which the response was initiated */
+ date?: Date;
+ /** String identifier for this copy operation. */
+ copyId?: string;
+ /** State of the copy operation identified by x-ms-copy-id. */
+ copyStatus?: SyncCopyStatusType;
+ /** This response header is returned so that the client can check for the integrity of the copied content. This header is only returned if the source content MD5 was specified. */
+ contentMD5?: Uint8Array;
+ /** This response header is returned so that the client can check for the integrity of the copied content. */
+ xMsContentCrc64?: Uint8Array;
+ /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope.
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the copyFromURL operation. */ +export declare type BlobCopyFromURLResponse = BlobCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCopyFromURLHeaders; + }; +}; + +/** Defines values for BlobCopySourceTags. */ +export declare type BlobCopySourceTags = "REPLACE" | "COPY"; + +/** Defines headers for Blob_createSnapshot operation. */ +export declare interface BlobCreateSnapshotHeaders { + /** Uniquely identifies the snapshot and indicates the snapshot version. It may be used in subsequent requests to access the snapshot */ + snapshot?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** True if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. For a snapshot request, this header is set to true when metadata was provided in the request and encrypted with a customer-provided key. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.createSnapshot} operation. + */ +export declare interface BlobCreateSnapshotOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet when creating blob snapshots. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the createSnapshot operation. 
*/ +export declare type BlobCreateSnapshotResponse = BlobCreateSnapshotHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCreateSnapshotHeaders; + }; +}; + +/** Defines headers for Blob_delete operation. */ +export declare interface BlobDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the {@link BlobClient.deleteIfExists} operation. + */ +export declare interface BlobDeleteIfExistsResponse extends BlobDeleteResponse { + /** + * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place. + */ + succeeded: boolean; +} + +/** Defines headers for Blob_deleteImmutabilityPolicy operation. */ +export declare interface BlobDeleteImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} + +/** + * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation. + */ +export declare interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the deleteImmutabilityPolicy operation. */ +export declare type BlobDeleteImmutabilityPolicyResponse = BlobDeleteImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteImmutabilityPolicyHeaders; + }; +}; + +/** + * Options to configure the {@link BlobClient.delete} operation. + */ +export declare interface BlobDeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting blobs. + */ + conditions?: BlobRequestConditions; + /** + * Specifies options to delete blobs that have associated snapshots. + * - `include`: Delete the base blob and all of its snapshots. + * - `only`: Delete only the blob's snapshots and not the blob itself. 
+ */ + deleteSnapshots?: DeleteSnapshotsOptionType; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the delete operation. */ +export declare type BlobDeleteResponse = BlobDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteHeaders; + }; +}; + +/** Defines headers for Blob_download operation. */ +export declare interface BlobDownloadHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. 
This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. 
Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** The number of tags associated with the blob */ + tagCount?: number; + /** If this blob has been sealed */ + isSealed?: boolean; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} + +/** Optional parameters. */ +export declare interface BlobDownloadOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + leaseAccessConditions?: LeaseAccessConditions; + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** Parameter group */ + cpkInfo?: CpkInfo; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */ + snapshot?: string; + /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */ + versionId?: string; + /** Return only the bytes of the blob in the specified range. */ + range?: string; + /** When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. */ + rangeGetContentMD5?: boolean; + /** When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 MB in size. 
*/ + rangeGetContentCRC64?: boolean; +} + +/** + * Options to configure the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve. + */ + snapshot?: string; + /** + * When this is set to true and download range of blob, the service returns the MD5 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentMD5?: boolean; + /** + * When this is set to true and download range of blob, the service returns the CRC64 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentCrc64?: boolean; + /** + * Conditions to meet when downloading blobs. + */ + conditions?: BlobRequestConditions; + /** + * Call back to receive events on the progress of download operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original body download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional `FileClient.download()` request will be made + * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached. + * + * Default value is 5, please set a larger value when loading large files in poor network. + */ + maxRetryRequests?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the download operation. */ +export declare type BlobDownloadResponseModel = BlobDownloadHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDownloadHeaders; + }; +}; + +/** + * Contains response data for the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadResponseParsed extends BlobDownloadResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} + +/** + * Option interface for the {@link BlobClient.downloadToBuffer} operation. + */ +export declare interface BlobDownloadToBufferOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; + /** + * blockSize is the data every request trying to download. + * Must be greater than or equal to 0. + * If set to 0 or undefined, blockSize will automatically calculated according to the blob size. + */ + blockSize?: number; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original block download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional FileClient.download() request will be made + * from the broken point, until the requested block has been successfully downloaded or + * maxRetryRequestsPerBlock is reached. + * + * Default value is 5, please set a larger value when in poor network. + */ + maxRetryRequestsPerBlock?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel download. + */ + concurrency?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** + * Options to configure the {@link BlobClient.exists} operation. + */ +export declare interface BlobExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Conditions to meet. + */ + conditions?: BlobRequestConditions; +} + +/** + * An interface representing BlobFlatListSegment. + */ +export declare interface BlobFlatListSegment { + blobItems: BlobItem[]; +} + +export declare interface BlobFlatListSegmentModel { + blobItems: BlobItemInternal[]; +} + +/** + * Options to configure {@link BlobClient.generateSasUrl} operation. + */ +export declare interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: BlobSASPermissions; +} + +/** Defines headers for Blob_getProperties operation. */ +export declare interface BlobGetPropertiesHeaders { + /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** Returns the date and time the blob was created. */ + createdOn?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. 
This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Included if the blob is incremental copy blob. */ + isIncrementalCopy?: boolean; + /** Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful incremental copy snapshot for this blob. */ + destinationSnapshot?: string; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** The size of the blob in bytes. For a page blob, this header returns the value of the x-ms-blob-content-length header that is stored with the blob. */ + contentLength?: number; + /** The content type specified for the blob. The default content type is 'application/octet-stream' */ + contentType?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. 
The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The tier of page blob on a premium storage account or tier of block blob on blob storage LRS accounts. For a list of allowed premium page blob tiers, see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/premium-storage#features. For blob storage LRS accounts, valid values are Hot/Cool/Archive. */ + accessTier?: string; + /** For page blobs on a premium storage account only. If the access tier is not explicitly set on the blob, the tier is inferred based on its content length and this header will be returned with true value. */ + accessTierInferred?: boolean; + /** For blob storage LRS accounts, valid values are rehydrate-pending-to-hot/rehydrate-pending-to-cool. If the blob is being rehydrated and is not complete then this header is returned indicating that rehydrate is pending and also tells the destination tier. */ + archiveStatus?: string; + /** The time the tier was changed on the object. This is only returned if the tier on the block blob was ever set. */ + accessTierChangedOn?: Date; + /** A DateTime value returned by the service that uniquely identifies the blob. 
The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** The number of tags associated with the blob */ + tagCount?: number; + /** The time this blob will expire. */ + expiresOn?: Date; + /** If this blob has been sealed */ + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. */ + rehydratePriority?: RehydratePriority; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting blob properties. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** + * Contains response data for the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} + +/** Contains response data for the getProperties operation. */ +export declare type BlobGetPropertiesResponseModel = BlobGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobGetPropertiesHeaders; + }; +}; + +/** Defines headers for Blob_getTags operation. */ +export declare interface BlobGetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.getTags} operation. 
+ */ +export declare interface BlobGetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} + +/** + * Contains response data for the {@link BlobClient.getTags} operation. + */ +export declare type BlobGetTagsResponse = { + tags: Tags; +} & BlobGetTagsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: BlobGetTagsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: BlobTags; + }; +}; + +/** + * An interface representing BlobHierarchyListSegment. + */ +export declare interface BlobHierarchyListSegment { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItem[]; +} + +export declare interface BlobHierarchyListSegmentModel { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItemInternal[]; +} + +/** Parameter group */ +export declare interface BlobHTTPHeaders { + /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */ + blobCacheControl?: string; + /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */ + blobContentType?: string; + /** Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. */ + blobContentMD5?: Uint8Array; + /** Optional. Sets the blob's content encoding. If specified, this property is stored with the blob and returned with a read request. */ + blobContentEncoding?: string; + /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */ + blobContentLanguage?: string; + /** Optional. Sets the blob's Content-Disposition header. */ + blobContentDisposition?: string; +} + +/** + * Describe immutable policy for blob. + */ +export declare interface BlobImmutabilityPolicy { + /** + * Specifies the date time when the blobs immutability policy is set to expire. + */ + expiriesOn?: Date; + /** + * Specifies the immutability policy mode to set on the blob. + */ + policyMode?: BlobImmutabilityPolicyMode; +} + +/** Defines values for BlobImmutabilityPolicyMode. 
*/ +export declare type BlobImmutabilityPolicyMode = "Mutable" | "Unlocked" | "Locked"; + +/** + * An Azure Storage blob + */ +export declare interface BlobItem { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + properties: BlobProperties; + metadata?: { + [propertyName: string]: string; + }; + tags?: Tags; + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + hasVersionsOnly?: boolean; +} + +/** An Azure Storage blob */ +export declare interface BlobItemInternal { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + /** Properties of a blob */ + properties: BlobProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; + /** Blob tags */ + blobTags?: BlobTags; + /** Dictionary of */ + objectReplicationMetadata?: { + [propertyName: string]: string; + }; + /** Inactive root blobs which have any versions would have such tag with value true. */ + hasVersionsOnly?: boolean; +} + +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +export declare class BlobLeaseClient { + private _leaseId; + private _url; + private _containerOrBlobOperation; + private _isContainer; + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId(): string; + /** + * Gets the url. + * + * @readonly + */ + get url(): string; + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client: ContainerClient | BlobClient, leaseId?: string); + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + acquireLease(duration: number, options?: LeaseOperationOptions): Promise; + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + changeLease(proposedLeaseId: string, options?: LeaseOperationOptions): Promise; + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + releaseLease(options?: LeaseOperationOptions): Promise; + /** + * To renew the lease. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+ * and
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+ *
+ * @param options - Optional option to configure lease management operations.
+ * @returns Response data for renew lease operation.
+ */
+ renewLease(options?: LeaseOperationOptions): Promise<LeaseOperationResponse>;
+ /**
+ * To end the lease but ensure that another client cannot acquire a new lease
+ * until the current lease period has expired.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+ * and
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+ *
+ * @param breakPeriod - Break period
+ * @param options - Optional options to configure lease management operations.
+ * @returns Response data for break lease operation.
+ */
+ breakLease(breakPeriod: number, options?: LeaseOperationOptions): Promise<LeaseOperationResponse>;
+}
+
+export declare interface BlobPrefix {
+ name: string;
+}
+
+/** Properties of a blob */
+export declare interface BlobProperties {
+ createdOn?: Date;
+ lastModified: Date;
+ etag: string;
+ /** Size in bytes */
+ contentLength?: number;
+ contentType?: string;
+ contentEncoding?: string;
+ contentLanguage?: string;
+ contentMD5?: Uint8Array;
+ contentDisposition?: string;
+ cacheControl?: string;
+ blobSequenceNumber?: number;
+ blobType?: BlobType;
+ leaseStatus?: LeaseStatusType;
+ leaseState?: LeaseStateType;
+ leaseDuration?: LeaseDurationType;
+ copyId?: string;
+ copyStatus?: CopyStatusType;
+ copySource?: string;
+ copyProgress?: string;
+ copyCompletedOn?: Date;
+ copyStatusDescription?: string;
+ serverEncrypted?: boolean;
+ incrementalCopy?: boolean;
+ destinationSnapshot?: string;
+ deletedOn?: Date;
+ remainingRetentionDays?: number;
+ accessTier?: AccessTier;
+ accessTierInferred?: boolean;
+ archiveStatus?: ArchiveStatus;
+ customerProvidedKeySha256?: string;
+ /** The name of the encryption scope under which the blob is encrypted. */
+ encryptionScope?: string;
+ accessTierChangedOn?: Date;
+ tagCount?: number;
+ expiresOn?: Date;
+ isSealed?: boolean;
+ /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. */
+ rehydratePriority?: RehydratePriority;
+ lastAccessedOn?: Date;
+ /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */
+ immutabilityPolicyExpiresOn?: Date;
+ /** Indicates immutability policy mode. */
+ immutabilityPolicyMode?: BlobImmutabilityPolicyMode;
+ /** Indicates if a legal hold is present on the blob. */
+ legalHold?: boolean;
+}
+
+/**
+ * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}.
+ */
+export declare interface BlobQueryArrowConfiguration {
+ /**
+ * Kind.
+ */
+ kind: "arrow";
+ /**
+ * List of {@link BlobQueryArrowField} describing the schema of the data.
+ */
+ schema: BlobQueryArrowField[];
+}
+
+/**
+ * Describes a field in {@link BlobQueryArrowConfiguration}.
+ */
+export declare interface BlobQueryArrowField {
+ /**
+ * The type of the field.
+ */
+ type: BlobQueryArrowFieldType;
+ /**
+ * The name of the field.
+ */
+ name?: string;
+ /**
+ * The precision of the field. Required if type is "decimal".
+ */
+ precision?: number;
+ /**
+ * The scale of the field. Required if type is "decimal".
+ */
+ scale?: number;
+}
+
+/**
+ * The type of a {@link BlobQueryArrowField}.
+ */ +export declare type BlobQueryArrowFieldType = "int64" | "bool" | "timestamp[ms]" | "string" | "double" | "decimal"; + +/** + * Options to query blob with CSV format. + */ +export declare interface BlobQueryCsvTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a CSV format blob. + */ + kind: "csv"; + /** + * Column separator. Default is ",". + */ + columnSeparator?: string; + /** + * Field quote. + */ + fieldQuote?: string; + /** + * Escape character. + */ + escapeCharacter?: string; + /** + * Has headers. Default is false. + */ + hasHeaders?: boolean; +} + +/** + * Blob query error type. + */ +export declare interface BlobQueryError { + /** + * Whether error is fatal. Fatal error will stop query. + */ + isFatal: boolean; + /** + * Error name. + */ + name: string; + /** + * Position in bytes of the query. + */ + position: number; + /** + * Error description. + */ + description: string; +} + +/** Defines headers for Blob_query operation. */ +export declare interface BlobQueryHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. 
This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletionTime?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. 
*/ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} + +/** + * Options to query blob with JSON format. + */ +export declare interface BlobQueryJsonTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a JSON format blob. + */ + kind: "json"; +} + +/** + * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}. + */ +export declare interface BlobQueryParquetConfiguration { + /** + * Kind. + */ + kind: "parquet"; +} + +/** Contains response data for the query operation. */ +export declare type BlobQueryResponseModel = BlobQueryHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobQueryHeaders; + }; +}; + +/** + * Options to configure Blob - Release Lease operation. + */ +export declare interface BlobReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Options to configure Blob - Renew Lease operation. + */ +export declare interface BlobRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * standard HTTP conditional headers, tags condition and lease condition + */ +export declare interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions { +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class BlobSASPermissions { + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): BlobSASPermissions; + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString(): string; +} + +/** + * A type that looks like a Blob SAS permission. + * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface BlobSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. + */ + write?: boolean; + /** + * Specifies Delete access granted. + */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs. + */ +export declare interface BlobSASSignatureValues { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. 
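
The `BlobSASPermissions` class declared just above guarantees a service-accepted ordering of the permission flags. A minimal sketch of how it might be used, assuming these declarations come from the `@azure/storage-blob` package that `@actions/cache` pulls in transitively (the exact output strings are illustrative):

```ts
// Sketch: building a blob SAS permissions string with BlobSASPermissions.
import { BlobSASPermissions } from "@azure/storage-blob";

// From a raw permissions string; parse() throws on unknown characters.
const parsed = BlobSASPermissions.parse("racwd");

// From a plain object with boolean flags (BlobSASPermissionsLike).
const fromObject = BlobSASPermissions.from({ read: true, write: true, tag: true });

// toString() serializes the flags in the order the service expects.
console.log(parsed.toString());     // e.g. "racwd"
console.log(fromObject.toString()); // e.g. "rwt"
```
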
+ */ + expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource + * being accessed for help constructing the permissions string. + */ + permissions?: BlobSASPermissions | ContainerSASPermissions; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * The name of the container the SAS user may access. + */ + containerName: string; + /** + * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided. + */ + blobName?: string; + /** + * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09. + */ + snapshotTime?: string; + /** + * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10. + */ + versionId?: string; + /** + * Optional. The name of the access policy on the container this SAS references if any. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. + */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; + /** + * Optional. Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user + * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will + * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission + * check for the user specified in this value will be performed. This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to + * correlate SAS generation with storage resource access. This is only used for User Delegation SAS. + */ + correlationId?: string; +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +export declare class BlobServiceClient extends StorageClient { + /** + * serviceContext provided by protocol layer. + */ + private serviceContext; + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. 
] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString: string, options?: StoragePipelineOptions): BlobServiceClient; + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + * + * Example using DefaultAzureCredential from `@azure/identity`: + * + * ```js + * const account = ""; + * + * const defaultAzureCredential = new DefaultAzureCredential(); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * defaultAzureCredential + * ); + * ``` + * + * Example using an account name/key: + * + * ```js + * const account = "" + * const sharedKeyCredential = new StorageSharedKeyCredential(account, ""); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * sharedKeyCredential + * ); + * ``` + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName: string): ContainerClient; + /** + * Create a Blob container. + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + createContainer(containerName: string, options?: ContainerCreateOptions): Promise<{ + containerClient: ContainerClient; + containerCreateResponse: ContainerCreateResponse; + }>; + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. 
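
A small sketch of the container operations declared above (`fromConnectionString`, `createContainer`, `getContainerClient`), assuming the `@azure/storage-blob` package that ships these declarations; the environment variable and container name are illustrative, not part of this patch:

```ts
// Sketch: creating and addressing a container with BlobServiceClient.
import { BlobServiceClient } from "@azure/storage-blob";

async function main(): Promise<void> {
  const connectionString = process.env.AZURE_STORAGE_CONNECTION_STRING!;
  const serviceClient = BlobServiceClient.fromConnectionString(connectionString);

  // createContainer returns both the raw response and a ContainerClient.
  const { containerClient, containerCreateResponse } =
    await serviceClient.createContainer("my-container");
  console.log(`Created ${containerClient.url}, requestId: ${containerCreateResponse.requestId}`);

  // An existing container can also be addressed directly by name.
  const sameContainer = serviceClient.getContainerClient("my-container");
  console.log(sameContainer.url); // independent client object for the same container
}

main().catch(console.error);
```
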
+ */ + deleteContainer(containerName: string, options?: ContainerDeleteMethodOptions): Promise; + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + undeleteContainer(deletedContainerName: string, deletedContainerVersion: string, options?: ServiceUndeleteContainerOptions): Promise<{ + containerClient: ContainerClient; + containerUndeleteResponse: ContainerUndeleteResponse; + }>; + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + private renameContainer; + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + getProperties(options?: ServiceGetPropertiesOptions): Promise; + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + setProperties(properties: BlobServiceProperties, options?: ServiceSetPropertiesOptions): Promise; + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + getStatistics(options?: ServiceGetStatisticsOptions): Promise; + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + getAccountInfo(options?: ServiceGetAccountInfoOptions): Promise; + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + private listContainersSegment; + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
+ */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ServiceFindBlobByTagsOptions): PagedAsyncIterableIterator; + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + private listSegments; + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. 
+ */ + private listItems; + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options?: ServiceListContainersOptions): PagedAsyncIterableIterator; + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + getUserDelegationKey(startsOn: Date, expiresOn: Date, options?: ServiceGetUserDelegationKeyOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient(): BlobBatchClient; + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. 
The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn?: Date, permissions?: AccountSASPermissions, resourceTypes?: string, options?: ServiceGenerateAccountSasUrlOptions): string; +} + +/** Storage Service Properties. */ +export declare interface BlobServiceProperties { + /** Azure Analytics Logging settings. */ + blobAnalyticsLogging?: Logging; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + hourMetrics?: Metrics; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + minuteMetrics?: Metrics; + /** The set of CORS rules. */ + cors?: CorsRule[]; + /** The default version to use for requests to the Blob service if an incoming request's version is not specified. Possible values include version 2008-10-27 and all more recent versions */ + defaultServiceVersion?: string; + /** the retention policy which determines how long the associated data should persist */ + deleteRetentionPolicy?: RetentionPolicy; + /** The properties that enable an account to host a static website */ + staticWebsite?: StaticWebsite; +} + +/** Stats for the storage service. */ +export declare interface BlobServiceStatistics { + /** Geo-Replication information for the Secondary Storage Service */ + geoReplication?: GeoReplication; +} + +/** Defines headers for Blob_setHttpHeaders operation. */ +export declare interface BlobSetHTTPHeadersHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setHTTPHeaders} operation. + */ +export declare interface BlobSetHTTPHeadersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob HTTP headers. 
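
Referring back to the `BlobServiceClient.generateAccountSasUrl` method declared above: a hedged sketch of generating an account SAS, assuming `AccountSASPermissions` and `StorageSharedKeyCredential` from the same package (not shown in this excerpt); the account name and key source are illustrative:

```ts
// Sketch: account SAS generation requires a shared key credential.
import {
  AccountSASPermissions,
  BlobServiceClient,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

const account = "myaccount";
const credential = new StorageSharedKeyCredential(account, process.env.ACCOUNT_KEY!);
const serviceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  credential
);

// Read/list access to service, container and object resources for one hour.
const sasUrl = serviceClient.generateAccountSasUrl(
  new Date(Date.now() + 60 * 60 * 1000),
  AccountSASPermissions.parse("rl"),
  "sco"
);
console.log(sasUrl);
```
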
+ */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the setHttpHeaders operation. */ +export declare type BlobSetHTTPHeadersResponse = BlobSetHTTPHeadersHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetHTTPHeadersHeaders; + }; +}; + +/** Defines headers for Blob_setImmutabilityPolicy operation. */ +export declare interface BlobSetImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates the time the immutability policy will expire. */ + immutabilityPolicyExpiry?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; +} + +/** + * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation. + */ +export declare interface BlobSetImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + modifiedAccessCondition?: ModificationConditions; +} + +/** Contains response data for the setImmutabilityPolicy operation. */ +export declare type BlobSetImmutabilityPolicyResponse = BlobSetImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetImmutabilityPolicyHeaders; + }; +}; + +/** Defines headers for Blob_setLegalHold operation. */ +export declare interface BlobSetLegalHoldHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates if the blob has a legal hold. */ + legalHold?: boolean; +} + +/** + * Options for setting legal hold {@link BlobClient.setLegalHold} operation. + */ +export declare interface BlobSetLegalHoldOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the setLegalHold operation. 
*/ +export declare type BlobSetLegalHoldResponse = BlobSetLegalHoldHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetLegalHoldHeaders; + }; +}; + +/** Defines headers for Blob_setMetadata operation. */ +export declare interface BlobSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setMetadata} operation. + */ +export declare interface BlobSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob metadata. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the setMetadata operation. */ +export declare type BlobSetMetadataResponse = BlobSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetMetadataHeaders; + }; +}; + +/** Defines headers for Blob_setTags operation. 
*/ +export declare interface BlobSetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setTags} operation. + */ +export declare interface BlobSetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} + +/** Contains response data for the setTags operation. */ +export declare type BlobSetTagsResponse = BlobSetTagsHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTagsHeaders; + }; +}; + +/** Defines headers for Blob_setTier operation. */ +export declare interface BlobSetTierHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and newer. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setAccessTier} operation. + */ +export declare interface BlobSetTierOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; +} + +/** Contains response data for the setTier operation. */ +export declare type BlobSetTierResponse = BlobSetTierHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTierHeaders; + }; +}; + +/** Defines headers for Blob_startCopyFromURL operation. */ +export declare interface BlobStartCopyFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
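
A sketch exercising the set-metadata/set-tags/set-tier option shapes declared above through `BlobClient` (declared earlier in this file); the connection string parameter, container and blob names, and tag values are illustrative:

```ts
// Sketch: setMetadata with an ETag condition, then setTags and setAccessTier.
import { BlobServiceClient } from "@azure/storage-blob";

async function tagAndTier(connectionString: string): Promise<void> {
  const blobClient = BlobServiceClient.fromConnectionString(connectionString)
    .getContainerClient("my-container")
    .getBlobClient("report.csv");

  // BlobSetMetadataOptions / BlobSetTagsOptions accept conditions such as an ETag match.
  const props = await blobClient.getProperties();
  await blobClient.setMetadata({ source: "nightly-job" }, {
    conditions: { ifMatch: props.etag },
  });
  await blobClient.setTags({ project: "demo", retention: "30d" });

  // BlobSetTierOptions: move the blob to the Cool access tier.
  await blobClient.setAccessTier("Cool");
}
```
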
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobStartCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the blob that are being copied. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | PremiumPageBlobTier | string; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Overrides the sealed state of the destination blob. Default true. + */ + sealBlob?: boolean; +} + +/** Contains response data for the startCopyFromURL operation. */ +export declare type BlobStartCopyFromURLResponse = BlobStartCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. 
*/ + parsedHeaders: BlobStartCopyFromURLHeaders; + }; +}; + +/** + * Options to configure the {@link BlobClient.syncCopyFromURL} operation. + */ +export declare interface BlobSyncCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Default 'REPLACE'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} + +export declare interface BlobTag { + key: string; + value: string; +} + +/** Blob tags */ +export declare interface BlobTags { + blobTagSet: BlobTag[]; +} + +/** Defines values for BlobType. */ +export declare type BlobType = "BlockBlob" | "PageBlob" | "AppendBlob"; + +/** Defines headers for Blob_undelete operation. */ +export declare interface BlobUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.undelete} operation. + */ +export declare interface BlobUndeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
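
A sketch of the asynchronous server-side copy configured by the options declared above, using `BlobClient.beginCopyFromURL` (declared earlier in this file); the source URL, container and blob names are illustrative:

```ts
// Sketch: start a copy with destination tags and tier, then poll to completion.
import { BlobServiceClient } from "@azure/storage-blob";

async function copyBlob(connectionString: string, sourceUrl: string): Promise<void> {
  const destBlob = BlobServiceClient.fromConnectionString(connectionString)
    .getContainerClient("backups")
    .getBlobClient("copy-of-source");

  const poller = await destBlob.beginCopyFromURL(sourceUrl, {
    tags: { origin: "copy-example" },
    tier: "Cool",
  });

  // The poller resolves once x-ms-copy-status reaches a terminal state.
  const result = await poller.pollUntilDone();
  console.log(`Copy ${result.copyId} finished with status ${result.copyStatus}`);
}
```
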
+ */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the undelete operation. */ +export declare type BlobUndeleteResponse = BlobUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobUndeleteHeaders; + }; +}; + +/** + * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and + * {@link BlockBlobClient.uploadBrowserDate}. + */ +export declare type BlobUploadCommonResponse = BlockBlobUploadHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse; +}; + +/** Represents a single block in a block blob. It describes the block's ID and size. */ +export declare interface Block { + /** The base64 encoded block ID. */ + name: string; + /** The block size in bytes. */ + size: number; +} + +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +export declare class BlockBlobClient extends BlobClient { + /** + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API + * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient. + */ + private _blobContext; + /** + * blockBlobContext provided by protocol layer. + */ + private blockBlobContext; + /** + * + * Creates an instance of BlockBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". 
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): BlockBlobClient; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + query(query: string, options?: BlockBlobQueryOptions): Promise; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. 
+ * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + upload(body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise; + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + syncUploadFromURL(sourceURL: string, options?: BlockBlobSyncUploadFromURLOptions): Promise; + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + stageBlock(blockId: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobStageBlockOptions): Promise; + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. 
Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. + */ + stageBlockFromURL(blockId: string, sourceURL: string, offset?: number, count?: number, options?: BlockBlobStageBlockFromURLOptions): Promise; + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + commitBlockList(blocks: string[], options?: BlockBlobCommitBlockListOptions): Promise; + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. + */ + getBlockList(listType: BlockListType, options?: BlockBlobGetBlockListOptions): Promise; + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + uploadData(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise; + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. 
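
A sketch of the staged-block workflow described above (`stageBlock` followed by `commitBlockList`, verified via `getBlockList`); container and blob names are illustrative, and the block ID padding scheme is just one common convention, since IDs must be base64-encoded strings of equal length:

```ts
// Sketch: split a buffer into blocks, stage each, then commit the block list.
import { BlobServiceClient } from "@azure/storage-blob";

async function uploadInBlocks(connectionString: string, data: Buffer): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(connectionString)
    .getContainerClient("my-container")
    .getBlockBlobClient("staged-upload.bin");

  const blockSize = 4 * 1024 * 1024; // 4 MiB per block
  const blockIds: string[] = [];

  for (let offset = 0, i = 0; offset < data.length; offset += blockSize, i++) {
    const chunk = data.subarray(offset, offset + blockSize);
    const blockId = Buffer.from(String(i).padStart(6, "0")).toString("base64");
    await blockBlobClient.stageBlock(blockId, chunk, chunk.length);
    blockIds.push(blockId);
  }

  // Blocks become part of the blob only after commitBlockList.
  await blockBlobClient.commitBlockList(blockIds);

  const committed = await blockBlobClient.getBlockList("committed");
  console.log(`Committed ${committed.committedBlocks?.length ?? 0} blocks`);
}
```
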
+ * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. + */ + uploadBrowserData(browserData: Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise; + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + private uploadSeekableInternal; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + uploadFile(filePath: string, options?: BlockBlobParallelUploadOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + uploadStream(stream: Readable, bufferSize?: number, maxConcurrency?: number, options?: BlockBlobUploadStreamOptions): Promise; +} + +/** Defines headers for BlockBlob_commitBlockList operation. */ +export declare interface BlockBlobCommitBlockListHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** This header is returned so that the client can check for message content integrity. 
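A hedged sketch of uploadStream usage (editorial example, not part of the vendored file); the 4 MB buffer size, concurrency of 4, and all names are placeholder choices.

```ts
import * as fs from "fs";
import { BlobServiceClient } from "@azure/storage-blob";

async function streamUpload(filePath: string): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? "" // placeholder connection string
  )
    .getContainerClient("my-container")
    .getBlockBlobClient("my-blob");

  const bufferSize = 4 * 1024 * 1024; // size of each staged block
  const maxConcurrency = 4;           // number of buffers allowed in flight at once

  // Matching the stream's highWaterMark to bufferSize avoids extra Buffer.concat work,
  // as the uploadStream documentation above recommends.
  const stream = fs.createReadStream(filePath, { highWaterMark: bufferSize });

  await blockBlobClient.uploadStream(stream, bufferSize, maxConcurrency, {
    onProgress: (progress) => console.log(`uploaded ${progress.loadedBytes} bytes`),
  });
}
```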
This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.commitBlockList} operation. + */ +export declare interface BlockBlobCommitBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when committing the block list. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when committing block list. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when committing block list. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. 
+ * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the commitBlockList operation. */ +export declare type BlockBlobCommitBlockListResponse = BlockBlobCommitBlockListHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobCommitBlockListHeaders; + }; +}; + +/** Defines headers for BlockBlob_getBlockList operation. */ +export declare interface BlockBlobGetBlockListHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The media type of the body of the response. For Get Block List this is 'application/xml' */ + contentType?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.getBlockList} operation. + */ +export declare interface BlockBlobGetBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; +} + +/** Contains response data for the getBlockList operation. */ +export declare type BlockBlobGetBlockListResponse = BlockBlobGetBlockListHeaders & BlockList & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlockList; + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobGetBlockListHeaders; + }; +}; + +/** + * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}. + */ +export declare interface BlockBlobParallelUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Destination block blob size in bytes. 
+ */ + blockSize?: number; + /** + * Blob size threshold in bytes to start concurrency uploading. + * Default value is 256MB, blob size less than this option will + * be uploaded via one I/O operation without concurrency. + * You can customize a value less equal than the default value. + */ + maxSingleShotSize?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Blob HTTP Headers. A common header to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel uploading. Must be greater than or equal to 0. + */ + concurrency?: number; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} + +/** Defines headers for BlockBlob_putBlobFromUrl operation. */ +export declare interface BlockBlobPutBlobFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. 
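To make the BlockBlobParallelUploadOptions fields above concrete, a small editorial sketch (placeholder names and sizes, assuming @azure/storage-blob): data larger than maxSingleShotSize is split into blockSize blocks and staged with the given concurrency.

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function parallelUpload(data: Buffer): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? "" // placeholder connection string
  )
    .getContainerClient("my-container")
    .getBlockBlobClient("my-blob");

  await blockBlobClient.uploadData(data, {
    maxSingleShotSize: 8 * 1024 * 1024, // anything smaller goes up in a single upload() call
    blockSize: 4 * 1024 * 1024,         // block size used once the data is split
    concurrency: 4,                     // parallel stageBlock calls
    blobHTTPHeaders: { blobContentType: "application/octet-stream" },
    onProgress: (progress) => console.log(`${progress.loadedBytes} bytes sent`),
  });
}
```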
Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the putBlobFromUrl operation. */ +export declare type BlockBlobPutBlobFromUrlResponse = BlockBlobPutBlobFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobPutBlobFromUrlHeaders; + }; +}; + +/** + * Options to configure {@link BlockBlobClient.query} operation. + */ +export declare interface BlockBlobQueryOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Configurations for the query input. + */ + inputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryParquetConfiguration; + /** + * Configurations for the query output. + */ + outputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryArrowConfiguration; + /** + * Callback to receive events on the progress of query operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback to receive error events during the query operaiton. + */ + onError?: (error: BlobQueryError) => void; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Defines headers for BlockBlob_stageBlockFromURL operation. */ +export declare interface BlockBlobStageBlockFromURLHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation. + */ +export declare interface BlockBlobStageBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Specifies the bytes of the source Blob/File to upload. + * If not specified, the entire content is uploaded as a single block. + */ + range?: Range_2; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} + +/** Contains response data for the stageBlockFromURL operation. */ +export declare type BlockBlobStageBlockFromURLResponse = BlockBlobStageBlockFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockFromURLHeaders; + }; +}; + +/** Defines headers for BlockBlob_stageBlock operation. */ +export declare interface BlockBlobStageBlockHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.stageBlock} operation. + */ +export declare interface BlockBlobStageBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * Callback to receive events on the progress of stage block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the stageBlock operation. */ +export declare type BlockBlobStageBlockResponse = BlockBlobStageBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockHeaders; + }; +}; + +/** + * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation. + */ +export declare interface BlockBlobSyncUploadFromURLOptions extends CommonOptions { + /** + * Server timeout in seconds. 
+ * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations + */ + timeoutInSeconds?: number; + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value + * pairs are specified, the operation will copy the metadata from the source blob or file to the + * destination blob. If one or more name-value pairs are specified, the destination blob is + * created with the specified metadata, and metadata is not copied from the source blob or file. + * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules + * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + * information. + */ + metadata?: Metadata; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Optional, default is true. Indicates if properties from the source blob should be copied. + */ + copySourceBlobProperties?: boolean; + /** + * HTTP headers to set when uploading to a block blob. + * + * A common header to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Conditions to meet for the destination Azure Blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Conditions to meet for the source Azure Blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} + +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +export declare enum BlockBlobTier { + /** + * Optimized for storing data that is accessed frequently. + */ + Hot = "Hot", + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + Cool = "Cool", + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + Archive = "Archive" +} + +/** Defines headers for BlockBlob_upload operation. 
*/ +export declare interface BlockBlobUploadHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.upload} operation. + */ +export declare interface BlockBlobUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when uploading to a block blob. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when uploading to a block blob. + */ + metadata?: Metadata; + /** + * Callback to receive events on the progress of upload operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. 
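An editorial sketch (not part of the vendored file) of a single-shot upload() call exercising several fields of BlockBlobUploadOptions shown here and continued just below (HTTP headers, metadata, tags, access tier); all names and values are placeholders.

```ts
import { BlobServiceClient, BlockBlobTier } from "@azure/storage-blob";

async function uploadWithOptions(): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? "" // placeholder connection string
  )
    .getContainerClient("my-container")
    .getBlockBlobClient("report.csv");

  const content = "id,value\n1,42\n";
  await blockBlobClient.upload(content, Buffer.byteLength(content), {
    blobHTTPHeaders: { blobContentType: "text/csv" },
    metadata: { source: "nightly-job" },
    tags: { project: "demo" },
    tier: BlockBlobTier.Cool, // for infrequently accessed data
  });
}
```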
+ */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the upload operation. */ +export declare type BlockBlobUploadResponse = BlockBlobUploadHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobUploadHeaders; + }; +}; + +/** + * Option interface for the {@link BlockBlobClient.uploadStream} operation. + */ +export declare interface BlockBlobUploadStreamOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Blob HTTP Headers. + * + * A common header to set is `blobContentType`, enabling the + * browser to provide functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} + +export declare interface BlockList { + committedBlocks?: Block[]; + uncommittedBlocks?: Block[]; +} + +/** Defines values for BlockListType. */ +export declare type BlockListType = "committed" | "uncommitted" | "all"; + +declare interface ClearRange { + start: number; + end: number; +} + +/** + * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}. + */ +export declare interface CommonGenerateSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The name of the access policy on the container this SAS references if any. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. + */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; +} + +/** + * An interface for options common to every remote operation. + */ +export declare interface CommonOptions { + /** + * Options to configure spans created when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} + +/** + * Options to configure Container - Acquire Lease operation. + */ +export declare interface ContainerAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** Optional parameters. */ +export declare interface ContainerBreakLeaseOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */ + breakPeriod?: number; +} + +/** + * Options to configure Container - Break Lease operation. + */ +export declare interface ContainerBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Options to configure Container - Change Lease operation. + */ +export declare interface ContainerChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. 
+ */ + conditions?: ModifiedAccessConditions; +} + +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +export declare class ContainerClient extends StorageClient { + /** + * containerContext provided by protocol layer. + */ + private containerContext; + private _containerName; + /** + * The name of the container. + */ + get containerName(): string; + /** + * + * Creates an instance of ContainerClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - Options to Container Create operation. 
+ * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + create(options?: ContainerCreateOptions): Promise; + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - + */ + createIfNotExists(options?: ContainerCreateOptions): Promise; + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + exists(options?: ContainerExistsOptions): Promise; + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName: string): BlobClient; + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName: string): AppendBlobClient; + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName: string): BlockBlobClient; + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName: string): PageBlobClient; + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + getProperties(options?: ContainerGetPropertiesOptions): Promise; + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + delete(options?: ContainerDeleteMethodOptions): Promise; + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. 
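A minimal editorial sketch of the container lifecycle methods documented above: createIfNotExists, exists, getProperties and deleteIfExists (the last declared just below). The container name and connection string are placeholders.

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function containerLifecycle(): Promise<void> {
  const containerClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? "" // placeholder connection string
  ).getContainerClient("my-container");

  // Succeeds whether or not the container already exists.
  await containerClient.createIfNotExists();

  if (await containerClient.exists()) {
    const properties = await containerClient.getProperties();
    // Metadata keys come back lowercased, per the getProperties warning above.
    console.log(properties.metadata);
  }

  // Marks the container for deletion; its blobs are removed during garbage collection.
  await containerClient.deleteIfExists();
}
```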
+ */ + deleteIfExists(options?: ContainerDeleteMethodOptions): Promise; + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. + */ + setMetadata(metadata?: Metadata, options?: ContainerSetMetadataOptions): Promise; + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. + */ + getAccessPolicy(options?: ContainerGetAccessPolicyOptions): Promise; + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + setAccessPolicy(access?: PublicAccessType, containerAcl?: SignedIdentifier[], options?: ContainerSetAccessPolicyOptions): Promise; + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. 
+ * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + uploadBlockBlob(blobName: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise<{ + blockBlobClient: BlockBlobClient; + response: BlockBlobUploadResponse; + }>; + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + deleteBlob(blobName: string, options?: ContainerDeleteBlobOptions): Promise; + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. + */ + private listBlobFlatSegment; + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. + */ + private listBlobHierarchySegment; + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listSegments; + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. 
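An editorial sketch of the uploadBlockBlob and deleteBlob convenience methods declared above; the blob name, content and connection string are placeholders.

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function uploadAndDelete(): Promise<void> {
  const containerClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? "" // placeholder connection string
  ).getContainerClient("my-container");

  const content = "temporary payload";
  // uploadBlockBlob returns the upload response plus a BlockBlobClient for the new blob.
  const { blockBlobClient, response } = await containerClient.uploadBlockBlob(
    "scratch/temp.txt",
    content,
    Buffer.byteLength(content)
  );
  console.log(`uploaded ${blockBlobClient.name}, request id: ${response.requestId}`);

  // Marks the blob for deletion; it is removed during garbage collection.
  await containerClient.deleteBlob("scratch/temp.txt");
}
```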
+ */ + private listItems; + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options?: ContainerListBlobsOptions): PagedAsyncIterableIterator; + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listHierarchySegments; + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + private listItemsByHierarchy; + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter: string, options?: ContainerListBlobsOptions): PagedAsyncIterableIterator<({ + kind: "prefix"; + } & BlobPrefix) | ({ + kind: "blob"; + } & BlobItem), ContainerListBlobHierarchySegmentResponse>; + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. 
+ * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ContainerFindBlobByTagsOptions): PagedAsyncIterableIterator; + private getContainerNameFromUrl; + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient(): BlobBatchClient; +} + +/** Defines headers for Container_create operation. */ +export declare interface ContainerCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the {@link ContainerClient.createIfNotExists} operation. + */ +export declare interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse { + /** + * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists. + */ + succeeded: boolean; +} + +/** + * Options to configure {@link ContainerClient.create} operation. + */ +export declare interface ContainerCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the container. + */ + metadata?: Metadata; + /** + * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include: + * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account. + * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request. + */ + access?: PublicAccessType; + /** + * Container encryption scope info. + */ + containerEncryptionScope?: ContainerEncryptionScope; +} + +/** Contains response data for the create operation. */ +export declare type ContainerCreateResponse = ContainerCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerCreateHeaders; + }; +}; + +/** + * Options to configure the {@link ContainerClient.deleteBlob} operation. + */ +export declare interface ContainerDeleteBlobOptions extends BlobDeleteOptions { + /** + * An opaque DateTime value that, when present, specifies the version + * of the blob to delete. It's for service version 2019-10-10 and newer. + */ + versionId?: string; +} + +/** Defines headers for Container_delete operation. */ +export declare interface ContainerDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the {@link ContainerClient.deleteIfExists} operation. + */ +export declare interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse { + /** + * Indicate whether the container is successfully deleted. Is false if the container does not exist in the first place. + */ + succeeded: boolean; +} + +/** + * Options to configure {@link ContainerClient.delete} operation. + */ +export declare interface ContainerDeleteMethodOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting the container. + */ + conditions?: ContainerRequestConditions; +} + +/** Contains response data for the delete operation. */ +export declare type ContainerDeleteResponse = ContainerDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerDeleteHeaders; + }; +}; + +/** Parameter group */ +export declare interface ContainerEncryptionScope { + /** Optional. Version 2019-07-07 and later. Specifies the default encryption scope to set on the container and use for all future writes. */ + defaultEncryptionScope?: string; + /** Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. */ + preventEncryptionScopeOverride?: boolean; +} + +/** + * Options to configure {@link ContainerClient.exists} operation. + */ +export declare interface ContainerExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Defines headers for Container_filterBlobs operation. */ +export declare interface ContainerFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} + +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ContainerFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; +} + +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment & ContainerFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; + +/** + * Options to configure {@link ContainerClient.generateSasUrl} operation. + */ +export declare interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: ContainerSASPermissions; +} + +/** Defines headers for Container_getAccessPolicy operation. */ +export declare interface ContainerGetAccessPolicyHeaders { + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.getAccessPolicy} operation. + */ +export declare interface ContainerGetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} + +/** + * Contains response data for the {@link ContainerClient.getAccessPolicy} operation. + */ +export declare type ContainerGetAccessPolicyResponse = { + signedIdentifiers: SignedIdentifier[]; +} & ContainerGetAccessPolicyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerGetAccessPolicyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: SignedIdentifierModel[]; + }; +}; + +/** Defines headers for Container_getProperties operation. 
*/ +export declare interface ContainerGetPropertiesHeaders { + metadata?: { + [propertyName: string]: string; + }; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** Indicates whether the container has an immutability policy set on it. */ + hasImmutabilityPolicy?: boolean; + /** Indicates whether the container has a legal hold. */ + hasLegalHold?: boolean; + /** The default encryption scope for the container. */ + defaultEncryptionScope?: string; + /** Indicates whether the container's default encryption scope can be overriden. */ + denyEncryptionScopeOverride?: boolean; + /** Indicates whether version level worm is enabled on a container. */ + isImmutableStorageWithVersioningEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.getProperties} operation. + */ +export declare interface ContainerGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} + +/** Contains response data for the getProperties operation. */ +export declare type ContainerGetPropertiesResponse = ContainerGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerGetPropertiesHeaders; + }; +}; + +/** An Azure Storage container */ +export declare interface ContainerItem { + name: string; + deleted?: boolean; + version?: string; + /** Properties of a container */ + properties: ContainerProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; +} + +/** Defines headers for Container_listBlobFlatSegment operation. */ +export declare interface ContainerListBlobFlatSegmentHeaders { + /** The media type of the body of the response. 
For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the listBlobFlatSegment operation. + */ +export declare type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse & ContainerListBlobFlatSegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobFlatSegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsFlatSegmentResponseModel; + }; +}; + +/** Defines headers for Container_listBlobHierarchySegment operation. */ +export declare interface ContainerListBlobHierarchySegmentHeaders { + /** The media type of the body of the response. For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the listBlobHierarchySegment operation. + */ +export declare type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse & ContainerListBlobHierarchySegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobHierarchySegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsHierarchySegmentResponseModel; + }; +}; + +/** + * Options to configure Container - List Blobs operations. + * + * See: + * - {@link ContainerClient.listBlobsFlat} + * - {@link ContainerClient.listBlobsByHierarchy} + */ +export declare interface ContainerListBlobsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response. 
+ */ + includeCopy?: boolean; + /** + * Specifies whether soft deleted blobs should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether blob metadata be returned in the response. + */ + includeMetadata?: boolean; + /** + * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response. + */ + includeSnapshots?: boolean; + /** + * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response. + */ + includeVersions?: boolean; + /** + * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response. + */ + includeUncommitedBlobs?: boolean; + /** + * Specifies whether blob tags be returned in the response. + */ + includeTags?: boolean; + /** + * Specifies whether deleted blob with versions be returned in the response. + */ + includeDeletedWithVersions?: boolean; + /** + * Specifies whether blob immutability policy be returned in the response. + */ + includeImmutabilityPolicy?: boolean; + /** + * Specifies whether blob legal hold be returned in the response. + */ + includeLegalHold?: boolean; +} + +/** Properties of a container */ +export declare interface ContainerProperties { + lastModified: Date; + etag: string; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + publicAccess?: PublicAccessType; + hasImmutabilityPolicy?: boolean; + hasLegalHold?: boolean; + defaultEncryptionScope?: string; + preventEncryptionScopeOverride?: boolean; + deletedOn?: Date; + remainingRetentionDays?: number; + /** Indicates if version level worm is enabled on this container. */ + isImmutableStorageWithVersioningEnabled?: boolean; +} + +/** + * Options to configure Container - Release Lease operation. + */ +export declare interface ContainerReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** Defines headers for Container_rename operation. */ +export declare interface ContainerRenameHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the rename operation. */ +export declare type ContainerRenameResponse = ContainerRenameHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerRenameHeaders; + }; +}; + +/** + * Options to configure Container - Renew Lease operation. 
+ */ +export declare interface ContainerRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Conditions to meet for the container. + */ +export declare interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions { +} + +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class ContainerSASPermissions { + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): ContainerSASPermissions; + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specifies List access granted. + */ + list: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString(): string; +} + +/** + * A type that looks like a Container SAS permission. + * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface ContainerSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. 
+ */ + write?: boolean; + /** + * Specifies Delete access granted. + */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specifies List access granted. + */ + list?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags?: boolean; +} + +/** Defines headers for Container_setAccessPolicy operation. */ +export declare interface ContainerSetAccessPolicyHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.setAccessPolicy} operation. + */ +export declare interface ContainerSetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting the access policy. + */ + conditions?: ContainerRequestConditions; +} + +/** Contains response data for the setAccessPolicy operation. */ +export declare type ContainerSetAccessPolicyResponse = ContainerSetAccessPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetAccessPolicyHeaders; + }; +}; + +/** Defines headers for Container_setMetadata operation. */ +export declare interface ContainerSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.setMetadata} operation. + */ +export declare interface ContainerSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: ContainerRequestConditions; +} + +/** Contains response data for the setMetadata operation. */ +export declare type ContainerSetMetadataResponse = ContainerSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetMetadataHeaders; + }; +}; + +/** Defines headers for Container_restore operation. */ +export declare interface ContainerUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the restore operation. */ +export declare type ContainerUndeleteResponse = ContainerUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerUndeleteHeaders; + }; +}; + +/** + * Defines the operations from a {@link BlobClient} that are needed for the poller + * returned by {@link BlobClient.beginCopyFromURL} to work. + */ +export declare type CopyPollerBlobClient = Pick & { + startCopyFromURL(copySource: string, options?: BlobStartCopyFromURLOptions): Promise; +}; + +/** Defines values for CopyStatusType. */ +export declare type CopyStatusType = "pending" | "success" | "aborted" | "failed"; + +/** CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain */ +export declare interface CorsRule { + /** The origin domains that are permitted to make a request against the storage service via CORS. The origin domain is the domain from which the request originates. 
Note that the origin must be an exact case-sensitive match with the origin that the user age sends to the service. You can also use the wildcard character '*' to allow all origin domains to make requests via CORS. */ + allowedOrigins: string; + /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. (comma separated) */ + allowedMethods: string; + /** the request headers that the origin domain may specify on the CORS request. */ + allowedHeaders: string; + /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer */ + exposedHeaders: string; + /** The maximum amount time that a browser should cache the preflight OPTIONS request. */ + maxAgeInSeconds: number; +} + +/** Parameter group */ +export declare interface CpkInfo { + /** Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. */ + encryptionKey?: string; + /** The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. */ + encryptionKeySha256?: string; + /** The algorithm used to produce the encryption key hash. Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is provided. */ + encryptionAlgorithm?: EncryptionAlgorithmType; +} + +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +declare abstract class Credential_2 implements RequestPolicyFactory { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy; +} +export { Credential_2 as Credential } + +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +export declare abstract class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; +} + +/** + * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy. + */ +export declare type CredentialPolicyCreator = (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => CredentialPolicy; + +/** Defines values for DeleteSnapshotsOptionType. */ +export declare type DeleteSnapshotsOptionType = "include" | "only"; + +export { deserializationPolicy } + +/** Defines values for EncryptionAlgorithmType. */ +export declare type EncryptionAlgorithmType = "AES256"; + +/** + * Blob info from a {@link BlobServiceClient.findBlobsByTags} + */ +export declare interface FilterBlobItem { + /** + * Blob Name. + */ + name: string; + /** + * Container Name. + */ + containerName: string; + /** + * Blob Tags. + */ + tags?: Tags; + /** + * Tag value. + * + * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags. 
+ */ + tagValue: string; +} + +/** Blob info from a Filter Blobs API call */ +export declare interface FilterBlobItemModel { + name: string; + containerName: string; + /** Blob tags */ + tags?: BlobTags; +} + +/** + * Segment response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface FilterBlobSegment { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItem[]; + continuationToken?: string; +} + +/** The result of a Filter Blobs API call */ +export declare interface FilterBlobSegmentModel { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItemModel[]; + continuationToken?: string; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateAccountSASQueryParameters(accountSASSignatureValues: AccountSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * Fill in the required details before running the following snippets. + * + * Example usage: + * + * ```js + * // Generate service level SAS for a container + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using an identifier: + * + * ```js + * // Generate service level SAS for a container with identifier + * // startsOn & permissions are optional when identifier is provided + * const identifier = "unique-id"; + * await containerClient.setAccessPolicy(undefined, [ + * { + * accessPolicy: { + * expiresOn: new Date(new Date().valueOf() + 86400), // Date type + * permissions: ContainerSASPermissions.parse("racwdl").toString(), + * startsOn: new Date() // Date type + * }, + * id: identifier + * } + * ]); + * + * const containerSAS = generateBlobSASQueryParameters( + * { + * containerName, // Required + * identifier // Required + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using a blob name: + * + * ```js + * // Generate service level SAS for a blob + * const blobSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * blobName, // Required + * permissions: BlobSASPermissions.parse("racwd"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type + * cacheControl: "cache-control-override", // Optional + * contentDisposition: "content-disposition-override", // Optional + * contentEncoding: "content-encoding-override", // Optional + * contentLanguage: "content-language-override", // Optional + * contentType: "content-type-override", // Optional + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required. + * + * Example usage: + * + * ```js + * // Generate user delegation SAS for a container + * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn); + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn, // Optional. Date type + * expiresOn, // Required. 
Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2018-11-09" // Must greater than or equal to 2018-11-09 to generate user delegation SAS + * }, + * userDelegationKey, // UserDelegationKey + * accountName + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @param accountName - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, userDelegationKey: UserDelegationKey, accountName: string): SASQueryParameters; + +/** Geo-Replication information for the Secondary Storage Service */ +export declare interface GeoReplication { + /** The status of the secondary location */ + status: GeoReplicationStatusType; + /** A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. */ + lastSyncOn: Date; +} + +/** Defines values for GeoReplicationStatusType. */ +export declare type GeoReplicationStatusType = "live" | "bootstrap" | "unavailable"; + +/** + * Represents authentication information in Authorization, ProxyAuthorization, + * WWW-Authenticate, and Proxy-Authenticate header values. + */ +export declare interface HttpAuthorization { + /** + * The scheme to use for authorization. + */ + scheme: string; + /** + * the credentials containing the authentication information of the user agent for the resource being requested. + */ + value: string; +} + +export { HttpHeaders } + +export { HttpOperationResponse } + +export { HttpRequestBody } + +export { IHttpClient } + +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +export declare function isPipelineLike(pipeline: unknown): pipeline is PipelineLike; + +/** + * The details for a specific lease. + */ +export declare interface Lease { + /** + * The ETag contains a value that you can use to + * perform operations conditionally. If the request version is 2011-08-18 or + * newer, the ETag value will be in quotes. + */ + etag?: string; + /** + * Returns the date and time the container was + * last modified. Any operation that modifies the blob, including an update + * of the blob's metadata or properties, changes the last-modified time of + * the blob. + */ + lastModified?: Date; + /** + * Uniquely identifies a container's lease + */ + leaseId?: string; + /** + * Approximate time remaining in the lease + * period, in seconds. + */ + leaseTime?: number; + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + */ + requestId?: string; + /** + * Indicates the version of the Blob service used + * to execute the request. This header is returned for requests made against + * version 2009-09-19 and above. + */ + version?: string; + /** + * UTC date/time value generated by the service that + * indicates the time at which the response was initiated + */ + date?: Date; + /** + * Error code if any associated with the response that returned + * the Lease information. 
+ */ + errorCode?: string; +} + +/** Parameter group */ +export declare interface LeaseAccessConditions { + /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */ + leaseId?: string; +} + +/** Defines values for LeaseDurationType. */ +export declare type LeaseDurationType = "infinite" | "fixed"; + +/** + * Configures lease operations. + */ +export declare interface LeaseOperationOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Contains the response data for operations that create, modify, or delete a lease. + * + * See {@link BlobLeaseClient}. + */ +export declare type LeaseOperationResponse = Lease & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: Lease; + }; +}; + +/** Defines values for LeaseStateType. */ +export declare type LeaseStateType = "available" | "leased" | "expired" | "breaking" | "broken"; + +/** Defines values for LeaseStatusType. */ +export declare type LeaseStatusType = "locked" | "unlocked"; + +/** + * An enumeration of blobs + */ +export declare interface ListBlobsFlatSegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegment; + continuationToken?: string; +} + +/** An enumeration of blobs */ +export declare interface ListBlobsFlatSegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegmentModel; + continuationToken?: string; +} + +/** + * An enumeration of blobs + */ +export declare interface ListBlobsHierarchySegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegment; + continuationToken?: string; +} + +/** An enumeration of blobs */ +export declare interface ListBlobsHierarchySegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegmentModel; + continuationToken?: string; +} + +/** An enumeration of containers */ +export declare interface ListContainersSegmentResponse { + serviceEndpoint: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + containerItems: ContainerItem[]; + continuationToken?: string; +} + +/** + * The `@azure/logger` configuration for this package. + */ +export declare const logger: AzureLogger; + +/** Azure Analytics Logging settings. */ +export declare interface Logging { + /** The version of Storage Analytics to configure. */ + version: string; + /** Indicates whether all delete requests should be logged. */ + deleteProperty: boolean; + /** Indicates whether all read requests should be logged. */ + read: boolean; + /** Indicates whether all write requests should be logged. */ + write: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy: RetentionPolicy; +} + +/** + * Specifies HTTP options for conditional requests based on ETag matching. 
+ */ +export declare interface MatchConditions { + /** + * Specify an ETag value to operate only on blobs with a matching value. + */ + ifMatch?: string; + /** + * Specify an ETag value to operate only on blobs without a matching value. + */ + ifNoneMatch?: string; +} + +/** + * A map of name-value pairs to associate with the resource. + */ +export declare interface Metadata { + /** + * A name-value pair. + */ + [propertyName: string]: string; +} + +/** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ +export declare interface Metrics { + /** The version of Storage Analytics to configure. */ + version?: string; + /** Indicates whether metrics are enabled for the Blob service. */ + enabled: boolean; + /** Indicates whether metrics should generate summary statistics for called API operations. */ + includeAPIs?: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy?: RetentionPolicy; +} + +/** + * Specifies HTTP options for conditional requests based on modification time. + */ +export declare interface ModificationConditions { + /** + * Specify this header value to operate only on a blob if it has been modified since the + * specified date/time. + */ + ifModifiedSince?: Date; + /** + * Specify this header value to operate only on a blob if it has not been modified since the + * specified date/time. + */ + ifUnmodifiedSince?: Date; +} + +/** + * standard HTTP conditional headers and tags condition. + */ +export declare interface ModifiedAccessConditions extends MatchConditions, ModificationConditions, TagConditions { +} + +/** Parameter group */ +export declare interface ModifiedAccessConditionsModel { + /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */ + ifModifiedSince?: Date; + /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */ + ifUnmodifiedSince?: Date; + /** Specify an ETag value to operate only on blobs with a matching value. */ + ifMatch?: string; + /** Specify an ETag value to operate only on blobs without a matching value. */ + ifNoneMatch?: string; + /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */ + ifTags?: string; +} + +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +export declare function newPipeline(credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, pipelineOptions?: StoragePipelineOptions): Pipeline; + +/** + * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}. + * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the + * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses + * (e.g. {@link BlobProperties.ObjectReplicationDestinationPolicyId}. + */ +export declare interface ObjectReplicationPolicy { + /** + * The Object Replication Policy ID. 
+ */ + policyId: string; + /** + * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID. + */ + rules: ObjectReplicationRule[]; +} + +/** + * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob. + * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}. + */ +export declare interface ObjectReplicationRule { + /** + * The Object Replication Rule ID. + */ + ruleId: string; + /** + * The Replication Status + */ + replicationStatus: ObjectReplicationStatus; +} + +/** + * Specifies the Replication Status of a blob. This is used when a storage account has + * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}. + */ +export declare type ObjectReplicationStatus = "complete" | "failed"; + +/** Defines headers for PageBlob_clearPages operation. */ +export declare interface PageBlobClearPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.clearPages} operation. + */ +export declare interface PageBlobClearPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when clearing pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the clearPages operation. 
*/ +export declare type PageBlobClearPagesResponse = PageBlobClearPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobClearPagesHeaders; + }; +}; + +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +export declare class PageBlobClient extends BlobClient { + /** + * pageBlobsContext provided by protocol layer. + */ + private pageBlobContext; + /** + * + * Creates an instance of PageBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A Client string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * + * @param url - A URL string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. 
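+ *
+ * Example usage (a minimal sketch; the account name, key, container, and blob
+ * name below are placeholders rather than values defined by this package, and
+ * StorageSharedKeyCredential is only available in Node.js):
+ *
+ * ```js
+ * const account = "myaccount";
+ * const accountKey = "accountKey";
+ * const pipeline = newPipeline(new StorageSharedKeyCredential(account, accountKey));
+ * const pageBlobClient = new PageBlobClient(
+ *   `https://${account}.blob.core.windows.net/mycontainer/pageblob`,
+ *   pipeline
+ * );
+ * ```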
+ */
+ constructor(url: string, pipeline: PipelineLike);
+ /**
+ * Creates a new PageBlobClient object identical to the source but with the
+ * specified snapshot timestamp.
+ * Providing "" will remove the snapshot and return a Client to the base blob.
+ *
+ * @param snapshot - The snapshot timestamp.
+ * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.
+ */
+ withSnapshot(snapshot: string): PageBlobClient;
+ /**
+ * Creates a page blob of the specified length. Call uploadPages to upload data
+ * to a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param size - size of the page blob.
+ * @param options - Options to the Page Blob Create operation.
+ * @returns Response data for the Page Blob Create operation.
+ */
+ create(size: number, options?: PageBlobCreateOptions): Promise;
+ /**
+ * Creates a page blob of the specified length. Call uploadPages to upload data
+ * to a page blob. If the blob with the same name already exists, the content
+ * of the existing blob will remain unchanged.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param size - size of the page blob.
+ * @param options -
+ */
+ createIfNotExists(size: number, options?: PageBlobCreateIfNotExistsOptions): Promise;
+ /**
+ * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-page
+ *
+ * @param body - Data to upload
+ * @param offset - Offset of destination page blob
+ * @param count - Content length of the body, also number of bytes to be uploaded
+ * @param options - Options to the Page Blob Upload Pages operation.
+ * @returns Response data for the Page Blob Upload Pages operation.
+ */
+ uploadPages(body: HttpRequestBody, offset: number, count: number, options?: PageBlobUploadPagesOptions): Promise;
+ /**
+ * The Upload Pages operation writes a range of pages to a page blob where the
+ * contents are read from a URL.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url
+ *
+ * @param sourceURL - Specify a URL to the copy source; a Shared Access Signature (SAS) may be needed for authentication
+ * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob
+ * @param destOffset - Offset of destination page blob
+ * @param count - Number of bytes to be uploaded from source page blob
+ * @param options -
+ */
+ uploadPagesFromURL(sourceURL: string, sourceOffset: number, destOffset: number, count: number, options?: PageBlobUploadPagesFromURLOptions): Promise;
+ /**
+ * Frees the specified pages from the page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-page
+ *
+ * @param offset - Starting byte position of the pages to clear.
+ * @param count - Number of bytes to clear.
+ * @param options - Options to the Page Blob Clear Pages operation.
+ * @returns Response data for the Page Blob Clear Pages operation.
+ */
+ clearPages(offset?: number, count?: number, options?: PageBlobClearPagesOptions): Promise;
+ /**
+ * Returns the list of valid page ranges for a page blob or snapshot of a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to the Page Blob Get Ranges operation.
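+ *
+ * Example (a sketch only; it assumes an existing `pageBlobClient` created as in
+ * the constructor examples above, and the sizes shown are illustrative):
+ *
+ * ```js
+ * // Create a 1024-byte page blob, fill the first 512-byte page, then list the
+ * // valid page ranges.
+ * const content = "a".repeat(512);
+ * await pageBlobClient.create(1024);
+ * await pageBlobClient.uploadPages(content, 0, content.length);
+ * const pageRanges = await pageBlobClient.getPageRanges(0, 1024);
+ * console.log(`Valid page ranges: ${JSON.stringify(pageRanges.pageRange)}`);
+ * ```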
+ * @returns Response data for the Page Blob Get Ranges operation.
+ */
+ getPageRanges(offset?: number, count?: number, options?: PageBlobGetPageRangesOptions): Promise;
+ /**
+ * getPageRangesSegment returns a single segment of page ranges starting from the
+ * specified Marker. Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call getPageRangesSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param options - Options to PageBlob Get Page Ranges Segment operation.
+ */
+ private listPageRangesSegment;
+ /**
+ * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param marker - A string value that identifies the portion of
+ * the get of page ranges to be returned with the next getting operation. The
+ * operation returns the ContinuationToken value within the response body if the
+ * getting operation did not return all page ranges remaining within the current page.
+ * The ContinuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of get
+ * items. The marker value is opaque to the client.
+ * @param options - Options to List Page Ranges operation.
+ */
+ private listPageRangeItemSegments;
+ /**
+ * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to List Page Ranges operation.
+ */
+ private listPageRangeItems;
+ /**
+ * Returns an async iterable iterator to list of page ranges for a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * .byPage() returns an async iterable iterator to list of page ranges for a page blob.
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRanges()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRanges();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRanges(offset?: number, count?: number, options?: PageBlobListPageRangesOptions): PagedAsyncIterableIterator;
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
+ */
+ getPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobGetPageRangesDiffOptions): Promise;
+ /**
+ * getPageRangesDiffSegment returns a single segment of page ranges starting from the
+ * specified Marker for difference between previous snapshot and the target page blob.
+ * Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call getPageRangesDiffSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangesDiffSegment; + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItemSegments; + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItems; + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRangesDiff();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobListPageRangesDiffOptions): PagedAsyncIterableIterator;
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
+ */
+ getPageRangesDiffForManagedDisks(offset: number, count: number, prevSnapshotUrl: string, options?: PageBlobGetPageRangesDiffOptions): Promise;
+ /**
+ * Resizes the page blob to the specified size (which must be a multiple of 512).
+ * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties
+ *
+ * @param size - Target size
+ * @param options - Options to the Page Blob Resize operation.
+ * @returns Response data for the Page Blob Resize operation.
+ */
+ resize(size: number, options?: PageBlobResizeOptions): Promise;
+ /**
+ * Sets a page blob's sequence number.
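+ *
+ * Example (a sketch; assumes an existing `pageBlobClient` for a page blob):
+ *
+ * ```js
+ * // Set the sequence number explicitly, then bump it by one.
+ * await pageBlobClient.updateSequenceNumber("update", 100);
+ * await pageBlobClient.updateSequenceNumber("increment");
+ * ```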
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + updateSequenceNumber(sequenceNumberAction: SequenceNumberActionType, sequenceNumber?: number, options?: PageBlobUpdateSequenceNumberOptions): Promise; + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + startCopyIncremental(copySource: string, options?: PageBlobStartCopyIncrementalOptions): Promise; +} + +/** Defines headers for PageBlob_copyIncremental operation. */ +export declare interface PageBlobCopyIncrementalHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the copyIncremental operation. */ +export declare type PageBlobCopyIncrementalResponse = PageBlobCopyIncrementalHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobCopyIncrementalHeaders; + }; +}; + +/** Defines headers for PageBlob_create operation. 
*/ +export declare interface PageBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that this parameter is only applicable to a blob within a container that
+ * has version level worm enabled.
+ */
+ immutabilityPolicy?: BlobImmutabilityPolicy;
+ /**
+ * Optional. Indicates if a legal hold should be placed on the blob.
+ * Note that this parameter is only applicable to a blob within a container that
+ * has version level worm enabled.
+ */
+ legalHold?: boolean;
+ /**
+ * Access tier.
+ * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers
+ */
+ tier?: PremiumPageBlobTier | string;
+}
+
+/**
+ * Contains response data for the {@link PageBlobClient.createIfNotExists} operation.
+ */
+export declare interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse {
+ /**
+ * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists.
+ */
+ succeeded: boolean;
+}
+
+/**
+ * Options to configure the {@link PageBlobClient.create} operation.
+ */
+export declare interface PageBlobCreateOptions extends CommonOptions {
+ /**
+ * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.
+ * For example, use the @azure/abort-controller to create an `AbortSignal`.
+ */
+ abortSignal?: AbortSignalLike;
+ /**
+ * Conditions to meet when creating a page blob.
+ */
+ conditions?: BlobRequestConditions;
+ /**
+ * A user-controlled value that can be used to track requests.
+ * The value must be between 0 and 2^63 - 1. The default value is 0.
+ */
+ blobSequenceNumber?: number;
+ /**
+ * HTTP headers to set when creating a page blob.
+ */
+ blobHTTPHeaders?: BlobHTTPHeaders;
+ /**
+ * A collection of key-value string pair to associate with the blob when creating append blobs.
+ */
+ metadata?: Metadata;
+ /**
+ * Customer Provided Key Info.
+ */
+ customerProvidedKey?: CpkInfo;
+ /**
+ * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to
+ * encrypt the data provided in the request. If not specified, encryption is performed with the
+ * default account encryption scope. For more information, see Encryption at Rest for Azure
+ * Storage Services.
+ */
+ encryptionScope?: string;
+ /**
+ * Optional. Specifies immutability policy for a blob.
+ * Note that this parameter is only applicable to a blob within a container that
+ * has version level worm enabled.
+ */
+ immutabilityPolicy?: BlobImmutabilityPolicy;
+ /**
+ * Optional. Indicates if a legal hold should be placed on the blob.
+ * Note that this parameter is only applicable to a blob within a container that
+ * has version level worm enabled.
+ */
+ legalHold?: boolean;
+ /**
+ * Access tier.
+ * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers
+ */
+ tier?: PremiumPageBlobTier | string;
+ /**
+ * Blob tags.
+ */
+ tags?: Tags;
+}
+
+/** Contains response data for the create operation. */
+export declare type PageBlobCreateResponse = PageBlobCreateHeaders & {
+ /** The underlying HTTP response. */
+ _response: coreHttp.HttpResponse & {
+ /** The parsed HTTP response headers. */
+ parsedHeaders: PageBlobCreateHeaders;
+ };
+};
+
+/** Defines headers for PageBlob_getPageRangesDiff operation. */
+export declare interface PageBlobGetPageRangesDiffHeaders {
+ /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob.
*/ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.getRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; + /** + * (unused) + */ + range?: string; +} + +/** + * Contains response data for the {@link BlobClient.getPageRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffResponse extends PageList, PageBlobGetPageRangesDiffHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} + +/** Contains response data for the getPageRangesDiff operation. */ +export declare type PageBlobGetPageRangesDiffResponseModel = PageBlobGetPageRangesDiffHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + }; +}; + +/** Defines headers for PageBlob_getPageRanges operation. */ +export declare interface PageBlobGetPageRangesHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} + +/** + * Contains response data for the {@link BlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} + +/** Contains response data for the getPageRanges operation. */ +export declare type PageBlobGetPageRangesResponseModel = PageBlobGetPageRangesHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesHeaders; + }; +}; + +/** + * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation. + */ +export declare interface PageBlobListPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; +} + +/** + * Options to configure the {@link PageBlobClient.listPageRanges} operation. + */ +export declare interface PageBlobListPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} + +/** + * Conditions to add to the creation of this page blob. + */ +export declare interface PageBlobRequestConditions extends BlobRequestConditions, SequenceNumberAccessConditions { +} + +/** Defines headers for PageBlob_resize operation. */ +export declare interface PageBlobResizeHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link PageBlobClient.resize} operation. + */ +export declare interface PageBlobResizeOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when resizing a page blob. + */ + conditions?: BlobRequestConditions; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the resize operation. */ +export declare type PageBlobResizeResponse = PageBlobResizeHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobResizeHeaders; + }; +}; + +/** + * Options to configure {@link PageBlobClient.startCopyIncremental} operation. + */ +export declare interface PageBlobStartCopyIncrementalOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when starting a copy incremental operation. + */ + conditions?: ModifiedAccessConditions; +} + +/** Defines headers for PageBlob_updateSequenceNumber operation. */ +export declare interface PageBlobUpdateSequenceNumberHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link PageBlobClient.updateSequenceNumber} operation. + */ +export declare interface PageBlobUpdateSequenceNumberOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: BlobRequestConditions; +} + +/** Contains response data for the updateSequenceNumber operation. */ +export declare type PageBlobUpdateSequenceNumberResponse = PageBlobUpdateSequenceNumberHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUpdateSequenceNumberHeaders; + }; +}; + +/** Defines headers for PageBlob_uploadPagesFromURL operation. */ +export declare interface PageBlobUploadPagesFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation. 
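+ *
+ * Example of the call these options apply to (a sketch; `destPageBlobClient`
+ * and `sourceBlobUrl` are placeholders, and the source URL may need a SAS):
+ *
+ * ```js
+ * // Copy the first 512 bytes of the source blob into the first page of the
+ * // destination page blob.
+ * await destPageBlobClient.uploadPagesFromURL(sourceBlobUrl, 0, 0, 512);
+ * ```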
+ */ +export declare interface PageBlobUploadPagesFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: PageBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} + +/** Contains response data for the uploadPagesFromURL operation. */ +export declare type PageBlobUploadPagesFromURLResponse = PageBlobUploadPagesFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesFromURLHeaders; + }; +}; + +/** Defines headers for PageBlob_uploadPages operation. */ +export declare interface PageBlobUploadPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the pages. This header is only returned when the pages were encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.uploadPages} operation. + */ +export declare interface PageBlobUploadPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Callback to receive events on the progress of upload pages operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the uploadPages operation. */ +export declare type PageBlobUploadPagesResponse = PageBlobUploadPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesHeaders; + }; +}; + +/** + * List of page ranges for a blob. + */ +export declare interface PageList { + /** + * Valid non-overlapping page ranges. + */ + pageRange?: Range_2[]; + /** + * Present if the prevSnapshot parameter was specified and there were cleared + * pages between the previous snapshot and the target snapshot. 
+ */ + clearRange?: Range_2[]; +} + +/** the list of pages */ +declare interface PageList_2 { + pageRange?: PageRange[]; + clearRange?: ClearRange[]; + continuationToken?: string; +} + +declare interface PageRange { + start: number; + end: number; +} + +export declare interface PageRangeInfo { + start: number; + end: number; + isClear: boolean; +} + +/** + * The multipart/mixed response which contains the response for each subrequest. + */ +export declare interface ParsedBatchResponse { + /** + * The parsed sub responses. + */ + subResponses: BatchSubResponse[]; + /** + * The succeeded executed sub responses' count; + */ + subResponsesSucceededCount: number; + /** + * The failed executed sub responses' count; + */ + subResponsesFailedCount: number; +} + +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare class Pipeline implements PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories: RequestPolicyFactory[], options?: PipelineOptions); + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} + +/** + * An interface for the {@link Pipeline} class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare interface PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} + +/** + * Option interface for Pipeline constructor. + */ +export declare interface PipelineOptions { + /** + * Optional. Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; +} + +export { PollerLike } + +export { PollOperationState } + +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +export declare enum PremiumPageBlobTier { + /** + * P4 Tier. + */ + P4 = "P4", + /** + * P6 Tier. + */ + P6 = "P6", + /** + * P10 Tier. + */ + P10 = "P10", + /** + * P15 Tier. + */ + P15 = "P15", + /** + * P20 Tier. 
+ */ + P20 = "P20", + /** + * P30 Tier. + */ + P30 = "P30", + /** + * P40 Tier. + */ + P40 = "P40", + /** + * P50 Tier. + */ + P50 = "P50", + /** + * P60 Tier. + */ + P60 = "P60", + /** + * P70 Tier. + */ + P70 = "P70", + /** + * P80 Tier. + */ + P80 = "P80" +} + +/** Defines values for PublicAccessType. */ +export declare type PublicAccessType = "container" | "blob"; + +/** + * Range for Blob Service Operations. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations + */ +declare interface Range_2 { + /** + * StartByte, larger than or equal 0. + */ + offset: number; + /** + * Optional. Count of bytes, larger than 0. + * If not provided, will return bytes from offset to the end. + */ + count?: number; +} +export { Range_2 as Range } + +/** Defines values for RehydratePriority. */ +export declare type RehydratePriority = "High" | "Standard"; + +export { RequestPolicy } + +export { RequestPolicyFactory } + +export { RequestPolicyOptions } + +export { RestError } + +/** the retention policy which determines how long the associated data should persist */ +export declare interface RetentionPolicy { + /** Indicates whether a retention policy is enabled for the storage service */ + enabled: boolean; + /** Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted */ + days?: number; +} + +/** + * Allowed IP range for a SAS. + */ +export declare interface SasIPRange { + /** + * Starting IP address in the IP range. + * If end IP doesn't provide, start IP will the only IP allowed. + */ + start: string; + /** + * Optional. IP address that ends the IP range. + * If not provided, start IP will the only IP allowed. + */ + end?: string; +} + +/** + * Protocols for generated SAS. + */ +export declare enum SASProtocol { + /** + * Protocol that allows HTTPS only + */ + Https = "https", + /** + * Protocol that allows both HTTPS and HTTP + */ + HttpsAndHttp = "https,http" +} + +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. + */ +export declare class SASQueryParameters { + /** + * The storage API version. + */ + readonly version: string; + /** + * Optional. The allowed HTTP protocol(s). + */ + readonly protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + readonly startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + readonly expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + readonly permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + readonly services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). 
Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + readonly resourceTypes?: string; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + readonly identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + readonly encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + readonly resource?: string; + /** + * The signature for the SAS token. + */ + readonly signature: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + readonly cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + readonly contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + readonly contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + readonly contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + readonly contentType?: string; + /** + * Inner value of getter ipRange. + */ + private readonly ipRangeInner?; + /** + * The Azure Active Directory object ID in GUID format. + * Property of user delegation key. + */ + private readonly signedOid?; + /** + * The Azure Active Directory tenant ID in GUID format. + * Property of user delegation key. + */ + private readonly signedTenantId?; + /** + * The date-time the key is active. + * Property of user delegation key. + */ + private readonly signedStartsOn?; + /** + * The date-time the key expires. + * Property of user delegation key. + */ + private readonly signedExpiresOn?; + /** + * Abbreviation of the Azure Storage service that accepts the user delegation key. + * Property of user delegation key. + */ + private readonly signedService?; + /** + * The service version that created the user delegation key. + * Property of user delegation key. + */ + private readonly signedVersion?; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This is only used for User Delegation SAS. + */ + readonly preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + readonly correlationId?: string; + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange(): SasIPRange | undefined; + /** + * Creates an instance of SASQueryParameters. 
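+ *
+ * Once constructed (typically by a SAS-generation helper rather than directly),
+ * the parameters are usually appended to a resource URL. A sketch, where
+ * `blobUrl` and `sasQueryParameters` are placeholders:
+ *
+ * ```js
+ * const sasUrl = `${blobUrl}?${sasQueryParameters.toString()}`;
+ * ```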
+ * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param permissions - Representing the storage permissions + * @param services - Representing the storage services being accessed (only for Account SAS) + * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS) + * @param protocol - Representing the allowed HTTP protocol(s) + * @param startsOn - Representing the start time for this SAS token + * @param expiresOn - Representing the expiry time for this SAS token + * @param ipRange - Representing the range of valid IP addresses for this SAS token + * @param identifier - Representing the signed identifier (only for Service SAS) + * @param resource - Representing the storage container or blob (only for Service SAS) + * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS) + * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS) + * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS) + * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS) + * @param contentType - Representing the content-type header (only for Blob/File Service SAS) + * @param userDelegationKey - Representing the user delegation key properties + * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS) + * @param correlationId - Representing the correlation ID (only for User Delegation SAS) + * @param encryptionScope - + */ + constructor(version: string, signature: string, permissions?: string, services?: string, resourceTypes?: string, protocol?: SASProtocol, startsOn?: Date, expiresOn?: Date, ipRange?: SasIPRange, identifier?: string, resource?: string, cacheControl?: string, contentDisposition?: string, contentEncoding?: string, contentLanguage?: string, contentType?: string, userDelegationKey?: UserDelegationKey, preauthorizedAgentObjectId?: string, correlationId?: string, encryptionScope?: string); + /** + * Creates an instance of SASQueryParameters. + * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param options - Optional. Options to construct the SASQueryParameters. + */ + constructor(version: string, signature: string, options?: SASQueryParametersOptions); + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. + * + */ + toString(): string; + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + private tryAppendQueryParameter; +} + +/** + * Options to construct {@link SASQueryParameters}. + */ +export declare interface SASQueryParametersOptions { + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + resourceTypes?: string; + /** + * Optional. 
The allowed HTTP protocol(s). + */ + protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + resource?: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + contentType?: string; + /** + * User delegation key properties. + */ + userDelegationKey?: UserDelegationKey; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}. + * This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + correlationId?: string; +} + +/** Parameter group */ +export declare interface SequenceNumberAccessConditions { + /** Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. */ + ifSequenceNumberLessThanOrEqualTo?: number; + /** Specify this header value to operate only on a blob if it has a sequence number less than the specified. */ + ifSequenceNumberLessThan?: number; + /** Specify this header value to operate only on a blob if it has the specified sequence number. */ + ifSequenceNumberEqualTo?: number; +} + +/** Defines values for SequenceNumberActionType. */ +export declare type SequenceNumberActionType = "max" | "update" | "increment"; + +/** Defines headers for Service_filterBlobs operation. */ +export declare interface ServiceFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ServiceFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment & ServiceFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; + +/** + * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation. + */ +export declare interface ServiceGenerateAccountSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} + +/** Defines headers for Service_getAccountInfo operation. */ +export declare interface ServiceGetAccountInfoHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Identifies the sku name of the account */ + skuName?: SkuName; + /** Identifies the account kind */ + accountKind?: AccountKind; + /** Version 2019-07-07 and newer. Indicates if the account has a hierarchical namespace enabled. */ + isHierarchicalNamespaceEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.getAccountInfo} operation. + */ +export declare interface ServiceGetAccountInfoOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the getAccountInfo operation. */ +export declare type ServiceGetAccountInfoResponse = ServiceGetAccountInfoHeaders & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetAccountInfoHeaders; + }; +}; + +/** Defines headers for Service_getProperties operation. */ +export declare interface ServiceGetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.getProperties} operation. + */ +export declare interface ServiceGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the getProperties operation. */ +export declare type ServiceGetPropertiesResponse = ServiceGetPropertiesHeaders & BlobServiceProperties & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceProperties; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetPropertiesHeaders; + }; +}; + +/** Defines headers for Service_getStatistics operation. */ +export declare interface ServiceGetStatisticsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.getStatistics} operation. + */ +export declare interface ServiceGetStatisticsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the getStatistics operation. */ +export declare type ServiceGetStatisticsResponse = ServiceGetStatisticsHeaders & BlobServiceStatistics & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceStatistics; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetStatisticsHeaders; + }; +}; + +/** Defines headers for Service_getUserDelegationKey operation. 
*/ +export declare interface ServiceGetUserDelegationKeyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the Service - Get User Delegation Key. + */ +export declare interface ServiceGetUserDelegationKeyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** + * Contains response data for the {@link getUserDelegationKey} operation. + */ +export declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey & ServiceGetUserDelegationKeyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceGetUserDelegationKeyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: UserDelegationKeyModel; + }; +}; + +/** + * Options to configure the {@link BlobServiceClient.listContainers} operation. + */ +export declare interface ServiceListContainersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether the container's metadata + * should be returned as part of the response body. + */ + includeMetadata?: boolean; + /** + * Specifies whether soft deleted containers should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether system containers should be included in the response. + */ + includeSystem?: boolean; +} + +/** Defines headers for Service_listContainersSegment operation. */ +export declare interface ServiceListContainersSegmentHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the listContainersSegment operation. */ +export declare type ServiceListContainersSegmentResponse = ServiceListContainersSegmentHeaders & ListContainersSegmentResponse & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: ListContainersSegmentResponse; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceListContainersSegmentHeaders; + }; +}; + +/** + * Options to configure {@link BlobServiceClient.renameContainer} operation. + */ +export declare interface ServiceRenameContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Condition to meet for the source container. + */ + sourceCondition?: LeaseAccessConditions; +} + +/** Defines headers for Service_setProperties operation. */ +export declare interface ServiceSetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.setProperties} operation. + */ +export declare interface ServiceSetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the setProperties operation. */ +export declare type ServiceSetPropertiesResponse = ServiceSetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSetPropertiesHeaders; + }; +}; + +/** Defines headers for Service_submitBatch operation. */ +export declare interface ServiceSubmitBatchHeaders { + /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */ + contentType?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** Error Code */ + errorCode?: string; +} + +/** Optional parameters. */ +export declare interface ServiceSubmitBatchOptionalParamsModel extends coreHttp.OperationOptions { + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; +} + +/** Contains response data for the submitBatch operation. 
*/ +export declare type ServiceSubmitBatchResponseModel = ServiceSubmitBatchHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; + +/** + * Options to configure {@link BlobServiceClient.undeleteContainer} operation. + */ +export declare interface ServiceUndeleteContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies the new name of the restored container. + * Will use its original name if this is not specified. + * @deprecated Restore container to a different name is not supported by service anymore. + */ + destinationContainerName?: string; +} + +/** + * Signed identifier. + */ +export declare interface SignedIdentifier { + /** + * a unique id + */ + id: string; + /** + * Access Policy + */ + accessPolicy: { + /** + * Optional. The date-time the policy is active + */ + startsOn?: Date; + /** + * Optional. The date-time the policy expires + */ + expiresOn?: Date; + /** + * The permissions for the acl policy + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + */ + permissions?: string; + }; +} + +/** signed identifier */ +export declare interface SignedIdentifierModel { + /** a unique id */ + id: string; + /** An Access policy */ + accessPolicy: AccessPolicy; +} + +/** Defines values for SkuName. */ +export declare type SkuName = "Standard_LRS" | "Standard_GRS" | "Standard_RAGRS" | "Standard_ZRS" | "Premium_LRS"; + +/** The properties that enable an account to host a static website */ +export declare interface StaticWebsite { + /** Indicates whether this account is hosting a static website */ + enabled: boolean; + /** The default name of the index page under each directory */ + indexDocument?: string; + /** The absolute path of the custom 404 page */ + errorDocument404Path?: string; + /** Absolute path of the default index page */ + defaultIndexDocumentPath?: string; +} + +/** + * Defines the known cloud audiences for Storage. + */ +export declare enum StorageBlobAudience { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageOAuthScopes = "https://storage.azure.com/.default", + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + DiskComputeOAuthScopes = "https://disk.compute.azure.com/.default" +} + +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +export declare class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. 
+ * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; +} + +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +export declare class StorageBrowserPolicyFactory implements RequestPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy; +} + +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +declare abstract class StorageClient { + /** + * Encoded URL string value. + */ + readonly url: string; + readonly accountName: string; + /* Excluded from this release type: pipeline */ + /** + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; + /** + * StorageClient is a reference to protocol layer operations entry, which is + * generated by AutoRest generator. + */ + protected readonly storageClientContext: StorageClientContext; + /** + */ + protected readonly isHttps: boolean; + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + protected constructor(url: string, pipeline: PipelineLike); +} + +declare class StorageClientContext extends coreHttp.ServiceClient { + url: string; + version: string; + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url: string, options?: StorageClientOptionalParams); +} + +/** Optional parameters. */ +declare interface StorageClientOptionalParams extends coreHttp.ServiceClientOptions { + /** Specifies the version of the operation to use for this request. */ + version?: string; + /** Overrides client endpoint. */ + endpoint?: string; +} + +/** + * The OAuth scope to use with Azure Storage. + */ +export declare const StorageOAuthScopes: string | string[]; + +/** + * Options interface for the {@link newPipeline} function. + */ +export declare interface StoragePipelineOptions { + /** + * Options to configure a proxy for outgoing requests. + */ + proxyOptions?: ProxyOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; + /** + * Configures the built-in retry policy behavior. + */ + retryOptions?: StorageRetryOptions; + /** + * Keep alive configurations. Default keep-alive is enabled. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; + /** + * The audience used to retrieve an AAD token. + */ + audience?: string | string[]; +} + +/** + * Storage Blob retry options interface. + */ +export declare interface StorageRetryOptions { + /** + * Optional. StorageRetryPolicyType, default is exponential retry policy. 
+ */ + readonly retryPolicyType?: StorageRetryPolicyType; + /** + * Optional. Max try number of attempts, default is 4. + * A value of 1 means 1 try and no retries. + * A value smaller than 1 means default retry number of attempts. + */ + readonly maxTries?: number; + /** + * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request. + * A value of zero or undefined means no default timeout on SDK client, Azure + * Storage server's default timeout policy will be used. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations + */ + readonly tryTimeoutInMs?: number; + /** + * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms). + * The delay increases (exponentially or linearly) with each retry up to a maximum specified by + * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs. + */ + readonly retryDelayInMs?: number; + /** + * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms). + * If you specify 0, then you must also specify 0 for retryDelayInMs. + */ + readonly maxRetryDelayInMs?: number; + /** + * If a secondaryHost is specified, retries will be tried against this host. If secondaryHost is undefined + * (the default) then operations are not retried against another host. + * + * NOTE: Before setting this field, make sure you understand the issues around + * reading stale and potentially-inconsistent data at + * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs} + */ + readonly secondaryHost?: string; +} + +/** + * Retry policy with exponential retry and linear retry implemented. + */ +export declare class StorageRetryPolicy extends BaseRequestPolicy { + /** + * RetryOptions. + */ + private readonly retryOptions; + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryOptions?: StorageRetryOptions); + /** + * Sends request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + protected attemptSendRequest(request: WebResource, secondaryHas404: boolean, attempt: number): Promise; + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + protected shouldRetry(isPrimaryRetry: boolean, attempt: number, response?: HttpOperationResponse, err?: RestError): boolean; + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + private delay; +} + +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. 
+ */ +export declare class StorageRetryPolicyFactory implements RequestPolicyFactory { + private retryOptions?; + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions?: StorageRetryOptions); + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy; +} + +/** + * RetryPolicy types. + */ +export declare enum StorageRetryPolicyType { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + EXPONENTIAL = 0, + /** + * Linear retry. Retry time delay grows linearly. + */ + FIXED = 1 +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +export declare class StorageSharedKeyCredential extends Credential_2 { + /** + * Azure Storage account name; readonly. + */ + readonly accountName: string; + /** + * Azure Storage account key; readonly. + */ + private readonly accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName: string, accountKey: string); + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageSharedKeyCredentialPolicy; + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign: string): string; +} + +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +export declare class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + private readonly factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, factory: StorageSharedKeyCredential); + /** + * Signs request. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + private getHeaderValueToSign; + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + private getCanonicalizedHeadersString; + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + private getCanonicalizedResourceString; +} + +/** Defines values for SyncCopyStatusType. 
*/ +export declare type SyncCopyStatusType = "success"; + +/** + * Specifies HTTP options for conditional requests based on blob tags. + */ +export declare interface TagConditions { + /** + * Optional SQL statement to apply to the tags of the blob. + */ + tagConditions?: string; +} + +/** + * Blob tags. + */ +export declare type Tags = Record; + +/** + * A user delegation key. + */ +export declare interface UserDelegationKey { + /** + * The Azure Active Directory object ID in GUID format. + */ + signedObjectId: string; + /** + * The Azure Active Directory tenant ID in GUID format. + */ + signedTenantId: string; + /** + * The date-time the key is active. + */ + signedStartsOn: Date; + /** + * The date-time the key expires. + */ + signedExpiresOn: Date; + /** + * Abbreviation of the Azure Storage service that accepts the key. + */ + signedService: string; + /** + * The service version that created the key. + */ + signedVersion: string; + /** + * The key as a base64 string. + */ + value: string; +} + +/** A user delegation key */ +export declare interface UserDelegationKeyModel { + /** The Azure Active Directory object ID in GUID format. */ + signedObjectId: string; + /** The Azure Active Directory tenant ID in GUID format */ + signedTenantId: string; + /** The date-time the key is active */ + signedStartsOn: string; + /** The date-time the key expires */ + signedExpiresOn: string; + /** Abbreviation of the Azure Storage service that accepts the key */ + signedService: string; + /** The service version that created the key */ + signedVersion: string; + /** The key as a base64 string */ + value: string; +} + +export { WebResource } + +export { } diff --git a/node_modules/@opentelemetry/api/CHANGELOG.md b/node_modules/@opentelemetry/api/CHANGELOG.md new file mode 100644 index 00000000..2eff2035 --- /dev/null +++ b/node_modules/@opentelemetry/api/CHANGELOG.md @@ -0,0 +1,204 @@ +# CHANGELOG + +All notable changes to this project will be documented in this file. 
+ +## [1.1.0](https://www.github.com/open-telemetry/opentelemetry-js-api/compare/v1.0.4...v1.1.0) (2022-01-25) + + +### Features + +* add tracestate implementation to api ([#147](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/147)) ([82842c7](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/82842c7097614e6ece99e73838ac5e94ff5460b7)) +* define common attributes type ([#142](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/142)) ([ae9bead](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/ae9bead17750d35dec4b63cfae098087666abc85)) +* **trace:** add optional schema url to TracerProvider.getTracer ([#129](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/129)) ([aa65fc6](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/aa65fc66809d45090d6e4951c265386d17ccc6f6)) + + +### Bug Fixes + +* export tracer options ([#154](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/154)) ([b125324](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/b125324438fb2f24eb80c7c6673afc8cfc99575e)) + +### [1.0.4](https://www.github.com/open-telemetry/opentelemetry-js-api/compare/v1.0.3...v1.0.4) (2021-12-18) + + +### Bug Fixes + +* align globalThis fallbacks with otel-core ([#126](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/126)) ([3507de7](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/3507de7c3b95396696657c021953b0b24a63a029)) + +### [1.0.3](https://www.github.com/open-telemetry/opentelemetry-js-api/compare/v1.0.2...v1.0.3) (2021-08-30) + + +### Bug Fixes + +* remove all circular dependencies ([#119](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/119)) ([a8083e8](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/a8083e84b23227828745da80fd5fe512357dd34b)) + +## 1.0.2 + +### :bug: Bug Fix + +* [#105](https://github.com/open-telemetry/opentelemetry-js-api/pull/105) fix: set delegate after successful registration ([@Flarna](https://github.com/Flarna)) +* [#94](https://github.com/open-telemetry/opentelemetry-js-api/pull/94) fix: enforce strict equality on prerelease versions ([@dyladan](https://github.com/dyladan)) + +### :memo: Documentation + +* [#106](https://github.com/open-telemetry/opentelemetry-js-api/pull/106) docs: fix crash in README example ([@trentm](https://github.com/trentm)) +* [#101](https://github.com/open-telemetry/opentelemetry-js-api/pull/101) docs: Format example for tracer.startActiveSpan ([@ad-m](https://github.com/ad-m)) +* [#99](https://github.com/open-telemetry/opentelemetry-js-api/pull/99) chore: fix link to API docs ([@dyladan](https://github.com/dyladan)) + +### :house: Internal + +* [#109](https://github.com/open-telemetry/opentelemetry-js-api/pull/109) internal: add missing approvers from core ([@dyladan](https://github.com/dyladan)) +* [#103](https://github.com/open-telemetry/opentelemetry-js-api/pull/103) chore: reuse NoopTracer in ProxyTracer ([@Flarna](https://github.com/Flarna)) + +### Committers: 4 + +* Adam Dobrawy ([@ad-m](https://github.com/ad-m)) +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) +* Trent Mick ([@trentm](https://github.com/trentm)) + +## 1.0.1 + +### :bug: Bug Fix + +* [#96](https://github.com/open-telemetry/opentelemetry-js-api/pull/96) chore: remove circular dependency ([@dyladan](https://github.com/dyladan)) + +### Committers: 1 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) + +## 
1.0.0 + +### :memo: Documentation + +* [#89](https://github.com/open-telemetry/opentelemetry-js-api/pull/89) chore: update upgrade guidelines ([@dyladan](https://github.com/dyladan)) + +### :house: Internal + +* [#90](https://github.com/open-telemetry/opentelemetry-js-api/pull/90) chore: enable typescript 4.3 noImplicitOverride option ([@Flarna](https://github.com/Flarna)) + +### Committers: 2 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) + +## 0.21.0 + +### :boom: Breaking Change + +* [#78](https://github.com/open-telemetry/opentelemetry-js-api/pull/78) feat: unify signatures of `with` and `bind` ([@Rauno56](https://github.com/Rauno56)) +* [#46](https://github.com/open-telemetry/opentelemetry-js-api/pull/46) chore: do not export singletons ([@dyladan](https://github.com/dyladan)) + +### :rocket: Enhancement + +* [#81](https://github.com/open-telemetry/opentelemetry-js-api/pull/81) chore: function overloads implementation of startActiveSpan in noop t… ([@naseemkullah](https://github.com/naseemkullah)) + +### :house: Internal + +* [#84](https://github.com/open-telemetry/opentelemetry-js-api/pull/84) chore: remove unused backwards compatibility folder ([@Flarna](https://github.com/Flarna)) +* [#85](https://github.com/open-telemetry/opentelemetry-js-api/pull/85) chore: add node:16 to the test matrix ([@Rauno56](https://github.com/Rauno56)) +* [#63](https://github.com/open-telemetry/opentelemetry-js-api/pull/63) feat: debug log global registrations and logger overwrites ([@Rauno56](https://github.com/Rauno56)) +* [#75](https://github.com/open-telemetry/opentelemetry-js-api/pull/75) Add CodeQL Security Scan ([@xukaren](https://github.com/xukaren)) +* [#79](https://github.com/open-telemetry/opentelemetry-js-api/pull/79) chore: fix eslint config ([@Rauno56](https://github.com/Rauno56)) + +### Committers: 5 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) +* Karen Xu ([@xukaren](https://github.com/xukaren)) +* Naseem ([@naseemkullah](https://github.com/naseemkullah)) +* Rauno Viskus ([@Rauno56](https://github.com/Rauno56)) + +## 0.20.0 + +### :rocket: Enhancement + +* [#69](https://github.com/open-telemetry/opentelemetry-js-api/pull/69) feat(context): add utils method to remove keys from context ([@vmarchaud](https://github.com/vmarchaud)) +* [#71](https://github.com/open-telemetry/opentelemetry-js-api/pull/71) chore: export baggage ([@dyladan](https://github.com/dyladan)) + +### Committers: 2 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Valentin Marchaud ([@vmarchaud](https://github.com/vmarchaud)) + +## 0.19.0 + +### :boom: Breaking Change + +* [#55](https://github.com/open-telemetry/opentelemetry-js-api/pull/55) chore: move baggage methods in propagation namespace ([@vmarchaud](https://github.com/vmarchaud)) +* [#65](https://github.com/open-telemetry/opentelemetry-js-api/pull/65) chore: remove suppress instrumentation ([@dyladan](https://github.com/dyladan)) +* [#60](https://github.com/open-telemetry/opentelemetry-js-api/pull/60) chore: removing timed event ([@obecny](https://github.com/obecny)) +* [#58](https://github.com/open-telemetry/opentelemetry-js-api/pull/58) chore: use spancontext for link ([@dyladan](https://github.com/dyladan)) +* [#47](https://github.com/open-telemetry/opentelemetry-js-api/pull/47) chore: move span method for context in trace API #40 ([@vmarchaud](https://github.com/vmarchaud)) +* 
[#45](https://github.com/open-telemetry/opentelemetry-js-api/pull/45) chore: rename `span#context()` to `span#spanContext` ([@dyladan](https://github.com/dyladan)) +* [#43](https://github.com/open-telemetry/opentelemetry-js-api/pull/43) chore: renaming noop span to non recording span ([@obecny](https://github.com/obecny)) +* [#32](https://github.com/open-telemetry/opentelemetry-js-api/pull/32) feat!: return boolean success value from setGlobalXXX methods ([@dyladan](https://github.com/dyladan)) + +### :rocket: Enhancement + +* [#62](https://github.com/open-telemetry/opentelemetry-js-api/pull/62) chore: adding component logger ([@obecny](https://github.com/obecny)) +* [#54](https://github.com/open-telemetry/opentelemetry-js-api/pull/54) feat: add tracer.startActiveSpan() ([@naseemkullah](https://github.com/naseemkullah)) +* [#58](https://github.com/open-telemetry/opentelemetry-js-api/pull/58) chore: use spancontext for link ([@dyladan](https://github.com/dyladan)) +* [#51](https://github.com/open-telemetry/opentelemetry-js-api/pull/51) feat: add function to wrap SpanContext in NonRecordingSpan #49 ([@dyladan](https://github.com/dyladan)) + +### :memo: Documentation + +* [#64](https://github.com/open-telemetry/opentelemetry-js-api/pull/64) chore: document the reason for symbol.for ([@dyladan](https://github.com/dyladan)) +* [#44](https://github.com/open-telemetry/opentelemetry-js-api/pull/44) chore: updating readme headline and fixing links ([@obecny](https://github.com/obecny)) + +### Committers: 6 + +* Bartlomiej Obecny ([@obecny](https://github.com/obecny)) +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) +* Naseem ([@naseemkullah](https://github.com/naseemkullah)) +* Valentin Marchaud ([@vmarchaud](https://github.com/vmarchaud)) +* t2t2 ([@t2t2](https://github.com/t2t2)) + +## 1.0.0-rc.0 + +### :memo: Documentation + +* [#20](https://github.com/open-telemetry/opentelemetry-js-api/pull/20) docs: document latest manual tracing ([@dyladan](https://github.com/dyladan)) +* [#18](https://github.com/open-telemetry/opentelemetry-js-api/pull/18) chore: deploy docs on a release ([@dyladan](https://github.com/dyladan)) +* [#19](https://github.com/open-telemetry/opentelemetry-js-api/pull/19) docs: fix readme links ([@dyladan](https://github.com/dyladan)) + +### Committers: 1 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) + +## 0.18.1 + +### :bug: Bug Fix + +* [#16](https://github.com/open-telemetry/opentelemetry-js-api/pull/16) fix: Reverse the direction of the semver check ([@dyladan](https://github.com/dyladan)) + +### Committers: 1 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) + +## v0.18.0 + +### :boom: Breaking Change + +* [#9](https://github.com/open-telemetry/opentelemetry-js-api/pull/9) chore: refactor diag logger ([@dyladan](https://github.com/dyladan)) + +### :rocket: Enhancement + +* [#10](https://github.com/open-telemetry/opentelemetry-js-api/pull/10) Use semver to determine API compatibility ([@dyladan](https://github.com/dyladan)) + +### :house: Internal + +* [#12](https://github.com/open-telemetry/opentelemetry-js-api/pull/12) chore: don't disable rule eqeqeq ([@Flarna](https://github.com/Flarna)) +* [#8](https://github.com/open-telemetry/opentelemetry-js-api/pull/8) chore: remove nycrc in favor of tsconfig reporting ([@dyladan](https://github.com/dyladan)) +* [#3](https://github.com/open-telemetry/opentelemetry-js-api/pull/3) chore: add test workflow ([@dyladan](https://github.com/dyladan)) +* 
[#4](https://github.com/open-telemetry/opentelemetry-js-api/pull/4) chore: remove package lock ([@dyladan](https://github.com/dyladan)) +* [#2](https://github.com/open-telemetry/opentelemetry-js-api/pull/2) chore: add lint workflow ([@dyladan](https://github.com/dyladan)) + +### Committers: 2 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) + +## v0.17.0 + +Versions previous to `0.18.0` were developed in another repository. +To see previous changelog entries see the [CHANGELOG.md](https://github.com/open-telemetry/opentelemetry-js/blob/main/CHANGELOG.md). diff --git a/node_modules/@opentelemetry/api/LICENSE b/node_modules/@opentelemetry/api/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/node_modules/@opentelemetry/api/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@opentelemetry/api/README.md b/node_modules/@opentelemetry/api/README.md new file mode 100644 index 00000000..363cfac1 --- /dev/null +++ b/node_modules/@opentelemetry/api/README.md @@ -0,0 +1,140 @@ + +--- +

+ + API Documentation • Getting In Touch (GitHub Discussions) + 
+ + Badges: GitHub release (latest by date including pre-releases) · Codecov Status · license · Build Status · Build Status + 
+ +--- + +# OpenTelemetry API for JavaScript + +[![NPM Published Version][npm-img]][npm-url] + +This package provides everything needed to interact with the OpenTelemetry API, including all TypeScript interfaces, enums, and no-op implementations. It is intended for use both on the server and in the browser. + +The methods in this package perform no operations by default. This means they can be safely called by a library or end-user application whether there is an SDK registered or not. In order to generate and export telemetry data, you will also need an SDK such as the [OpenTelemetry JS SDK][opentelemetry-js]. + +## Tracing Quick Start + +### You Will Need + +- An application you wish to instrument +- [OpenTelemetry JS SDK][opentelemetry-js] +- Node.js >=8.5.0 (14+ is preferred) or an ECMAScript 5+ compatible browser + +**Note:** ECMAScript 5+ compatibility is for this package only. Please refer to the documentation for the SDK you are using to determine its minimum ECMAScript version. + +**Note for library authors:** Only your end users will need an OpenTelemetry SDK. If you wish to support OpenTelemetry in your library, you only need to use the OpenTelemetry API. For more information, please read the [tracing documentation][docs-tracing]. + +### Install Dependencies + +```sh +npm install @opentelemetry/api @opentelemetry/sdk-trace-base +``` + +### Trace Your Application + +In order to get started with tracing, you will need to first register an SDK. The SDK you are using may provide a convenience method which calls the registration methods for you, but if you would like to call them directly they are documented here: [sdk registration methods][docs-sdk-registration]. + +Once you have registered an SDK, you can start and end spans. A simple example of basic SDK registration and tracing a simple operation is below. The example should export spans to the console once per second. For more information, see the [tracing documentation][docs-tracing]. + +```javascript +const { trace } = require("@opentelemetry/api"); +const { BasicTracerProvider, ConsoleSpanExporter, SimpleSpanProcessor } = require("@opentelemetry/sdk-trace-base"); + +// Create and register an SDK +const provider = new BasicTracerProvider(); +provider.addSpanProcessor(new SimpleSpanProcessor(new ConsoleSpanExporter())); +trace.setGlobalTracerProvider(provider); + +// Acquire a tracer from the global tracer provider which will be used to trace the application +const name = 'my-application-name'; +const version = '0.1.0'; +const tracer = trace.getTracer(name, version); + +// Trace your application by creating spans +async function operation() { + const span = tracer.startSpan("do operation"); + + // mock some work by sleeping 1 second + await new Promise((resolve, reject) => { + setTimeout(resolve, 1000); + }) + + span.end(); +} + +async function main() { + while (true) { + await operation(); + } +} + +main(); +``` + +## Version Compatibility + +Because the npm installer and node module resolution algorithm could potentially allow two or more copies of any given package to exist within the same `node_modules` structure, the OpenTelemetry API takes advantage of a variable on the `global` object to store the global API. When an API method in the API package is called, it checks if this `global` API exists and proxies calls to it if and only if it is a compatible API version. 
This means if a package has a dependency on an OpenTelemetry API version which is not compatible with the API used by the end user, the package will receive a no-op implementation of the API. + +## Upgrade Guidelines + +### 0.21.0 to 1.0.0 + +No breaking changes + +### 0.20.0 to 0.21.0 + +- [#78](https://github.com/open-telemetry/opentelemetry-js-api/issues/78) `api.context.bind` arguments reversed and `context` is now a required argument. +- [#46](https://github.com/open-telemetry/opentelemetry-js-api/issues/46) Noop classes and singletons are no longer exported. To create a noop span it is recommended to use `api.trace.wrapSpanContext` with `INVALID_SPAN_CONTEXT` instead of using the `NOOP_TRACER`. + +### 1.0.0-rc.3 to 0.20.0 + +- Removing `TimedEvent` which was not part of spec +- `HttpBaggage` renamed to `HttpBaggagePropagator` +- [#45](https://github.com/open-telemetry/opentelemetry-js-api/pull/45) `Span#context` renamed to `Span#spanContext` +- [#47](https://github.com/open-telemetry/opentelemetry-js-api/pull/47) `getSpan`/`setSpan`/`getSpanContext`/`setSpanContext` moved to `trace` namespace +- [#55](https://github.com/open-telemetry/opentelemetry-js-api/pull/55) `getBaggage`/`setBaggage`/`createBaggage` moved to `propagation` namespace + +## Useful links + +- For more information on OpenTelemetry, visit: +- For more about OpenTelemetry JavaScript: +- For help or feedback on this project, join us in [GitHub Discussions][discussions-url] + +## License + +Apache 2.0 - See [LICENSE][license-url] for more information. + +[opentelemetry-js]: https://github.com/open-telemetry/opentelemetry-js + +[discussions-url]: https://github.com/open-telemetry/opentelemetry-js/discussions +[license-url]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/LICENSE +[license-image]: https://img.shields.io/badge/license-Apache_2.0-green.svg?style=flat +[npm-url]: https://www.npmjs.com/package/@opentelemetry/api +[npm-img]: https://badge.fury.io/js/%40opentelemetry%2Fapi.svg +[docs-tracing]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/docs/tracing.md +[docs-sdk-registration]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/docs/sdk-registration.md diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.d.ts b/node_modules/@opentelemetry/api/build/esm/api/context.d.ts new file mode 100644 index 00000000..61caee8d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/context.d.ts @@ -0,0 +1,41 @@ +import { Context, ContextManager } from '../context/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare class ContextAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Context API */ + static getInstance(): ContextAPI; + /** + * Set the current context manager. 
+ * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager: ContextManager): boolean; + /** + * Get the currently active context + */ + active(): Context; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind(context: Context, target: T): T; + private _getContextManager; + /** Disable and remove the global context manager */ + disable(): void; +} +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.js b/node_modules/@opentelemetry/api/build/esm/api/context.js new file mode 100644 index 00000000..358f0e46 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/context.js @@ -0,0 +1,90 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +import { NoopContextManager } from '../context/NoopContextManager'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'context'; +var NOOP_CONTEXT_MANAGER = new NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +var ContextAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function ContextAPI() { + } + /** Get the singleton instance of the Context API */ + ContextAPI.getInstance = function () { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + }; + /** + * Set the current context manager. 
+ * + * @returns true if the context manager was successfully registered, else false + */ + ContextAPI.prototype.setGlobalContextManager = function (contextManager) { + return registerGlobal(API_NAME, contextManager, DiagAPI.instance()); + }; + /** + * Get the currently active context + */ + ContextAPI.prototype.active = function () { + return this._getContextManager().active(); + }; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + ContextAPI.prototype.with = function (context, fn, thisArg) { + var _a; + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return (_a = this._getContextManager()).with.apply(_a, __spreadArray([context, fn, thisArg], args)); + }; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + ContextAPI.prototype.bind = function (context, target) { + return this._getContextManager().bind(context, target); + }; + ContextAPI.prototype._getContextManager = function () { + return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; + }; + /** Disable and remove the global context manager */ + ContextAPI.prototype.disable = function () { + this._getContextManager().disable(); + unregisterGlobal(API_NAME, DiagAPI.instance()); + }; + return ContextAPI; +}()); +export { ContextAPI }; +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.js.map b/node_modules/@opentelemetry/api/build/esm/api/context.js.map new file mode 100644 index 00000000..2cac260b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/api/context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;AAEH,OAAO,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAC;AAEnE,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,SAAS,CAAC;AAC3B,IAAM,oBAAoB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEtD;;GAEG;AACH;IAGE,+FAA+F;IAC/F;IAAuB,CAAC;IAExB,oDAAoD;IACtC,sBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAuB,GAA9B,UAA+B,cAA8B;QAC3D,OAAO,cAAc,CAAC,QAAQ,EAAE,cAAc,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACI,2BAAM,GAAb;QACE,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC5C,CAAC;IAED;;;;;;;OAOG;IACI,yBAAI,GAAX,UACE,OAAgB,EAChB,EAAK,EACL,OAA8B;;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,CAAA,KAAA,IAAI,CAAC,kBAAkB,EAAE,CAAA,CAAC,IAAI,0BAAC,OAAO,EAAE,EAAE,EAAE,OAAO,GAAK,IAAI,GAAE;IACvE,CAAC;IAED;;;;;OAKG;IACI,yBAAI,GAAX,UAAe,OAAgB,EAAE,MAAS;QACxC,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACzD,CAAC;IAEO,uCAAkB,GAA1B;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,oBAAoB,CAAC;IACrD,CAAC;IAED,oDAAoD;IAC7C,4BAAO,GAAd;QACE,IAAI,CAAC,kBAAkB,EAAE,CAAC,OAAO,EAAE,CAAC;QACpC,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IACH,iBAAC;AAAD,CAAC,AAnED,IAmEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 
(the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopContextManager } from '../context/NoopContextManager';\nimport { Context, ContextManager } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'context';\nconst NOOP_CONTEXT_MANAGER = new NoopContextManager();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport class ContextAPI {\n private static _instance?: ContextAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Context API */\n public static getInstance(): ContextAPI {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current context manager.\n *\n * @returns true if the context manager was successfully registered, else false\n */\n public setGlobalContextManager(contextManager: ContextManager): boolean {\n return registerGlobal(API_NAME, contextManager, DiagAPI.instance());\n }\n\n /**\n * Get the currently active context\n */\n public active(): Context {\n return this._getContextManager().active();\n }\n\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n public with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return this._getContextManager().with(context, fn, thisArg, ...args);\n }\n\n /**\n * Bind a context to a target function or event emitter\n *\n * @param context context to bind to the event emitter or function. 
Defaults to the currently active context\n * @param target function or event emitter to bind\n */\n public bind(context: Context, target: T): T {\n return this._getContextManager().bind(context, target);\n }\n\n private _getContextManager(): ContextManager {\n return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER;\n }\n\n /** Disable and remove the global context manager */\n public disable() {\n this._getContextManager().disable();\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.d.ts b/node_modules/@opentelemetry/api/build/esm/api/diag.d.ts new file mode 100644 index 00000000..5c8c0bef --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/diag.d.ts @@ -0,0 +1,38 @@ +import { ComponentLoggerOptions, DiagLogFunction, DiagLogger, DiagLogLevel } from '../diag/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +export declare class DiagAPI implements DiagLogger { + private static _instance?; + /** Get the singleton instance of the DiagAPI API */ + static instance(): DiagAPI; + /** + * Private internal constructor + * @private + */ + private constructor(); + /** + * Set the global DiagLogger and DiagLogLevel. + * If a global diag logger is already set, this will override it. + * + * @param logger - [Optional] The DiagLogger instance to set as the default logger. + * @param logLevel - [Optional] The DiagLogLevel used to filter logs sent to the logger. If not provided it will default to INFO. + * @returns true if the logger was successfully registered, else false + */ + setLogger: (logger: DiagLogger, logLevel?: DiagLogLevel) => boolean; + /** + * + */ + createComponentLogger: (options: ComponentLoggerOptions) => DiagLogger; + verbose: DiagLogFunction; + debug: DiagLogFunction; + info: DiagLogFunction; + warn: DiagLogFunction; + error: DiagLogFunction; + /** + * Unregister the global logger and return to Noop + */ + disable: () => void; +} +//# sourceMappingURL=diag.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.js b/node_modules/@opentelemetry/api/build/esm/api/diag.js new file mode 100644 index 00000000..05d85791 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/diag.js @@ -0,0 +1,90 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { DiagComponentLogger } from '../diag/ComponentLogger'; +import { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger'; +import { DiagLogLevel, } from '../diag/types'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +var API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +var DiagAPI = /** @class */ (function () { + /** + * Private internal constructor + * @private + */ + function DiagAPI() { + function _logProxy(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + var logger = getGlobal('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName].apply(logger, args); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + var self = this; + // DiagAPI specific functions + self.setLogger = function (logger, logLevel) { + var _a, _b; + if (logLevel === void 0) { logLevel = DiagLogLevel.INFO; } + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + var oldLogger = getGlobal('diag'); + var newLogger = createLogLevelDiagLogger(logLevel, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger) { + var stack = (_b = new Error().stack) !== null && _b !== void 0 ? 
_b : ''; + oldLogger.warn("Current logger will be overwritten from " + stack); + newLogger.warn("Current logger will overwrite one already registered from " + stack); + } + return registerGlobal('diag', newLogger, self, true); + }; + self.disable = function () { + unregisterGlobal(API_NAME, self); + }; + self.createComponentLogger = function (options) { + return new DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + DiagAPI.instance = function () { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + }; + return DiagAPI; +}()); +export { DiagAPI }; +//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.js.map b/node_modules/@opentelemetry/api/build/esm/api/diag.js.map new file mode 100644 index 00000000..29a4d743 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/diag.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag.js","sourceRoot":"","sources":["../../../src/api/diag.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;AAC9D,OAAO,EAAE,wBAAwB,EAAE,MAAM,iCAAiC,CAAC;AAC3E,OAAO,EAIL,YAAY,GACb,MAAM,eAAe,CAAC;AACvB,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAElC,IAAM,QAAQ,GAAG,MAAM,CAAC;AAExB;;;GAGG;AACH;IAYE;;;OAGG;IACH;QACE,SAAS,SAAS,CAAC,QAA0B;YAC3C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;gBACjC,6BAA6B;gBAC7B,IAAI,CAAC,MAAM;oBAAE,OAAO;gBACpB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,EAAc,IAAI,EAAE;YACnC,CAAC,CAAC;QACJ,CAAC;QAED,mFAAmF;QACnF,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,6BAA6B;QAE7B,IAAI,CAAC,SAAS,GAAG,UACf,MAAkB,EAClB,QAA0C;;YAA1C,yBAAA,EAAA,WAAyB,YAAY,CAAC,IAAI;YAE1C,IAAI,MAAM,KAAK,IAAI,EAAE;gBACnB,mCAAmC;gBACnC,2DAA2D;gBAC3D,qFAAqF;gBACrF,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,oIAAoI,CACrI,CAAC;gBACF,IAAI,CAAC,KAAK,CAAC,MAAA,GAAG,CAAC,KAAK,mCAAI,GAAG,CAAC,OAAO,CAAC,CAAC;gBACrC,OAAO,KAAK,CAAC;aACd;YAED,IAAM,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;YACpC,IAAM,SAAS,GAAG,wBAAwB,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;YAC7D,kFAAkF;YAClF,IAAI,SAAS,EAAE;gBACb,IAAM,KAAK,GAAG,MAAA,IAAI,KAAK,EAAE,CAAC,KAAK,mCAAI,iCAAiC,CAAC;gBACrE,SAAS,CAAC,IAAI,CAAC,6CAA2C,KAAO,CAAC,CAAC;gBACnE,SAAS,CAAC,IAAI,CACZ,+DAA6D,KAAO,CACrE,CAAC;aACH;YAED,OAAO,cAAc,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QACvD,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG;YACb,gBAAgB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QACnC,CAAC,CAAC;QAEF,IAAI,CAAC,qBAAqB,GAAG,UAAC,OAA+B;YAC3D,OAAO,IAAI,mBAAmB,CAAC,OAAO,CAAC,CAAC;QAC1C,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC;QACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;QAChC,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAtED,oDAAoD;IACtC,gBAAQ,GAAtB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,OAAO,EAAE,CAAC;SAChC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IA4FH,cAAC;AAAD,CAAC,AAtGD,IAsGC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable 
law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagComponentLogger } from '../diag/ComponentLogger';\nimport { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger';\nimport {\n ComponentLoggerOptions,\n DiagLogFunction,\n DiagLogger,\n DiagLogLevel,\n} from '../diag/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\n\nconst API_NAME = 'diag';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry internal\n * diagnostic API\n */\nexport class DiagAPI implements DiagLogger {\n private static _instance?: DiagAPI;\n\n /** Get the singleton instance of the DiagAPI API */\n public static instance(): DiagAPI {\n if (!this._instance) {\n this._instance = new DiagAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Private internal constructor\n * @private\n */\n private constructor() {\n function _logProxy(funcName: keyof DiagLogger): DiagLogFunction {\n return function (...args) {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) return;\n return logger[funcName](...args);\n };\n }\n\n // Using self local variable for minification purposes as 'this' cannot be minified\n const self = this;\n\n // DiagAPI specific functions\n\n self.setLogger = (\n logger: DiagLogger,\n logLevel: DiagLogLevel = DiagLogLevel.INFO\n ) => {\n if (logger === self) {\n // There isn't much we can do here.\n // Logging to the console might break the user application.\n // Try to log to self. If a logger was previously registered it will receive the log.\n const err = new Error(\n 'Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'\n );\n self.error(err.stack ?? err.message);\n return false;\n }\n\n const oldLogger = getGlobal('diag');\n const newLogger = createLogLevelDiagLogger(logLevel, logger);\n // There already is an logger registered. We'll let it know before overwriting it.\n if (oldLogger) {\n const stack = new Error().stack ?? '';\n oldLogger.warn(`Current logger will be overwritten from ${stack}`);\n newLogger.warn(\n `Current logger will overwrite one already registered from ${stack}`\n );\n }\n\n return registerGlobal('diag', newLogger, self, true);\n };\n\n self.disable = () => {\n unregisterGlobal(API_NAME, self);\n };\n\n self.createComponentLogger = (options: ComponentLoggerOptions) => {\n return new DiagComponentLogger(options);\n };\n\n self.verbose = _logProxy('verbose');\n self.debug = _logProxy('debug');\n self.info = _logProxy('info');\n self.warn = _logProxy('warn');\n self.error = _logProxy('error');\n }\n\n /**\n * Set the global DiagLogger and DiagLogLevel.\n * If a global diag logger is already set, this will override it.\n *\n * @param logger - [Optional] The DiagLogger instance to set as the default logger.\n * @param logLevel - [Optional] The DiagLogLevel used to filter logs sent to the logger. 
If not provided it will default to INFO.\n * @returns true if the logger was successfully registered, else false\n */\n public setLogger!: (logger: DiagLogger, logLevel?: DiagLogLevel) => boolean;\n /**\n *\n */\n public createComponentLogger!: (\n options: ComponentLoggerOptions\n ) => DiagLogger;\n\n // DiagLogger implementation\n public verbose!: DiagLogFunction;\n public debug!: DiagLogFunction;\n public info!: DiagLogFunction;\n public warn!: DiagLogFunction;\n public error!: DiagLogFunction;\n\n /**\n * Unregister the global logger and return to Noop\n */\n public disable!: () => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.d.ts b/node_modules/@opentelemetry/api/build/esm/api/propagation.d.ts new file mode 100644 index 00000000..cf372cf2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/propagation.d.ts @@ -0,0 +1,48 @@ +import { Context } from '../context/types'; +import { TextMapGetter, TextMapPropagator, TextMapSetter } from '../propagation/TextMapPropagator'; +import { getBaggage, setBaggage, deleteBaggage } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +export declare class PropagationAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Propagator API */ + static getInstance(): PropagationAPI; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator: TextMapPropagator): boolean; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context: Context, carrier: Carrier, setter?: TextMapSetter): void; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context: Context, carrier: Carrier, getter?: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; + /** Remove the global propagator */ + disable(): void; + createBaggage: typeof createBaggage; + getBaggage: typeof getBaggage; + setBaggage: typeof setBaggage; + deleteBaggage: typeof deleteBaggage; + private _getGlobalPropagator; +} +//# sourceMappingURL=propagation.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.js b/node_modules/@opentelemetry/api/build/esm/api/propagation.js new file mode 100644 index 00000000..ef694cb7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/propagation.js @@ -0,0 +1,88 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator'; +import { defaultTextMapGetter, defaultTextMapSetter, } from '../propagation/TextMapPropagator'; +import { getBaggage, setBaggage, deleteBaggage, } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'propagation'; +var NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +var PropagationAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function PropagationAPI() { + this.createBaggage = createBaggage; + this.getBaggage = getBaggage; + this.setBaggage = setBaggage; + this.deleteBaggage = deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + PropagationAPI.getInstance = function () { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + }; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + PropagationAPI.prototype.setGlobalPropagator = function (propagator) { + return registerGlobal(API_NAME, propagator, DiagAPI.instance()); + }; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + PropagationAPI.prototype.inject = function (context, carrier, setter) { + if (setter === void 0) { setter = defaultTextMapSetter; } + return this._getGlobalPropagator().inject(context, carrier, setter); + }; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + PropagationAPI.prototype.extract = function (context, carrier, getter) { + if (getter === void 0) { getter = defaultTextMapGetter; } + return this._getGlobalPropagator().extract(context, carrier, getter); + }; + /** + * Return a list of all fields which may be used by the propagator. 
+ */ + PropagationAPI.prototype.fields = function () { + return this._getGlobalPropagator().fields(); + }; + /** Remove the global propagator */ + PropagationAPI.prototype.disable = function () { + unregisterGlobal(API_NAME, DiagAPI.instance()); + }; + PropagationAPI.prototype._getGlobalPropagator = function () { + return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + }; + return PropagationAPI; +}()); +export { PropagationAPI }; +//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map b/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map new file mode 100644 index 00000000..60ddbbdb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"propagation.js","sourceRoot":"","sources":["../../../src/api/propagation.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,qBAAqB,EAAE,MAAM,sCAAsC,CAAC;AAC7E,OAAO,EACL,oBAAoB,EACpB,oBAAoB,GAIrB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EACV,UAAU,EACV,aAAa,GACd,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,aAAa,CAAC;AAC/B,IAAM,wBAAwB,GAAG,IAAI,qBAAqB,EAAE,CAAC;AAE7D;;GAEG;AACH;IAGE,+FAA+F;IAC/F;QA8DO,kBAAa,GAAG,aAAa,CAAC;QAE9B,eAAU,GAAG,UAAU,CAAC;QAExB,eAAU,GAAG,UAAU,CAAC;QAExB,kBAAa,GAAG,aAAa,CAAC;IApEd,CAAC;IAExB,uDAAuD;IACzC,0BAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,cAAc,EAAE,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAmB,GAA1B,UAA2B,UAA6B;QACtD,OAAO,cAAc,CAAC,QAAQ,EAAE,UAAU,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;OAMG;IACI,+BAAM,GAAb,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,6BAAqD;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;OAMG;IACI,gCAAO,GAAd,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,6BAAqD;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,+BAAM,GAAb;QACE,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC9C,CAAC;IAED,mCAAmC;IAC5B,gCAAO,GAAd;QACE,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IAUO,6CAAoB,GAA5B;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,wBAAwB,CAAC;IACzD,CAAC;IACH,qBAAC;AAAD,CAAC,AA7ED,IA6EC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator';\nimport {\n defaultTextMapGetter,\n defaultTextMapSetter,\n TextMapGetter,\n TextMapPropagator,\n TextMapSetter,\n} from '../propagation/TextMapPropagator';\nimport {\n getBaggage,\n setBaggage,\n deleteBaggage,\n} from '../baggage/context-helpers';\nimport 
{ createBaggage } from '../baggage/utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'propagation';\nconst NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nexport class PropagationAPI {\n private static _instance?: PropagationAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Propagator API */\n public static getInstance(): PropagationAPI {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current propagator.\n *\n * @returns true if the propagator was successfully registered, else false\n */\n public setGlobalPropagator(propagator: TextMapPropagator): boolean {\n return registerGlobal(API_NAME, propagator, DiagAPI.instance());\n }\n\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param context Context carrying tracing data to inject\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n */\n public inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter = defaultTextMapSetter\n ): void {\n return this._getGlobalPropagator().inject(context, carrier, setter);\n }\n\n /**\n * Extract context from a carrier\n *\n * @param context Context which the newly created context will inherit from\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n */\n public extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter = defaultTextMapGetter\n ): Context {\n return this._getGlobalPropagator().extract(context, carrier, getter);\n }\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n public fields(): string[] {\n return this._getGlobalPropagator().fields();\n }\n\n /** Remove the global propagator */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n\n public createBaggage = createBaggage;\n\n public getBaggage = getBaggage;\n\n public setBaggage = setBaggage;\n\n public deleteBaggage = deleteBaggage;\n\n private _getGlobalPropagator(): TextMapPropagator {\n return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.d.ts b/node_modules/@opentelemetry/api/build/esm/api/trace.d.ts new file mode 100644 index 00000000..9135b74b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/trace.d.ts @@ -0,0 +1,39 @@ +import { isSpanContextValid, wrapSpanContext } from '../trace/spancontext-utils'; +import { Tracer } from '../trace/tracer'; +import { TracerProvider } from '../trace/tracer_provider'; +import { deleteSpan, getSpan, getSpanContext, setSpan, setSpanContext } from '../trace/context-utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +export declare class TraceAPI { + private static _instance?; + private _proxyTracerProvider; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Trace API */ + static getInstance(): TraceAPI; + /** + * Set the current global tracer. 
+ * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider: TracerProvider): boolean; + /** + * Returns the global tracer provider. + */ + getTracerProvider(): TracerProvider; + /** + * Returns a tracer from the global tracer provider. + */ + getTracer(name: string, version?: string): Tracer; + /** Remove the global tracer provider */ + disable(): void; + wrapSpanContext: typeof wrapSpanContext; + isSpanContextValid: typeof isSpanContextValid; + deleteSpan: typeof deleteSpan; + getSpan: typeof getSpan; + getSpanContext: typeof getSpanContext; + setSpan: typeof setSpan; + setSpanContext: typeof setSpanContext; +} +//# sourceMappingURL=trace.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.js b/node_modules/@opentelemetry/api/build/esm/api/trace.js new file mode 100644 index 00000000..1b40cad3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/trace.js @@ -0,0 +1,76 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { ProxyTracerProvider } from '../trace/ProxyTracerProvider'; +import { isSpanContextValid, wrapSpanContext, } from '../trace/spancontext-utils'; +import { deleteSpan, getSpan, getSpanContext, setSpan, setSpanContext, } from '../trace/context-utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'trace'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +var TraceAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function TraceAPI() { + this._proxyTracerProvider = new ProxyTracerProvider(); + this.wrapSpanContext = wrapSpanContext; + this.isSpanContextValid = isSpanContextValid; + this.deleteSpan = deleteSpan; + this.getSpan = getSpan; + this.getSpanContext = getSpanContext; + this.setSpan = setSpan; + this.setSpanContext = setSpanContext; + } + /** Get the singleton instance of the Trace API */ + TraceAPI.getInstance = function () { + if (!this._instance) { + this._instance = new TraceAPI(); + } + return this._instance; + }; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + TraceAPI.prototype.setGlobalTracerProvider = function (provider) { + var success = registerGlobal(API_NAME, this._proxyTracerProvider, DiagAPI.instance()); + if (success) { + this._proxyTracerProvider.setDelegate(provider); + } + return success; + }; + /** + * Returns the global tracer provider. + */ + TraceAPI.prototype.getTracerProvider = function () { + return getGlobal(API_NAME) || this._proxyTracerProvider; + }; + /** + * Returns a tracer from the global tracer provider. 
+ */ + TraceAPI.prototype.getTracer = function (name, version) { + return this.getTracerProvider().getTracer(name, version); + }; + /** Remove the global tracer provider */ + TraceAPI.prototype.disable = function () { + unregisterGlobal(API_NAME, DiagAPI.instance()); + this._proxyTracerProvider = new ProxyTracerProvider(); + }; + return TraceAPI; +}()); +export { TraceAPI }; +//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.js.map b/node_modules/@opentelemetry/api/build/esm/api/trace.js.map new file mode 100644 index 00000000..9bc9e046 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/trace.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace.js","sourceRoot":"","sources":["../../../src/api/trace.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,mBAAmB,EAAE,MAAM,8BAA8B,CAAC;AACnE,OAAO,EACL,kBAAkB,EAClB,eAAe,GAChB,MAAM,4BAA4B,CAAC;AAGpC,OAAO,EACL,UAAU,EACV,OAAO,EACP,cAAc,EACd,OAAO,EACP,cAAc,GACf,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,OAAO,CAAC;AAEzB;;GAEG;AACH;IAKE,+FAA+F;IAC/F;QAHQ,yBAAoB,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAmDlD,oBAAe,GAAG,eAAe,CAAC;QAElC,uBAAkB,GAAG,kBAAkB,CAAC;QAExC,eAAU,GAAG,UAAU,CAAC;QAExB,YAAO,GAAG,OAAO,CAAC;QAElB,mBAAc,GAAG,cAAc,CAAC;QAEhC,YAAO,GAAG,OAAO,CAAC;QAElB,mBAAc,GAAG,cAAc,CAAC;IA5DhB,CAAC;IAExB,kDAAkD;IACpC,oBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,QAAQ,EAAE,CAAC;SACjC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,0CAAuB,GAA9B,UAA+B,QAAwB;QACrD,IAAM,OAAO,GAAG,cAAc,CAC5B,QAAQ,EACR,IAAI,CAAC,oBAAoB,EACzB,OAAO,CAAC,QAAQ,EAAE,CACnB,CAAC;QACF,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;SACjD;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,oCAAiB,GAAxB;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,oBAAoB,CAAC;IAC1D,CAAC;IAED;;OAEG;IACI,4BAAS,GAAhB,UAAiB,IAAY,EAAE,OAAgB;QAC7C,OAAO,IAAI,CAAC,iBAAiB,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IAED,wCAAwC;IACjC,0BAAO,GAAd;QACE,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;QAC/C,IAAI,CAAC,oBAAoB,GAAG,IAAI,mBAAmB,EAAE,CAAC;IACxD,CAAC;IAeH,eAAC;AAAD,CAAC,AAnED,IAmEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { ProxyTracerProvider } from '../trace/ProxyTracerProvider';\nimport {\n isSpanContextValid,\n wrapSpanContext,\n} from '../trace/spancontext-utils';\nimport { Tracer } from '../trace/tracer';\nimport { TracerProvider } from '../trace/tracer_provider';\nimport {\n deleteSpan,\n getSpan,\n getSpanContext,\n setSpan,\n setSpanContext,\n} from '../trace/context-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'trace';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Tracing API\n */\nexport class 
TraceAPI {\n private static _instance?: TraceAPI;\n\n private _proxyTracerProvider = new ProxyTracerProvider();\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Trace API */\n public static getInstance(): TraceAPI {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global tracer.\n *\n * @returns true if the tracer provider was successfully registered, else false\n */\n public setGlobalTracerProvider(provider: TracerProvider): boolean {\n const success = registerGlobal(\n API_NAME,\n this._proxyTracerProvider,\n DiagAPI.instance()\n );\n if (success) {\n this._proxyTracerProvider.setDelegate(provider);\n }\n return success;\n }\n\n /**\n * Returns the global tracer provider.\n */\n public getTracerProvider(): TracerProvider {\n return getGlobal(API_NAME) || this._proxyTracerProvider;\n }\n\n /**\n * Returns a tracer from the global tracer provider.\n */\n public getTracer(name: string, version?: string): Tracer {\n return this.getTracerProvider().getTracer(name, version);\n }\n\n /** Remove the global tracer provider */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n this._proxyTracerProvider = new ProxyTracerProvider();\n }\n\n public wrapSpanContext = wrapSpanContext;\n\n public isSpanContextValid = isSpanContextValid;\n\n public deleteSpan = deleteSpan;\n\n public getSpan = getSpan;\n\n public getSpanContext = getSpanContext;\n\n public setSpan = setSpan;\n\n public setSpanContext = setSpanContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.d.ts new file mode 100644 index 00000000..a387685b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.d.ts @@ -0,0 +1,23 @@ +import { Context } from '../context/types'; +import { Baggage } from './types'; +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getBaggage(context: Context): Baggage | undefined; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export declare function setBaggage(context: Context, baggage: Baggage): Context; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export declare function deleteBaggage(context: Context): Context; +//# sourceMappingURL=context-helpers.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js new file mode 100644 index 00000000..9cd7bee2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { createContextKey } from '../context/context'; +/** + * Baggage key + */ +var BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map new file mode 100644 index 00000000..221f6d06 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-helpers.js","sourceRoot":"","sources":["../../../src/baggage/context-helpers.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD;;GAEG;AACH,IAAM,WAAW,GAAG,gBAAgB,CAAC,2BAA2B,CAAC,CAAC;AAElE;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB;IACzC,OAAQ,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa,IAAI,SAAS,CAAC;AACjE,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB,EAAE,OAAgB;IAC3D,OAAO,OAAO,CAAC,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;AAChD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,OAAgB;IAC5C,OAAO,OAAO,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAC1C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Baggage } from './types';\n\n/**\n * Baggage key\n */\nconst BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key');\n\n/**\n * Retrieve the current baggage from the given context\n *\n * @param {Context} Context that manage all context values\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getBaggage(context: Context): Baggage | undefined {\n return (context.getValue(BAGGAGE_KEY) as Baggage) || 
undefined;\n}\n\n/**\n * Store a baggage in the given context\n *\n * @param {Context} Context that manage all context values\n * @param {Baggage} baggage that will be set in the actual context\n */\nexport function setBaggage(context: Context, baggage: Baggage): Context {\n return context.setValue(BAGGAGE_KEY, baggage);\n}\n\n/**\n * Delete the baggage stored in the given context\n *\n * @param {Context} Context that manage all context values\n */\nexport function deleteBaggage(context: Context): Context {\n return context.deleteValue(BAGGAGE_KEY);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.d.ts new file mode 100644 index 00000000..e6b45540 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.d.ts @@ -0,0 +1,12 @@ +import type { Baggage, BaggageEntry } from '../types'; +export declare class BaggageImpl implements Baggage { + private _entries; + constructor(entries?: Map); + getEntry(key: string): BaggageEntry | undefined; + getAllEntries(): [string, BaggageEntry][]; + setEntry(key: string, entry: BaggageEntry): BaggageImpl; + removeEntry(key: string): BaggageImpl; + removeEntries(...keys: string[]): BaggageImpl; + clear(): BaggageImpl; +} +//# sourceMappingURL=baggage-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js new file mode 100644 index 00000000..30de6cbf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js @@ -0,0 +1,61 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var BaggageImpl = /** @class */ (function () { + function BaggageImpl(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + BaggageImpl.prototype.getEntry = function (key) { + var entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + }; + BaggageImpl.prototype.getAllEntries = function () { + return Array.from(this._entries.entries()).map(function (_a) { + var k = _a[0], v = _a[1]; + return [k, v]; + }); + }; + BaggageImpl.prototype.setEntry = function (key, entry) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + }; + BaggageImpl.prototype.removeEntry = function (key) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + }; + BaggageImpl.prototype.removeEntries = function () { + var keys = []; + for (var _i = 0; _i < arguments.length; _i++) { + keys[_i] = arguments[_i]; + } + var newBaggage = new BaggageImpl(this._entries); + for (var _a = 0, keys_1 = keys; _a < keys_1.length; _a++) { + var key = keys_1[_a]; + newBaggage._entries.delete(key); + } + return newBaggage; + }; + BaggageImpl.prototype.clear = function () { + return new BaggageImpl(); + }; + return BaggageImpl; +}()); +export { BaggageImpl }; +//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map new file mode 100644 index 00000000..d67f90c0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map @@ -0,0 +1 @@ +{"version":3,"file":"baggage-impl.js","sourceRoot":"","sources":["../../../../src/baggage/internal/baggage-impl.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH;IAGE,qBAAY,OAAmC;QAC7C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;IACzD,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW;QAClB,IAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;IAClC,CAAC;IAED,mCAAa,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,UAAC,EAAM;gBAAL,CAAC,QAAA,EAAE,CAAC,QAAA;YAAM,OAAA,CAAC,CAAC,EAAE,CAAC,CAAC;QAAN,CAAM,CAAC,CAAC;IACrE,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW,EAAE,KAAmB;QACvC,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QACpC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,iCAAW,GAAX,UAAY,GAAW;QACrB,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAChC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,mCAAa,GAAb;QAAc,cAAiB;aAAjB,UAAiB,EAAjB,qBAAiB,EAAjB,IAAiB;YAAjB,yBAAiB;;QAC7B,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,KAAkB,UAAI,EAAJ,aAAI,EAAJ,kBAAI,EAAJ,IAAI,EAAE;YAAnB,IAAM,GAAG,aAAA;YACZ,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACjC;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,2BAAK,GAAL;QACE,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;IACH,kBAAC;AAAD,CAAC,AA3CD,IA2CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR 
CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport type { Baggage, BaggageEntry } from '../types';\n\nexport class BaggageImpl implements Baggage {\n private _entries: Map;\n\n constructor(entries?: Map) {\n this._entries = entries ? new Map(entries) : new Map();\n }\n\n getEntry(key: string): BaggageEntry | undefined {\n const entry = this._entries.get(key);\n if (!entry) {\n return undefined;\n }\n\n return Object.assign({}, entry);\n }\n\n getAllEntries(): [string, BaggageEntry][] {\n return Array.from(this._entries.entries()).map(([k, v]) => [k, v]);\n }\n\n setEntry(key: string, entry: BaggageEntry): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.set(key, entry);\n return newBaggage;\n }\n\n removeEntry(key: string): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.delete(key);\n return newBaggage;\n }\n\n removeEntries(...keys: string[]): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n for (const key of keys) {\n newBaggage._entries.delete(key);\n }\n return newBaggage;\n }\n\n clear(): BaggageImpl {\n return new BaggageImpl();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.d.ts new file mode 100644 index 00000000..9cd991c1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.d.ts @@ -0,0 +1,5 @@ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export declare const baggageEntryMetadataSymbol: unique symbol; +//# sourceMappingURL=symbol.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js new file mode 100644 index 00000000..0e7dc36d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export var baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js.map new file mode 100644 index 00000000..f0748665 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js.map @@ -0,0 +1 @@ +{"version":3,"file":"symbol.js","sourceRoot":"","sources":["../../../../src/baggage/internal/symbol.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH;;GAEG;AACH,MAAM,CAAC,IAAM,0BAA0B,GAAG,MAAM,CAAC,sBAAsB,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Symbol used to make BaggageEntryMetadata an opaque type\n */\nexport const baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata');\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/types.d.ts new file mode 100644 index 00000000..32fa0ec6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/types.d.ts @@ -0,0 +1,60 @@ +import { baggageEntryMetadataSymbol } from './internal/symbol'; +export interface BaggageEntry { + /** `String` value of the `BaggageEntry`. */ + value: string; + /** + * Metadata is an optional string property defined by the W3C baggage specification. + * It currently has no special meaning defined by the specification. + */ + metadata?: BaggageEntryMetadata; +} +/** + * Serializable Metadata defined by the W3C baggage specification. + * It currently has no special meaning defined by the OpenTelemetry or W3C. + */ +export declare type BaggageEntryMetadata = { + toString(): string; +} & { + __TYPE__: typeof baggageEntryMetadataSymbol; +}; +/** + * Baggage represents collection of key-value pairs with optional metadata. + * Each key of Baggage is associated with exactly one value. + * Baggage may be used to annotate and enrich telemetry data. 
+ */ +export interface Baggage { + /** + * Get an entry from Baggage if it exists + * + * @param key The key which identifies the BaggageEntry + */ + getEntry(key: string): BaggageEntry | undefined; + /** + * Get a list of all entries in the Baggage + */ + getAllEntries(): [string, BaggageEntry][]; + /** + * Returns a new baggage with the entries from the current bag and the specified entry + * + * @param key string which identifies the baggage entry + * @param entry BaggageEntry for the given key + */ + setEntry(key: string, entry: BaggageEntry): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entry + * + * @param key key identifying the entry to be removed + */ + removeEntry(key: string): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entries + * + * @param key keys identifying the entries to be removed + */ + removeEntries(...key: string[]): Baggage; + /** + * Returns a new baggage with no entries + */ + clear(): Baggage; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.js b/node_modules/@opentelemetry/api/build/esm/baggage/types.js new file mode 100644 index 00000000..928faad0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/types.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/types.js.map new file mode 100644 index 00000000..ae80c197 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/baggage/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\n\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface BaggageEntry {\n /** `String` value of the `BaggageEntry`. 
*/\n value: string;\n /**\n * Metadata is an optional string property defined by the W3C baggage specification.\n * It currently has no special meaning defined by the specification.\n */\n metadata?: BaggageEntryMetadata;\n}\n\n/**\n * Serializable Metadata defined by the W3C baggage specification.\n * It currently has no special meaning defined by the OpenTelemetry or W3C.\n */\nexport type BaggageEntryMetadata = { toString(): string } & {\n __TYPE__: typeof baggageEntryMetadataSymbol;\n};\n\n/**\n * Baggage represents collection of key-value pairs with optional metadata.\n * Each key of Baggage is associated with exactly one value.\n * Baggage may be used to annotate and enrich telemetry data.\n */\nexport interface Baggage {\n /**\n * Get an entry from Baggage if it exists\n *\n * @param key The key which identifies the BaggageEntry\n */\n getEntry(key: string): BaggageEntry | undefined;\n\n /**\n * Get a list of all entries in the Baggage\n */\n getAllEntries(): [string, BaggageEntry][];\n\n /**\n * Returns a new baggage with the entries from the current bag and the specified entry\n *\n * @param key string which identifies the baggage entry\n * @param entry BaggageEntry for the given key\n */\n setEntry(key: string, entry: BaggageEntry): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entry\n *\n * @param key key identifying the entry to be removed\n */\n removeEntry(key: string): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entries\n *\n * @param key keys identifying the entries to be removed\n */\n removeEntries(...key: string[]): Baggage;\n\n /**\n * Returns a new baggage with no entries\n */\n clear(): Baggage;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/utils.d.ts new file mode 100644 index 00000000..9955d9e2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/utils.d.ts @@ -0,0 +1,15 @@ +import { Baggage, BaggageEntry, BaggageEntryMetadata } from './types'; +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export declare function createBaggage(entries?: Record): Baggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export declare function baggageEntryMetadataFromString(str: string): BaggageEntryMetadata; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.js b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js new file mode 100644 index 00000000..3cc27165 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { DiagAPI } from '../api/diag'; +import { BaggageImpl } from './internal/baggage-impl'; +import { baggageEntryMetadataSymbol } from './internal/symbol'; +var diag = DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export function createBaggage(entries) { + if (entries === void 0) { entries = {}; } + return new BaggageImpl(new Map(Object.entries(entries))); +} +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error("Cannot create baggage metadata from unknown type: " + typeof str); + str = ''; + } + return { + __TYPE__: baggageEntryMetadataSymbol, + toString: function () { + return str; + }, + }; +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map new file mode 100644 index 00000000..f5a00f5b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/baggage/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,yBAAyB,CAAC;AACtD,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAG/D,IAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;AAEhC;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAC3B,OAA0C;IAA1C,wBAAA,EAAA,YAA0C;IAE1C,OAAO,IAAI,WAAW,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC3D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,8BAA8B,CAC5C,GAAW;IAEX,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,IAAI,CAAC,KAAK,CACR,uDAAqD,OAAO,GAAK,CAClE,CAAC;QACF,GAAG,GAAG,EAAE,CAAC;KACV;IAED,OAAO;QACL,QAAQ,EAAE,0BAA0B;QACpC,QAAQ;YACN,OAAO,GAAG,CAAC;QACb,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagAPI } from '../api/diag';\nimport { BaggageImpl } from './internal/baggage-impl';\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\nimport { Baggage, BaggageEntry, BaggageEntryMetadata } from './types';\n\nconst diag = DiagAPI.instance();\n\n/**\n * Create a new Baggage with optional entries\n *\n * @param entries An array of baggage entries the new baggage should contain\n */\nexport function createBaggage(\n entries: Record = {}\n): Baggage {\n return new BaggageImpl(new Map(Object.entries(entries)));\n}\n\n/**\n * Create a serializable BaggageEntryMetadata object from a string.\n *\n * @param str string metadata. 
Format is currently not defined by the spec and has no special meaning.\n *\n */\nexport function baggageEntryMetadataFromString(\n str: string\n): BaggageEntryMetadata {\n if (typeof str !== 'string') {\n diag.error(\n `Cannot create baggage metadata from unknown type: ${typeof str}`\n );\n str = '';\n }\n\n return {\n __TYPE__: baggageEntryMetadataSymbol,\n toString() {\n return str;\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.d.ts b/node_modules/@opentelemetry/api/build/esm/common/Attributes.d.ts new file mode 100644 index 00000000..19994fb2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Attributes.d.ts @@ -0,0 +1,15 @@ +/** + * Attributes is a map from string to attribute values. + * + * Note: only the own enumerable keys are counted as valid attribute keys. + */ +export interface Attributes { + [attributeKey: string]: AttributeValue | undefined; +} +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type AttributeValue = string | number | boolean | Array | Array | Array; +//# sourceMappingURL=Attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.js b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js new file mode 100644 index 00000000..dbb1e497 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.js.map b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js.map new file mode 100644 index 00000000..b5906674 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Attributes.js","sourceRoot":"","sources":["../../../src/common/Attributes.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Attributes is a map from string to attribute values.\n *\n * Note: only the own enumerable keys are counted as valid attribute keys.\n */\nexport interface Attributes {\n [attributeKey: string]: AttributeValue | undefined;\n}\n\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport type AttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.d.ts b/node_modules/@opentelemetry/api/build/esm/common/Exception.d.ts new file mode 100644 index 00000000..e175a7fd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Exception.d.ts @@ -0,0 +1,26 @@ +interface ExceptionWithCode { + code: string | number; + name?: string; + message?: string; + stack?: string; +} +interface ExceptionWithMessage { + code?: string | number; + message: string; + name?: string; + stack?: string; +} +interface ExceptionWithName { + code?: string | number; + message?: string; + name: string; + stack?: string; +} +/** + * Defines Exception. + * + * string or an object with one of (message or name or code) and optional stack + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; +export {}; +//# sourceMappingURL=Exception.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.js b/node_modules/@opentelemetry/api/build/esm/common/Exception.js new file mode 100644 index 00000000..6522a8e6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Exception.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.js.map b/node_modules/@opentelemetry/api/build/esm/common/Exception.js.map new file mode 100644 index 00000000..989dd3d1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Exception.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Exception.js","sourceRoot":"","sources":["../../../src/common/Exception.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\ninterface ExceptionWithCode {\n code: string | number;\n name?: string;\n message?: string;\n stack?: string;\n}\n\ninterface ExceptionWithMessage {\n code?: string | number;\n message: string;\n name?: string;\n stack?: string;\n}\n\ninterface ExceptionWithName {\n code?: string | number;\n message?: string;\n name: string;\n stack?: string;\n}\n\n/**\n * Defines Exception.\n *\n * string or an object with one of (message or name or code) and optional stack\n */\nexport type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.d.ts b/node_modules/@opentelemetry/api/build/esm/common/Time.d.ts new file mode 100644 index 00000000..cc3c502c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Time.d.ts @@ -0,0 +1,20 @@ +/** + * Defines High-Resolution Time. + * + * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970. + * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds. + * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150. + * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds: + * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210. + * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds: + * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000. + * This is represented in HrTime format as [1609504210, 150000000]. + */ +export declare type HrTime = [number, number]; +/** + * Defines TimeInput. 
+ * + * hrtime, epoch milliseconds, performance.now() or Date + */ +export declare type TimeInput = HrTime | number | Date; +//# sourceMappingURL=Time.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.js b/node_modules/@opentelemetry/api/build/esm/common/Time.js new file mode 100644 index 00000000..2abdf582 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Time.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.js.map b/node_modules/@opentelemetry/api/build/esm/common/Time.js.map new file mode 100644 index 00000000..ae124f03 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Time.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Time.js","sourceRoot":"","sources":["../../../src/common/Time.ts"],"names":[],"mappings":"","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Defines High-Resolution Time.\n *\n * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970.\n * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds.\n * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150.\n * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds:\n * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210.\n * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds:\n * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000.\n * This is represented in HrTime format as [1609504210, 150000000].\n */\nexport type HrTime = [number, number];\n\n/**\n * Defines TimeInput.\n *\n * hrtime, epoch milliseconds, performance.now() or Date\n */\nexport type TimeInput = HrTime | number | Date;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.d.ts b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.d.ts new file mode 100644 index 00000000..48a16597 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.d.ts @@ -0,0 +1,9 @@ +import * as types from './types'; +export declare class NoopContextManager implements types.ContextManager { + active(): types.Context; + with ReturnType>(_context: types.Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + bind(_context: types.Context, target: T): T; + enable(): this; + disable(): this; +} +//# sourceMappingURL=NoopContextManager.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js new file mode 
100644 index 00000000..755322dd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +import { ROOT_CONTEXT } from './context'; +var NoopContextManager = /** @class */ (function () { + function NoopContextManager() { + } + NoopContextManager.prototype.active = function () { + return ROOT_CONTEXT; + }; + NoopContextManager.prototype.with = function (_context, fn, thisArg) { + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return fn.call.apply(fn, __spreadArray([thisArg], args)); + }; + NoopContextManager.prototype.bind = function (_context, target) { + return target; + }; + NoopContextManager.prototype.enable = function () { + return this; + }; + NoopContextManager.prototype.disable = function () { + return this; + }; + return NoopContextManager; +}()); +export { NoopContextManager }; +//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map new file mode 100644 index 00000000..f6217afc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopContextManager.js","sourceRoot":"","sources":["../../../src/context/NoopContextManager.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAGzC;IAAA;IAyBA,CAAC;IAxBC,mCAAM,GAAN;QACE,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,iCAAI,GAAJ,UACE,QAAuB,EACvB,EAAK,EACL,OAA8B;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,EAAE,CAAC,IAAI,OAAP,EAAE,iBAAM,OAAO,GAAK,IAAI,GAAE;IACnC,CAAC;IAED,iCAAI,GAAJ,UAAQ,QAAuB,EAAE,MAAS;QACxC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,mCAAM,GAAN;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IAED,oCAAO,GAAP;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IACH,yBAAC;AAAD,CAAC,AAzBD,IAyBC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ROOT_CONTEXT } from './context';\nimport * as types from './types';\n\nexport class NoopContextManager implements types.ContextManager {\n active(): 
types.Context {\n return ROOT_CONTEXT;\n }\n\n with ReturnType>(\n _context: types.Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return fn.call(thisArg, ...args);\n }\n\n bind(_context: types.Context, target: T): T {\n return target;\n }\n\n enable(): this {\n return this;\n }\n\n disable(): this {\n return this;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.d.ts b/node_modules/@opentelemetry/api/build/esm/context/context.d.ts new file mode 100644 index 00000000..8be02594 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/context.d.ts @@ -0,0 +1,6 @@ +import { Context } from './types'; +/** Get a key to uniquely identify a context value */ +export declare function createContextKey(description: string): symbol; +/** The root context is used as the default parent context when there is no active context */ +export declare const ROOT_CONTEXT: Context; +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.js b/node_modules/@opentelemetry/api/build/esm/context/context.js new file mode 100644 index 00000000..f8909dee --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/context.js @@ -0,0 +1,52 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** Get a key to uniquely identify a context value */ +export function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +var BaseContext = /** @class */ (function () { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + function BaseContext(parentContext) { + // for minification + var self = this; + self._currentContext = parentContext ? 
new Map(parentContext) : new Map(); + self.getValue = function (key) { return self._currentContext.get(key); }; + self.setValue = function (key, value) { + var context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = function (key) { + var context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } + return BaseContext; +}()); +/** The root context is used as the default parent context when there is no active context */ +export var ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.js.map b/node_modules/@opentelemetry/api/build/esm/context/context.js.map new file mode 100644 index 00000000..7a7affd0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/context/context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,qDAAqD;AACrD,MAAM,UAAU,gBAAgB,CAAC,WAAmB;IAClD,0EAA0E;IAC1E,2EAA2E;IAC3E,wEAAwE;IACxE,mDAAmD;IACnD,EAAE;IACF,8EAA8E;IAC9E,OAAO,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AACjC,CAAC;AAED;IAGE;;;;OAIG;IACH,qBAAY,aAAoC;QAC9C,mBAAmB;QACnB,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;QAE1E,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,IAAK,OAAA,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,EAA7B,CAA6B,CAAC;QAE/D,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,EAAE,KAAc;YAC1C,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;YACxC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;QAEF,IAAI,CAAC,WAAW,GAAG,UAAC,GAAW;YAC7B,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YACpC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;IACJ,CAAC;IAyBH,kBAAC;AAAD,CAAC,AApDD,IAoDC;AAED,6FAA6F;AAC7F,MAAM,CAAC,IAAM,YAAY,GAAY,IAAI,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from './types';\n\n/** Get a key to uniquely identify a context value */\nexport function createContextKey(description: string) {\n // The specification states that for the same input, multiple calls should\n // return different keys. 
Due to the nature of the JS dependency management\n // system, this creates problems where multiple versions of some package\n // could hold different keys for the same property.\n //\n // Therefore, we use Symbol.for which returns the same key for the same input.\n return Symbol.for(description);\n}\n\nclass BaseContext implements Context {\n private _currentContext!: Map;\n\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n constructor(parentContext?: Map) {\n // for minification\n const self = this;\n\n self._currentContext = parentContext ? new Map(parentContext) : new Map();\n\n self.getValue = (key: symbol) => self._currentContext.get(key);\n\n self.setValue = (key: symbol, value: unknown): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n\n self.deleteValue = (key: symbol): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n }\n\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n public getValue!: (key: symbol) => unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n public setValue!: (key: symbol, value: unknown) => Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n public deleteValue!: (key: symbol) => Context;\n}\n\n/** The root context is used as the default parent context when there is no active context */\nexport const ROOT_CONTEXT: Context = new BaseContext();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.d.ts b/node_modules/@opentelemetry/api/build/esm/context/types.d.ts new file mode 100644 index 00000000..7e866320 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/types.d.ts @@ -0,0 +1,52 @@ +export interface Context { + /** + * Get a value from the context. + * + * @param key key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key context key for which to set the value + * @param value value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. 
+ * + * @param key context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} +export interface ContextManager { + /** + * Get the current active context + */ + active(): Context; + /** + * Run the fn callback with object set as the current active context + * @param context Any object to set as the current active context + * @param fn A callback to be immediately run within a specific context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind an object as the current context (or a specific one) + * @param [context] Optionally specify the context which you want to assign + * @param target Any object to which a context need to be set + */ + bind(context: Context, target: T): T; + /** + * Enable context management + */ + enable(): this; + /** + * Disable context management + */ + disable(): this; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.js b/node_modules/@opentelemetry/api/build/esm/context/types.js new file mode 100644 index 00000000..928faad0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/types.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.js.map b/node_modules/@opentelemetry/api/build/esm/context/types.js.map new file mode 100644 index 00000000..d438aa31 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/context/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n getValue(key: symbol): unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n\nexport interface ContextManager {\n /**\n * Get the current active context\n */\n active(): Context;\n\n /**\n * Run the fn callback with object set as the current active context\n * @param context Any object to set as the current active context\n * @param fn A callback to be immediately run within a specific context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType;\n\n /**\n * Bind an object as the current context (or a specific one)\n * @param [context] Optionally specify the context which you want to assign\n * @param target Any object to which a context need to be set\n */\n bind(context: Context, target: T): T;\n\n /**\n * Enable context management\n */\n enable(): this;\n\n /**\n * Disable context management\n */\n disable(): this;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.d.ts new file mode 100644 index 00000000..f0609503 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.d.ts @@ -0,0 +1,20 @@ +import { ComponentLoggerOptions, DiagLogger } from './types'; +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. 
+ * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +export declare class DiagComponentLogger implements DiagLogger { + private _namespace; + constructor(props: ComponentLoggerOptions); + debug(...args: any[]): void; + error(...args: any[]): void; + info(...args: any[]): void; + warn(...args: any[]): void; + verbose(...args: any[]): void; +} +//# sourceMappingURL=ComponentLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js new file mode 100644 index 00000000..47de4dbb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js @@ -0,0 +1,77 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal } from '../internal/global-utils'; +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. + * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +var DiagComponentLogger = /** @class */ (function () { + function DiagComponentLogger(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + DiagComponentLogger.prototype.debug = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('debug', this._namespace, args); + }; + DiagComponentLogger.prototype.error = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('error', this._namespace, args); + }; + DiagComponentLogger.prototype.info = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('info', this._namespace, args); + }; + DiagComponentLogger.prototype.warn = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('warn', this._namespace, args); + }; + DiagComponentLogger.prototype.verbose = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('verbose', this._namespace, args); + }; + return DiagComponentLogger; +}()); +export { DiagComponentLogger }; +function logProxy(funcName, namespace, args) { + var logger = getGlobal('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName].apply(logger, args); +} +//# sourceMappingURL=ComponentLogger.js.map \ No 
newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map new file mode 100644 index 00000000..be524a74 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ComponentLogger.js","sourceRoot":"","sources":["../../../src/diag/ComponentLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAGrD;;;;;;;;GAQG;AACH;IAGE,6BAAY,KAA6B;QACvC,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,SAAS,IAAI,qBAAqB,CAAC;IAC7D,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,qCAAO,GAAd;QAAe,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QAC3B,OAAO,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACpD,CAAC;IACH,0BAAC;AAAD,CAAC,AA1BD,IA0BC;;AAED,SAAS,QAAQ,CACf,QAA0B,EAC1B,SAAiB,EACjB,IAAS;IAET,IAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;IACjC,6BAA6B;IAC7B,IAAI,CAAC,MAAM,EAAE;QACX,OAAO;KACR;IAED,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,EAAe,IAAoC,EAAE;AACpE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getGlobal } from '../internal/global-utils';\nimport { ComponentLoggerOptions, DiagLogger, DiagLogFunction } from './types';\n\n/**\n * Component Logger which is meant to be used as part of any component which\n * will add automatically additional namespace in front of the log message.\n * It will then forward all message to global diag logger\n * @example\n * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' });\n * cLogger.debug('test');\n * // @opentelemetry/instrumentation-http test\n */\nexport class DiagComponentLogger implements DiagLogger {\n private _namespace: string;\n\n constructor(props: ComponentLoggerOptions) {\n this._namespace = props.namespace || 'DiagComponentLogger';\n }\n\n public debug(...args: any[]): void {\n return logProxy('debug', this._namespace, args);\n }\n\n public error(...args: any[]): void {\n return logProxy('error', this._namespace, args);\n }\n\n public info(...args: any[]): void {\n return logProxy('info', this._namespace, args);\n }\n\n public warn(...args: any[]): void {\n return logProxy('warn', this._namespace, args);\n }\n\n public verbose(...args: any[]): void {\n return logProxy('verbose', this._namespace, args);\n }\n}\n\nfunction 
logProxy(\n funcName: keyof DiagLogger,\n namespace: string,\n args: any\n): void {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) {\n return;\n }\n\n args.unshift(namespace);\n return logger[funcName](...(args as Parameters));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.d.ts new file mode 100644 index 00000000..fa3db1e3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.d.ts @@ -0,0 +1,38 @@ +import { DiagLogger, DiagLogFunction } from './types'; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. + * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +export declare class DiagConsoleLogger implements DiagLogger { + constructor(); + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. Useful scenarios would be to log the execution + * order of async operations + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +//# sourceMappingURL=consoleLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js new file mode 100644 index 00000000..5965b8aa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
+ * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +var DiagConsoleLogger = /** @class */ (function () { + function DiagConsoleLogger() { + function _consoleFunc(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + var theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (var i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } + return DiagConsoleLogger; +}()); +export { DiagConsoleLogger }; +//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js.map new file mode 100644 index 00000000..fbfd0cf7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"consoleLogger.js","sourceRoot":"","sources":["../../../src/diag/consoleLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,IAAM,UAAU,GAAiD;IAC/D,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,OAAO,EAAE;CAC7B,CAAC;AAEF;;;;GAIG;AACH;IACE;QACE,SAAS,YAAY,CAAC,QAAwB;YAC5C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAI,OAAO,EAAE;oBACX,mFAAmF;oBACnF,sCAAsC;oBACtC,IAAI,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;oBAChC,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,6CAA6C;wBAC7C,sCAAsC;wBACtC,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC;qBACvB;oBAED,uBAAuB;oBACvB,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,OAAO,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACrC;iBACF;YACH,CAAC,CAAC;QACJ,CAAC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACvD;IACH,CAAC;IAkCH,wBAAC;AAAD,CAAC,AA3DD,IA2DC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger, DiagLogFunction } from './types';\n\ntype ConsoleMapKeys = 'error' | 'warn' | 'info' | 'debug' | 'trace';\nconst consoleMap: { n: keyof DiagLogger; c: ConsoleMapKeys }[] = [\n { n: 'error', c: 'error' },\n { n: 'warn', c: 'warn' },\n { n: 'info', c: 'info' },\n { n: 'debug', c: 'debug' },\n { n: 'verbose', c: 'trace' },\n];\n\n/**\n * A simple Immutable Console based 
diagnostic logger which will output any messages to the Console.\n * If you want to limit the amount of logging to a specific level or lower use the\n * {@link createLogLevelDiagLogger}\n */\nexport class DiagConsoleLogger implements DiagLogger {\n constructor() {\n function _consoleFunc(funcName: ConsoleMapKeys): DiagLogFunction {\n return function (...args) {\n if (console) {\n // Some environments only expose the console when the F12 developer console is open\n // eslint-disable-next-line no-console\n let theFunc = console[funcName];\n if (typeof theFunc !== 'function') {\n // Not all environments support all functions\n // eslint-disable-next-line no-console\n theFunc = console.log;\n }\n\n // One last final check\n if (typeof theFunc === 'function') {\n return theFunc.apply(console, args);\n }\n }\n };\n }\n\n for (let i = 0; i < consoleMap.length; i++) {\n this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c);\n }\n }\n\n /** Log an error scenario that was not expected and caused the requested operation to fail. */\n public error!: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n public warn!: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n public info!: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario. Useful scenarios would be to log the execution\n * order of async operations\n */\n public debug!: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n public verbose!: DiagLogFunction;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/index.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/index.d.ts new file mode 100644 index 00000000..11b35740 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/index.d.ts @@ -0,0 +1,3 @@ +export * from './consoleLogger'; +export * from './types'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/index.js b/node_modules/@opentelemetry/api/build/esm/diag/index.js new file mode 100644 index 00000000..61581955 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/index.js @@ -0,0 +1,18 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +export * from './consoleLogger'; +export * from './types'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/index.js.map b/node_modules/@opentelemetry/api/build/esm/diag/index.js.map new file mode 100644 index 00000000..a859e321 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/diag/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,iBAAiB,CAAC;AAChC,cAAc,SAAS,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './consoleLogger';\nexport * from './types';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.d.ts new file mode 100644 index 00000000..890b9f1e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.d.ts @@ -0,0 +1,3 @@ +import { DiagLogger, DiagLogLevel } from '../types'; +export declare function createLogLevelDiagLogger(maxLevel: DiagLogLevel, logger: DiagLogger): DiagLogger; +//# sourceMappingURL=logLevelLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js new file mode 100644 index 00000000..aedab38d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { DiagLogLevel } from '../types'; +export function createLogLevelDiagLogger(maxLevel, logger) { + if (maxLevel < DiagLogLevel.NONE) { + maxLevel = DiagLogLevel.NONE; + } + else if (maxLevel > DiagLogLevel.ALL) { + maxLevel = DiagLogLevel.ALL; + } + // In case the logger is null or undefined + logger = logger || {}; + function _filterFunc(funcName, theLevel) { + var theFunc = logger[funcName]; + if (typeof theFunc === 'function' && maxLevel >= theLevel) { + return theFunc.bind(logger); + } + return function () { }; + } + return { + error: _filterFunc('error', DiagLogLevel.ERROR), + warn: _filterFunc('warn', DiagLogLevel.WARN), + info: _filterFunc('info', DiagLogLevel.INFO), + debug: _filterFunc('debug', DiagLogLevel.DEBUG), + verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE), + }; +} +//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map new file mode 100644 index 00000000..7f9fafde --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logLevelLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/logLevelLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAA+B,YAAY,EAAE,MAAM,UAAU,CAAC;AAErE,MAAM,UAAU,wBAAwB,CACtC,QAAsB,EACtB,MAAkB;IAElB,IAAI,QAAQ,GAAG,YAAY,CAAC,IAAI,EAAE;QAChC,QAAQ,GAAG,YAAY,CAAC,IAAI,CAAC;KAC9B;SAAM,IAAI,QAAQ,GAAG,YAAY,CAAC,GAAG,EAAE;QACtC,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC;KAC7B;IAED,0CAA0C;IAC1C,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IAEtB,SAAS,WAAW,CAClB,QAA0B,EAC1B,QAAsB;QAEtB,IAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC;QAEjC,IAAI,OAAO,OAAO,KAAK,UAAU,IAAI,QAAQ,IAAI,QAAQ,EAAE;YACzD,OAAO,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC7B;QACD,OAAO,cAAa,CAAC,CAAC;IACxB,CAAC;IAED,OAAO;QACL,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,KAAK,CAAC;QAC/C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,YAAY,CAAC,IAAI,CAAC;QAC5C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,YAAY,CAAC,IAAI,CAAC;QAC5C,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,KAAK,CAAC;QAC/C,OAAO,EAAE,WAAW,CAAC,SAAS,EAAE,YAAY,CAAC,OAAO,CAAC;KACtD,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogFunction, DiagLogger, DiagLogLevel } from '../types';\n\nexport function createLogLevelDiagLogger(\n maxLevel: DiagLogLevel,\n logger: DiagLogger\n): DiagLogger {\n if (maxLevel < DiagLogLevel.NONE) {\n maxLevel = DiagLogLevel.NONE;\n } else if (maxLevel > DiagLogLevel.ALL) {\n maxLevel = DiagLogLevel.ALL;\n }\n\n // In case the logger is null or undefined\n logger = logger || {};\n\n function _filterFunc(\n funcName: keyof DiagLogger,\n theLevel: DiagLogLevel\n ): DiagLogFunction {\n const theFunc = logger[funcName];\n\n if (typeof theFunc === 'function' && maxLevel >= theLevel) {\n return theFunc.bind(logger);\n }\n return function () {};\n }\n\n return {\n error: 
_filterFunc('error', DiagLogLevel.ERROR),\n warn: _filterFunc('warn', DiagLogLevel.WARN),\n info: _filterFunc('info', DiagLogLevel.INFO),\n debug: _filterFunc('debug', DiagLogLevel.DEBUG),\n verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE),\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.d.ts new file mode 100644 index 00000000..ac71ee3b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.d.ts @@ -0,0 +1,8 @@ +import { DiagLogger } from '../types'; +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. + * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export declare function createNoopDiagLogger(): DiagLogger; +//# sourceMappingURL=noopLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js new file mode 100644 index 00000000..7d5ba63d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +function noopLogFunction() { } +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. 
+ * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export function createNoopDiagLogger() { + return { + verbose: noopLogFunction, + debug: noopLogFunction, + info: noopLogFunction, + warn: noopLogFunction, + error: noopLogFunction, + }; +} +//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js.map new file mode 100644 index 00000000..bf20aeaa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"noopLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/noopLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,SAAS,eAAe,KAAI,CAAC;AAE7B;;;;GAIG;AACH,MAAM,UAAU,oBAAoB;IAClC,OAAO;QACL,OAAO,EAAE,eAAe;QACxB,KAAK,EAAE,eAAe;QACtB,IAAI,EAAE,eAAe;QACrB,IAAI,EAAE,eAAe;QACrB,KAAK,EAAE,eAAe;KACvB,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger } from '../types';\n\nfunction noopLogFunction() {}\n\n/**\n * Returns a No-Op Diagnostic logger where all messages do nothing.\n * @implements {@link DiagLogger}\n * @returns {DiagLogger}\n */\nexport function createNoopDiagLogger(): DiagLogger {\n return {\n verbose: noopLogFunction,\n debug: noopLogFunction,\n info: noopLogFunction,\n warn: noopLogFunction,\n error: noopLogFunction,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/types.d.ts new file mode 100644 index 00000000..92b0c578 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/types.d.ts @@ -0,0 +1,70 @@ +export declare type DiagLogFunction = (message: string, ...args: unknown[]) => void; +/** + * Defines an internal diagnostic logger interface which is used to log internal diagnostic + * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function. + * API provided implementations include :- + * - a No-Op {@link createNoopDiagLogger} + * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger} + * - a general Console {@link DiagConsoleLogger} version. + */ +export interface DiagLogger { + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. 
+ * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. + * For example: Logging the order of execution of async operations. + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +export declare enum DiagLogLevel { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + NONE = 0, + /** Identifies an error scenario */ + ERROR = 30, + /** Identifies a warning scenario */ + WARN = 50, + /** General informational log message */ + INFO = 60, + /** General debug log message */ + DEBUG = 70, + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + VERBOSE = 80, + /** Used to set the logging level to include all logging */ + ALL = 9999 +} +/** + * Defines options for ComponentLogger + */ +export interface ComponentLoggerOptions { + namespace: string; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.js b/node_modules/@opentelemetry/api/build/esm/diag/types.js new file mode 100644 index 00000000..306585e8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/types.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. 
+ */ +export var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel || (DiagLogLevel = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.js.map b/node_modules/@opentelemetry/api/build/esm/diag/types.js.map new file mode 100644 index 00000000..aa3a7f1c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/diag/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AA+CH;;;;GAIG;AACH,MAAM,CAAN,IAAY,YAwBX;AAxBD,WAAY,YAAY;IACtB,uFAAuF;IACvF,+CAAQ,CAAA;IAER,mCAAmC;IACnC,kDAAU,CAAA;IAEV,oCAAoC;IACpC,gDAAS,CAAA;IAET,wCAAwC;IACxC,gDAAS,CAAA;IAET,gCAAgC;IAChC,kDAAU,CAAA;IAEV;;;OAGG;IACH,sDAAY,CAAA;IAEZ,2DAA2D;IAC3D,gDAAU,CAAA;AACZ,CAAC,EAxBW,YAAY,KAAZ,YAAY,QAwBvB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport type DiagLogFunction = (message: string, ...args: unknown[]) => void;\n\n/**\n * Defines an internal diagnostic logger interface which is used to log internal diagnostic\n * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function.\n * API provided implementations include :-\n * - a No-Op {@link createNoopDiagLogger}\n * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger}\n * - a general Console {@link DiagConsoleLogger} version.\n */\nexport interface DiagLogger {\n /** Log an error scenario that was not expected and caused the requested operation to fail. 
*/\n error: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n warn: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n info: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario.\n * For example: Logging the order of execution of async operations.\n */\n debug: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n verbose: DiagLogFunction;\n}\n\n/**\n * Defines the available internal logging levels for the diagnostic logger, the numeric values\n * of the levels are defined to match the original values from the initial LogLevel to avoid\n * compatibility/migration issues for any implementation that assume the numeric ordering.\n */\nexport enum DiagLogLevel {\n /** Diagnostic Logging level setting to disable all logging (except and forced logs) */\n NONE = 0,\n\n /** Identifies an error scenario */\n ERROR = 30,\n\n /** Identifies a warning scenario */\n WARN = 50,\n\n /** General informational log message */\n INFO = 60,\n\n /** General debug log message */\n DEBUG = 70,\n\n /**\n * Detailed trace level logging should only be used for development, should only be set\n * in a development environment.\n */\n VERBOSE = 80,\n\n /** Used to set the logging level to include all logging */\n ALL = 9999,\n}\n\n/**\n * Defines options for ComponentLogger\n */\nexport interface ComponentLoggerOptions {\n namespace: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.d.ts b/node_modules/@opentelemetry/api/build/esm/index.d.ts new file mode 100644 index 00000000..33ff2300 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/index.d.ts @@ -0,0 +1,57 @@ +export * from './baggage/types'; +export { baggageEntryMetadataFromString } from './baggage/utils'; +export * from './common/Exception'; +export * from './common/Time'; +export * from './common/Attributes'; +export * from './diag'; +export * from './propagation/TextMapPropagator'; +export * from './trace/attributes'; +export * from './trace/link'; +export * from './trace/ProxyTracer'; +export * from './trace/ProxyTracerProvider'; +export * from './trace/Sampler'; +export * from './trace/SamplingResult'; +export * from './trace/span_context'; +export * from './trace/span_kind'; +export * from './trace/span'; +export * from './trace/SpanOptions'; +export * from './trace/status'; +export * from './trace/trace_flags'; +export * from './trace/trace_state'; +export { createTraceState } from './trace/internal/utils'; +export * from './trace/tracer_provider'; +export * from './trace/tracer'; +export * from './trace/tracer_options'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, 
INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +export * from './context/context'; +export * from './context/types'; +import { ContextAPI } from './api/context'; +export type { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export declare const context: ContextAPI; +import { TraceAPI } from './api/trace'; +export type { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export declare const trace: TraceAPI; +import { PropagationAPI } from './api/propagation'; +export type { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export declare const propagation: PropagationAPI; +import { DiagAPI } from './api/diag'; +export type { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +export declare const diag: DiagAPI; +declare const _default: { + trace: TraceAPI; + context: ContextAPI; + propagation: PropagationAPI; + diag: DiagAPI; +}; +export default _default; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.js b/node_modules/@opentelemetry/api/build/esm/index.js new file mode 100644 index 00000000..97ce3464 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/index.js @@ -0,0 +1,67 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './baggage/types'; +export { baggageEntryMetadataFromString } from './baggage/utils'; +export * from './common/Exception'; +export * from './common/Time'; +export * from './common/Attributes'; +export * from './diag'; +export * from './propagation/TextMapPropagator'; +export * from './trace/attributes'; +export * from './trace/link'; +export * from './trace/ProxyTracer'; +export * from './trace/ProxyTracerProvider'; +export * from './trace/Sampler'; +export * from './trace/SamplingResult'; +export * from './trace/span_context'; +export * from './trace/span_kind'; +export * from './trace/span'; +export * from './trace/SpanOptions'; +export * from './trace/status'; +export * from './trace/trace_flags'; +export * from './trace/trace_state'; +export { createTraceState } from './trace/internal/utils'; +export * from './trace/tracer_provider'; +export * from './trace/tracer'; +export * from './trace/tracer_options'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +export * from './context/context'; +export * from './context/types'; +import { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export var context = ContextAPI.getInstance(); +import { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export var trace = TraceAPI.getInstance(); +import { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export var propagation = PropagationAPI.getInstance(); +import { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. 
+ */ +export var diag = DiagAPI.instance(); +export default { + trace: trace, + context: context, + propagation: propagation, + diag: diag, +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.js.map b/node_modules/@opentelemetry/api/build/esm/index.js.map new file mode 100644 index 00000000..4a82f0b6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,iBAAiB,CAAC;AAChC,OAAO,EAAE,8BAA8B,EAAE,MAAM,iBAAiB,CAAC;AACjE,cAAc,oBAAoB,CAAC;AACnC,cAAc,eAAe,CAAC;AAC9B,cAAc,qBAAqB,CAAC;AACpC,cAAc,QAAQ,CAAC;AACvB,cAAc,iCAAiC,CAAC;AAChD,cAAc,oBAAoB,CAAC;AACnC,cAAc,cAAc,CAAC;AAC7B,cAAc,qBAAqB,CAAC;AACpC,cAAc,6BAA6B,CAAC;AAC5C,cAAc,iBAAiB,CAAC;AAChC,cAAc,wBAAwB,CAAC;AACvC,cAAc,sBAAsB,CAAC;AACrC,cAAc,mBAAmB,CAAC;AAClC,cAAc,cAAc,CAAC;AAC7B,cAAc,qBAAqB,CAAC;AACpC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,qBAAqB,CAAC;AACpC,cAAc,qBAAqB,CAAC;AACpC,OAAO,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AAC1D,cAAc,yBAAyB,CAAC;AACxC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,wBAAwB,CAAC;AAEvC,OAAO,EACL,kBAAkB,EAClB,cAAc,EACd,aAAa,GACd,MAAM,2BAA2B,CAAC;AAEnC,OAAO,EACL,cAAc,EACd,eAAe,EACf,oBAAoB,GACrB,MAAM,gCAAgC,CAAC;AAExC,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAEhC,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAE3C,iCAAiC;AACjC,MAAM,CAAC,IAAM,OAAO,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC;AAEhD,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC,+BAA+B;AAC/B,MAAM,CAAC,IAAM,KAAK,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;AAE5C,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAEnD,qCAAqC;AACrC,MAAM,CAAC,IAAM,WAAW,GAAG,cAAc,CAAC,WAAW,EAAE,CAAC;AAExD,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAGrC;;;;;GAKG;AACH,MAAM,CAAC,IAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;AAEvC,eAAe;IACb,KAAK,OAAA;IACL,OAAO,SAAA;IACP,WAAW,aAAA;IACX,IAAI,MAAA;CACL,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './baggage/types';\nexport { baggageEntryMetadataFromString } from './baggage/utils';\nexport * from './common/Exception';\nexport * from './common/Time';\nexport * from './common/Attributes';\nexport * from './diag';\nexport * from './propagation/TextMapPropagator';\nexport * from './trace/attributes';\nexport * from './trace/link';\nexport * from './trace/ProxyTracer';\nexport * from './trace/ProxyTracerProvider';\nexport * from './trace/Sampler';\nexport * from './trace/SamplingResult';\nexport * from './trace/span_context';\nexport * from './trace/span_kind';\nexport * from './trace/span';\nexport * from './trace/SpanOptions';\nexport * from './trace/status';\nexport * from './trace/trace_flags';\nexport * from './trace/trace_state';\nexport { createTraceState } from './trace/internal/utils';\nexport * from './trace/tracer_provider';\nexport * from './trace/tracer';\nexport * from './trace/tracer_options';\n\nexport {\n isSpanContextValid,\n isValidTraceId,\n isValidSpanId,\n} from 
'./trace/spancontext-utils';\n\nexport {\n INVALID_SPANID,\n INVALID_TRACEID,\n INVALID_SPAN_CONTEXT,\n} from './trace/invalid-span-constants';\n\nexport * from './context/context';\nexport * from './context/types';\n\nimport { ContextAPI } from './api/context';\nexport type { ContextAPI } from './api/context';\n/** Entrypoint for context API */\nexport const context = ContextAPI.getInstance();\n\nimport { TraceAPI } from './api/trace';\nexport type { TraceAPI } from './api/trace';\n/** Entrypoint for trace API */\nexport const trace = TraceAPI.getInstance();\n\nimport { PropagationAPI } from './api/propagation';\nexport type { PropagationAPI } from './api/propagation';\n/** Entrypoint for propagation API */\nexport const propagation = PropagationAPI.getInstance();\n\nimport { DiagAPI } from './api/diag';\nexport type { DiagAPI } from './api/diag';\n\n/**\n * Entrypoint for Diag API.\n * Defines Diagnostic handler used for internal diagnostic logging operations.\n * The default provides a Noop DiagLogger implementation which may be changed via the\n * diag.setLogger(logger: DiagLogger) function.\n */\nexport const diag = DiagAPI.instance();\n\nexport default {\n trace,\n context,\n propagation,\n diag,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.d.ts b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.d.ts new file mode 100644 index 00000000..bb90097f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.d.ts @@ -0,0 +1,16 @@ +import { ContextManager } from '../context/types'; +import { DiagLogger } from '../diag'; +import { TextMapPropagator } from '../propagation/TextMapPropagator'; +import type { TracerProvider } from '../trace/tracer_provider'; +export declare function registerGlobal(type: Type, instance: OTelGlobalAPI[Type], diag: DiagLogger, allowOverride?: boolean): boolean; +export declare function getGlobal(type: Type): OTelGlobalAPI[Type] | undefined; +export declare function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger): void; +declare type OTelGlobalAPI = { + version: string; + diag?: DiagLogger; + trace?: TracerProvider; + context?: ContextManager; + propagation?: TextMapPropagator; +}; +export {}; +//# sourceMappingURL=global-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js new file mode 100644 index 00000000..2fddde35 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { _globalThis } from '../platform'; +import { VERSION } from '../version'; +import { isCompatible } from './semver'; +var major = VERSION.split('.')[0]; +var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." 
+ major); +var _global = _globalThis; +export function registerGlobal(type, instance, diag, allowOverride) { + var _a; + if (allowOverride === void 0) { allowOverride = false; } + var api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + version: VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + var err = new Error("@opentelemetry/api: Attempted duplicate registration of API: " + type); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== VERSION) { + // All registered APIs must be of the same version exactly + var err = new Error('@opentelemetry/api: All API registration versions must match'); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug("@opentelemetry/api: Registered a global for " + type + " v" + VERSION + "."); + return true; +} +export function getGlobal(type) { + var _a, _b; + var globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !isCompatible(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; +} +export function unregisterGlobal(type, diag) { + diag.debug("@opentelemetry/api: Unregistering a global for " + type + " v" + VERSION + "."); + var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map new file mode 100644 index 00000000..b8ea29e2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"global-utils.js","sourceRoot":"","sources":["../../../src/internal/global-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAG1C,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAExC,IAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,IAAM,4BAA4B,GAAG,MAAM,CAAC,GAAG,CAC7C,0BAAwB,KAAO,CAChC,CAAC;AAEF,IAAM,OAAO,GAAG,WAAyB,CAAC;AAE1C,MAAM,UAAU,cAAc,CAC5B,IAAU,EACV,QAA6B,EAC7B,IAAgB,EAChB,aAAqB;;IAArB,8BAAA,EAAA,qBAAqB;IAErB,IAAM,GAAG,GAAG,CAAC,OAAO,CAAC,4BAA4B,CAAC,GAAG,MAAA,OAAO,CAC1D,4BAA4B,CAC7B,mCAAI;QACH,OAAO,EAAE,OAAO;KACjB,CAAC,CAAC;IAEH,IAAI,CAAC,aAAa,IAAI,GAAG,CAAC,IAAI,CAAC,EAAE;QAC/B,yCAAyC;QACzC,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,kEAAgE,IAAM,CACvE,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,IAAI,GAAG,CAAC,OAAO,KAAK,OAAO,EAAE;QAC3B,0DAA0D;QAC1D,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,8DAA8D,CAC/D,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,GAAG,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;IACrB,IAAI,CAAC,KAAK,CACR,iDAA+C,IAAI,UAAK,OAAO,MAAG,CACnE,CAAC;IAEF,OAAO,IAAI,CAAC;AACd,CAAC;AAED,MAAM,UAAU,SAAS,CACvB,IAAU;;IAEV,IAAM,aAAa,GAAG,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAE,OAAO,CAAC;IACrE,IAAI,CAAC,aAAa,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,EAAE;QAClD,OAAO;KACR;IACD,OAAO,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAG,IAAI,CAAC,CAAC;AACvD,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,IAAyB,EAAE,IAAgB;IAC1E,IAAI,CAAC,KAAK,CACR,oDAAkD,IAAI,UAAK,OAAO,MAAG,CACtE,CAAC;IACF,IAAM,GAAG,GAAG,OAAO,CAAC,4BAA4B,CAAC,CAAC;IAElD,IAAI,GAAG,EAAE;QACP,OAAO,GAAG,CAAC,IAAI,CAAC,CAAC;KAClB;AACH,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextManager } from '../context/types';\nimport { DiagLogger } from '../diag';\nimport { _globalThis } from '../platform';\nimport { TextMapPropagator } from '../propagation/TextMapPropagator';\nimport type { TracerProvider } from '../trace/tracer_provider';\nimport { VERSION } from '../version';\nimport { isCompatible } from './semver';\n\nconst major = VERSION.split('.')[0];\nconst GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(\n `opentelemetry.js.api.${major}`\n);\n\nconst _global = _globalThis as OTelGlobal;\n\nexport function registerGlobal(\n type: Type,\n instance: OTelGlobalAPI[Type],\n diag: DiagLogger,\n allowOverride = false\n): boolean {\n const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = _global[\n GLOBAL_OPENTELEMETRY_API_KEY\n ] ?? 
{\n version: VERSION,\n });\n\n if (!allowOverride && api[type]) {\n // already registered an API of this type\n const err = new Error(\n `@opentelemetry/api: Attempted duplicate registration of API: ${type}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n if (api.version !== VERSION) {\n // All registered APIs must be of the same version exactly\n const err = new Error(\n '@opentelemetry/api: All API registration versions must match'\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n api[type] = instance;\n diag.debug(\n `@opentelemetry/api: Registered a global for ${type} v${VERSION}.`\n );\n\n return true;\n}\n\nexport function getGlobal(\n type: Type\n): OTelGlobalAPI[Type] | undefined {\n const globalVersion = _global[GLOBAL_OPENTELEMETRY_API_KEY]?.version;\n if (!globalVersion || !isCompatible(globalVersion)) {\n return;\n }\n return _global[GLOBAL_OPENTELEMETRY_API_KEY]?.[type];\n}\n\nexport function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger) {\n diag.debug(\n `@opentelemetry/api: Unregistering a global for ${type} v${VERSION}.`\n );\n const api = _global[GLOBAL_OPENTELEMETRY_API_KEY];\n\n if (api) {\n delete api[type];\n }\n}\n\ntype OTelGlobal = {\n [GLOBAL_OPENTELEMETRY_API_KEY]?: OTelGlobalAPI;\n};\n\ntype OTelGlobalAPI = {\n version: string;\n\n diag?: DiagLogger;\n trace?: TracerProvider;\n context?: ContextManager;\n propagation?: TextMapPropagator;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.d.ts b/node_modules/@opentelemetry/api/build/esm/internal/semver.d.ts new file mode 100644 index 00000000..d9f4259e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/semver.d.ts @@ -0,0 +1,34 @@ +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. + * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export declare function _makeCompatibilityCheck(ownVersion: string): (globalVersion: string) => boolean; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export declare const isCompatible: (globalVersion: string) => boolean; +//# sourceMappingURL=semver.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.js b/node_modules/@opentelemetry/api/build/esm/internal/semver.js new file mode 100644 index 00000000..2a788a0b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/semver.js @@ -0,0 +1,118 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { VERSION } from '../version'; +var re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export function _makeCompatibilityCheck(ownVersion) { + var acceptedVersions = new Set([ownVersion]); + var rejectedVersions = new Set(); + var myVersionMatch = ownVersion.match(re); + if (!myVersionMatch) { + // we cannot guarantee compatibility so we always return noop + return function () { return false; }; + } + var ownVersionParsed = { + major: +myVersionMatch[1], + minor: +myVersionMatch[2], + patch: +myVersionMatch[3], + prerelease: myVersionMatch[4], + }; + // if ownVersion has a prerelease tag, versions must match exactly + if (ownVersionParsed.prerelease != null) { + return function isExactmatch(globalVersion) { + return globalVersion === ownVersion; + }; + } + function _reject(v) { + rejectedVersions.add(v); + return false; + } + function _accept(v) { + acceptedVersions.add(v); + return true; + } + return function isCompatible(globalVersion) { + if (acceptedVersions.has(globalVersion)) { + return true; + } + if (rejectedVersions.has(globalVersion)) { + return false; + } + var globalVersionMatch = globalVersion.match(re); + if (!globalVersionMatch) { + // cannot parse other version + // we cannot guarantee compatibility so we always noop + return _reject(globalVersion); + } + var globalVersionParsed = { + major: +globalVersionMatch[1], + minor: +globalVersionMatch[2], + patch: +globalVersionMatch[3], + prerelease: globalVersionMatch[4], + }; + // if globalVersion has a prerelease tag, versions must match exactly + if (globalVersionParsed.prerelease != null) { + return _reject(globalVersion); + } + // major versions must match + if (ownVersionParsed.major !== globalVersionParsed.major) { + return _reject(globalVersion); + } + if (ownVersionParsed.major === 0) { + if (ownVersionParsed.minor === globalVersionParsed.minor && + ownVersionParsed.patch <= globalVersionParsed.patch) { + return _accept(globalVersion); + } + return _reject(globalVersion); + } + if (ownVersionParsed.minor <= globalVersionParsed.minor) { + return _accept(globalVersion); + } + return _reject(globalVersion); + }; +} +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export var isCompatible = _makeCompatibilityCheck(VERSION); +//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map b/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map new file mode 100644 index 00000000..75579a4a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map @@ -0,0 +1 @@ +{"version":3,"file":"semver.js","sourceRoot":"","sources":["../../../src/internal/semver.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAErC,IAAM,EAAE,GAAG,+BAA+B,CAAC;AAE3C;;;;;;;;;;;;;;;GAeG;AACH,MAAM,UAAU,uBAAuB,CACrC,UAAkB;IAElB,IAAM,gBAAgB,GAAG,IAAI,GAAG,CAAS,CAAC,UAAU,CAAC,CAAC,CAAC;IACvD,IAAM,gBAAgB,GAAG,IAAI,GAAG,EAAU,CAAC;IAE3C,IAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAC5C,IAAI,CAAC,cAAc,EAAE;QACnB,6DAA6D;QAC7D,OAAO,cAAM,OAAA,KAAK,EAAL,CAAK,CAAC;KACpB;IAED,IAAM,gBAAgB,GAAG;QACvB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,UAAU,EAAE,cAAc,CAAC,CAAC,CAAC;KAC9B,CAAC;IAEF,kEAAkE;IAClE,IAAI,gBAAgB,CAAC,UAAU,IAAI,IAAI,EAAE;QACvC,OAAO,SAAS,YAAY,CAAC,aAAqB;YAChD,OAAO,aAAa,KAAK,UAAU,CAAC;QACtC,CAAC,CAAC;KACH;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO,SAAS,YAAY,CAAC,aAAqB;QAChD,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,IAAI,CAAC;SACb;QAED,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;QAED,IAAM,kBAAkB,GAAG,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QACnD,IAAI,CAAC,kBAAkB,EAAE;YACvB,6BAA6B;YAC7B,sDAAsD;YACtD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAM,mBAAmB,GAAG;YAC1B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,UAAU,EAAE,kBAAkB,CAAC,CAAC,CAAC;SAClC,CAAC;QAEF,qEAAqE;QACrE,IAAI,mBAAmB,CAAC,UAAU,IAAI,IAAI,EAAE;YAC1C,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,4BAA4B;QAC5B,IAAI,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK,EAAE;YACxD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,KAAK,CAAC,EAAE;YAChC,IACE,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK;gBACpD,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EACnD;gBACA,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EAAE;YACvD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;IAChC,CAAC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAC,IAAM,YAAY,GAAG,uBAAuB,CAAC,OAAO,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache 
License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { VERSION } from '../version';\n\nconst re = /^(\\d+)\\.(\\d+)\\.(\\d+)(-(.+))?$/;\n\n/**\n * Create a function to test an API version to see if it is compatible with the provided ownVersion.\n *\n * The returned function has the following semantics:\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param ownVersion version which should be checked against\n */\nexport function _makeCompatibilityCheck(\n ownVersion: string\n): (globalVersion: string) => boolean {\n const acceptedVersions = new Set([ownVersion]);\n const rejectedVersions = new Set();\n\n const myVersionMatch = ownVersion.match(re);\n if (!myVersionMatch) {\n // we cannot guarantee compatibility so we always return noop\n return () => false;\n }\n\n const ownVersionParsed = {\n major: +myVersionMatch[1],\n minor: +myVersionMatch[2],\n patch: +myVersionMatch[3],\n prerelease: myVersionMatch[4],\n };\n\n // if ownVersion has a prerelease tag, versions must match exactly\n if (ownVersionParsed.prerelease != null) {\n return function isExactmatch(globalVersion: string): boolean {\n return globalVersion === ownVersion;\n };\n }\n\n function _reject(v: string) {\n rejectedVersions.add(v);\n return false;\n }\n\n function _accept(v: string) {\n acceptedVersions.add(v);\n return true;\n }\n\n return function isCompatible(globalVersion: string): boolean {\n if (acceptedVersions.has(globalVersion)) {\n return true;\n }\n\n if (rejectedVersions.has(globalVersion)) {\n return false;\n }\n\n const globalVersionMatch = globalVersion.match(re);\n if (!globalVersionMatch) {\n // cannot parse other version\n // we cannot guarantee compatibility so we always noop\n return _reject(globalVersion);\n }\n\n const globalVersionParsed = {\n major: +globalVersionMatch[1],\n minor: +globalVersionMatch[2],\n patch: +globalVersionMatch[3],\n prerelease: globalVersionMatch[4],\n };\n\n // if globalVersion has a prerelease tag, versions must match exactly\n if (globalVersionParsed.prerelease != null) {\n return _reject(globalVersion);\n }\n\n // major versions must match\n if (ownVersionParsed.major !== globalVersionParsed.major) {\n return _reject(globalVersion);\n }\n\n if (ownVersionParsed.major === 0) {\n if (\n ownVersionParsed.minor === globalVersionParsed.minor &&\n ownVersionParsed.patch <= globalVersionParsed.patch\n ) {\n return _accept(globalVersion);\n }\n\n 
return _reject(globalVersion);\n }\n\n if (ownVersionParsed.minor <= globalVersionParsed.minor) {\n return _accept(globalVersion);\n }\n\n return _reject(globalVersion);\n };\n}\n\n/**\n * Test an API version to see if it is compatible with this API.\n *\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param version version of the API requesting an instance of the global API\n */\nexport const isCompatible = _makeCompatibilityCheck(VERSION);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.d.ts new file mode 100644 index 00000000..e73fd73e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.d.ts @@ -0,0 +1,10 @@ +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js new file mode 100644 index 00000000..1d20f5d9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Updates to this file should also be replicated to @opentelemetry/api-metrics and +// @opentelemetry/core too. +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef +export var _globalThis = typeof globalThis === 'object' ? globalThis : + typeof self === 'object' ? self : + typeof window === 'object' ? window : + typeof global === 'object' ? 
global : + {}; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js.map b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js.map new file mode 100644 index 00000000..5936f5fb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/browser/globalThis.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,mFAAmF;AACnF,2BAA2B;AAE3B;;;;;;GAMG;AAEH,gEAAgE;AAChE,8EAA8E;AAC9E,MAAM,CAAC,IAAM,WAAW,GACtB,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC;IAC7C,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QACjC,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;YACrC,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;gBACrC,EAAuB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Updates to this file should also be replicated to @opentelemetry/api-metrics and\n// @opentelemetry/core too.\n\n/**\n * - globalThis (New standard)\n * - self (Will return the current window instance for supported browsers)\n * - window (fallback for older browser implementations)\n * - global (NodeJS implementation)\n * - (When all else fails)\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef\nexport const _globalThis: typeof globalThis =\n typeof globalThis === 'object' ? globalThis :\n typeof self === 'object' ? self :\n typeof window === 'object' ? window :\n typeof global === 'object' ? global :\n {} as typeof globalThis;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.d.ts new file mode 100644 index 00000000..ba20e123 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js new file mode 100644 index 00000000..efcad2e9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +export * from './globalThis'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js.map b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js.map new file mode 100644 index 00000000..07adcd9d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/browser/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,cAAc,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/index.d.ts new file mode 100644 index 00000000..90595da9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/index.d.ts @@ -0,0 +1,2 @@ +export * from './node'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.js b/node_modules/@opentelemetry/api/build/esm/platform/index.js new file mode 100644 index 00000000..c0df125c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './node'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.js.map b/node_modules/@opentelemetry/api/build/esm/platform/index.js.map new file mode 100644 index 00000000..9494c531 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/platform/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,QAAQ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './node';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.d.ts new file mode 100644 index 00000000..e3c83e5d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.d.ts @@ -0,0 +1,3 @@ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js new file mode 100644 index 00000000..feb97000 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js @@ -0,0 +1,19 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +export var _globalThis = typeof globalThis === 'object' ? 
globalThis : global; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map new file mode 100644 index 00000000..5911136a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/node/globalThis.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,gEAAgE;AAChE,oEAAoE;AACpE,MAAM,CAAC,IAAM,WAAW,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexport const _globalThis = typeof globalThis === 'object' ? globalThis : global;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/node/index.d.ts new file mode 100644 index 00000000..ba20e123 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.js b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js new file mode 100644 index 00000000..efcad2e9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './globalThis'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.js.map b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js.map new file mode 100644 index 00000000..f2797186 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/node/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,cAAc,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.d.ts new file mode 100644 index 00000000..398021f3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.d.ts @@ -0,0 +1,13 @@ +import { Context } from '../context/types'; +import { TextMapPropagator } from './TextMapPropagator'; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +export declare class NoopTextMapPropagator implements TextMapPropagator { + /** Noop inject function does nothing */ + inject(_context: Context, _carrier: unknown): void; + /** Noop extract function does nothing and returns the input context */ + extract(context: Context, _carrier: unknown): Context; + fields(): string[]; +} +//# sourceMappingURL=NoopTextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js new file mode 100644 index 00000000..8e629012 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js @@ -0,0 +1,34 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * No-op implementations of {@link TextMapPropagator}. 
+ */ +var NoopTextMapPropagator = /** @class */ (function () { + function NoopTextMapPropagator() { + } + /** Noop inject function does nothing */ + NoopTextMapPropagator.prototype.inject = function (_context, _carrier) { }; + /** Noop extract function does nothing and returns the input context */ + NoopTextMapPropagator.prototype.extract = function (context, _carrier) { + return context; + }; + NoopTextMapPropagator.prototype.fields = function () { + return []; + }; + return NoopTextMapPropagator; +}()); +export { NoopTextMapPropagator }; +//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map new file mode 100644 index 00000000..40be5661 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/NoopTextMapPropagator.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH;;GAEG;AACH;IAAA;IAUA,CAAC;IATC,wCAAwC;IACxC,sCAAM,GAAN,UAAO,QAAiB,EAAE,QAAiB,IAAS,CAAC;IACrD,uEAAuE;IACvE,uCAAO,GAAP,UAAQ,OAAgB,EAAE,QAAiB;QACzC,OAAO,OAAO,CAAC;IACjB,CAAC;IACD,sCAAM,GAAN;QACE,OAAO,EAAE,CAAC;IACZ,CAAC;IACH,4BAAC;AAAD,CAAC,AAVD,IAUC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { TextMapPropagator } from './TextMapPropagator';\n\n/**\n * No-op implementations of {@link TextMapPropagator}.\n */\nexport class NoopTextMapPropagator implements TextMapPropagator {\n /** Noop inject function does nothing */\n inject(_context: Context, _carrier: unknown): void {}\n /** Noop extract function does nothing and returns the input context */\n extract(context: Context, _carrier: unknown): Context {\n return context;\n }\n fields(): string[] {\n return [];\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.d.ts new file mode 100644 index 00000000..dc39367b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.d.ts @@ -0,0 +1,84 @@ +import { Context } from '../context/types'; +/** + * Injects `Context` into and extracts it from carriers that travel + * in-band across process boundaries. Encoding is expected to conform to the + * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request + * headers. + * + * The carrier of propagated data on both the client (injector) and server + * (extractor) side is usually an object such as http headers. Propagation is + * usually implemented via library-specific request interceptors, where the + * client-side injects values and the server-side extracts them. 
+ */ +export interface TextMapPropagator { + /** + * Injects values from a given `Context` into a carrier. + * + * OpenTelemetry defines a common set of format values (TextMapPropagator), + * and each has an expected `carrier` type. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param setter an optional {@link TextMapSetter}. If undefined, values will be + * set by direct object assignment. + */ + inject(context: Context, carrier: Carrier, setter: TextMapSetter): void; + /** + * Given a `Context` and a carrier, extract context values from a + * carrier and return a new context, created from the old context, with the + * extracted values. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all + * own properties, and keys will be accessed by direct object access. + */ + extract(context: Context, carrier: Carrier, getter: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; +} +/** + * A setter is specified by the caller to define a specific method + * to set key/value pairs on the carrier within a propagator. + */ +export interface TextMapSetter { + /** + * Callback used to set a key/value pair on an object. + * + * Should be called by the propagator each time a key/value pair + * should be set, and should set that key/value pair on the propagator. + * + * @param carrier object or class which carries key/value pairs + * @param key string key to modify + * @param value value to be set to the key on the carrier + */ + set(carrier: Carrier, key: string, value: string): void; +} +/** + * A getter is specified by the caller to define a specific method + * to get the value of a key from a carrier. + */ +export interface TextMapGetter { + /** + * Get a list of all keys available on the carrier. + * + * @param carrier + */ + keys(carrier: Carrier): string[]; + /** + * Get the value of a specific key from the carrier. + * + * @param carrier + * @param key + */ + get(carrier: Carrier, key: string): undefined | string | string[]; +} +export declare const defaultTextMapGetter: TextMapGetter; +export declare const defaultTextMapSetter: TextMapSetter; +//# sourceMappingURL=TextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js new file mode 100644 index 00000000..c5f5311f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export var defaultTextMapGetter = { + get: function (carrier, key) { + if (carrier == null) { + return undefined; + } + return carrier[key]; + }, + keys: function (carrier) { + if (carrier == null) { + return []; + } + return Object.keys(carrier); + }, +}; +export var defaultTextMapSetter = { + set: function (carrier, key, value) { + if (carrier == null) { + return; + } + carrier[key] = value; + }, +}; +//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map new file mode 100644 index 00000000..9de7d06a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/TextMapPropagator.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAkGH,MAAM,CAAC,IAAM,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG;QACd,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC;IACtB,CAAC;IAED,IAAI,YAAC,OAAO;QACV,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,EAAE,CAAC;SACX;QACD,OAAO,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;CACF,CAAC;AAEF,MAAM,CAAC,IAAM,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG,EAAE,KAAK;QACrB,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO;SACR;QAED,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IACvB,CAAC;CACF,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\n\n/**\n * Injects `Context` into and extracts it from carriers that travel\n * in-band across process boundaries. Encoding is expected to conform to the\n * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request\n * headers.\n *\n * The carrier of propagated data on both the client (injector) and server\n * (extractor) side is usually an object such as http headers. Propagation is\n * usually implemented via library-specific request interceptors, where the\n * client-side injects values and the server-side extracts them.\n */\nexport interface TextMapPropagator {\n /**\n * Injects values from a given `Context` into a carrier.\n *\n * OpenTelemetry defines a common set of format values (TextMapPropagator),\n * and each has an expected `carrier` type.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param setter an optional {@link TextMapSetter}. 
If undefined, values will be\n * set by direct object assignment.\n */\n inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter\n ): void;\n\n /**\n * Given a `Context` and a carrier, extract context values from a\n * carrier and return a new context, created from the old context, with the\n * extracted values.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all\n * own properties, and keys will be accessed by direct object access.\n */\n extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter\n ): Context;\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n fields(): string[];\n}\n\n/**\n * A setter is specified by the caller to define a specific method\n * to set key/value pairs on the carrier within a propagator.\n */\nexport interface TextMapSetter {\n /**\n * Callback used to set a key/value pair on an object.\n *\n * Should be called by the propagator each time a key/value pair\n * should be set, and should set that key/value pair on the propagator.\n *\n * @param carrier object or class which carries key/value pairs\n * @param key string key to modify\n * @param value value to be set to the key on the carrier\n */\n set(carrier: Carrier, key: string, value: string): void;\n}\n\n/**\n * A getter is specified by the caller to define a specific method\n * to get the value of a key from a carrier.\n */\nexport interface TextMapGetter {\n /**\n * Get a list of all keys available on the carrier.\n *\n * @param carrier\n */\n keys(carrier: Carrier): string[];\n\n /**\n * Get the value of a specific key from the carrier.\n *\n * @param carrier\n * @param key\n */\n get(carrier: Carrier, key: string): undefined | string | string[];\n}\n\nexport const defaultTextMapGetter: TextMapGetter = {\n get(carrier, key) {\n if (carrier == null) {\n return undefined;\n }\n return carrier[key];\n },\n\n keys(carrier) {\n if (carrier == null) {\n return [];\n }\n return Object.keys(carrier);\n },\n};\n\nexport const defaultTextMapSetter: TextMapSetter = {\n set(carrier, key, value) {\n if (carrier == null) {\n return;\n }\n\n carrier[key] = value;\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.d.ts new file mode 100644 index 00000000..883cf914 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.d.ts @@ -0,0 +1,25 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. 
+ */ +export declare class NonRecordingSpan implements Span { + private readonly _spanContext; + constructor(_spanContext?: SpanContext); + spanContext(): SpanContext; + setAttribute(_key: string, _value: unknown): this; + setAttributes(_attributes: SpanAttributes): this; + addEvent(_name: string, _attributes?: SpanAttributes): this; + setStatus(_status: SpanStatus): this; + updateName(_name: string): this; + end(_endTime?: TimeInput): void; + isRecording(): boolean; + recordException(_exception: Exception, _time?: TimeInput): void; +} +//# sourceMappingURL=NonRecordingSpan.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js new file mode 100644 index 00000000..ba2e0ff5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js @@ -0,0 +1,62 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { INVALID_SPAN_CONTEXT } from './invalid-span-constants'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +var NonRecordingSpan = /** @class */ (function () { + function NonRecordingSpan(_spanContext) { + if (_spanContext === void 0) { _spanContext = INVALID_SPAN_CONTEXT; } + this._spanContext = _spanContext; + } + // Returns a SpanContext. + NonRecordingSpan.prototype.spanContext = function () { + return this._spanContext; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setStatus = function (_status) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.updateName = function (_name) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.end = function (_endTime) { }; + // isRecording always returns false for NonRecordingSpan. 
+ NonRecordingSpan.prototype.isRecording = function () { + return false; + }; + // By default does nothing + NonRecordingSpan.prototype.recordException = function (_exception, _time) { }; + return NonRecordingSpan; +}()); +export { NonRecordingSpan }; +//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map new file mode 100644 index 00000000..6af5c8d4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NonRecordingSpan.js","sourceRoot":"","sources":["../../../src/trace/NonRecordingSpan.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAKhE;;;;GAIG;AACH;IACE,0BACmB,YAAgD;QAAhD,6BAAA,EAAA,mCAAgD;QAAhD,iBAAY,GAAZ,YAAY,CAAoC;IAChE,CAAC;IAEJ,yBAAyB;IACzB,sCAAW,GAAX;QACE,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED,0BAA0B;IAC1B,uCAAY,GAAZ,UAAa,IAAY,EAAE,MAAe;QACxC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,wCAAa,GAAb,UAAc,WAA2B;QACvC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,mCAAQ,GAAR,UAAS,KAAa,EAAE,WAA4B;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,oCAAS,GAAT,UAAU,OAAmB;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,qCAAU,GAAV,UAAW,KAAa;QACtB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,8BAAG,GAAH,UAAI,QAAoB,IAAS,CAAC;IAElC,yDAAyD;IACzD,sCAAW,GAAX;QACE,OAAO,KAAK,CAAC;IACf,CAAC;IAED,0BAA0B;IAC1B,0CAAe,GAAf,UAAgB,UAAqB,EAAE,KAAiB,IAAS,CAAC;IACpE,uBAAC;AAAD,CAAC,AA7CD,IA6CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { INVALID_SPAN_CONTEXT } from './invalid-span-constants';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * The NonRecordingSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nexport class NonRecordingSpan implements Span {\n constructor(\n private readonly _spanContext: SpanContext = INVALID_SPAN_CONTEXT\n ) {}\n\n // Returns a SpanContext.\n spanContext(): SpanContext {\n return this._spanContext;\n }\n\n // By default does nothing\n setAttribute(_key: string, _value: unknown): this {\n return this;\n }\n\n // By default does nothing\n setAttributes(_attributes: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n addEvent(_name: string, _attributes?: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n setStatus(_status: SpanStatus): this {\n return this;\n }\n\n // By default does nothing\n updateName(_name: string): this {\n return this;\n }\n\n // By default does nothing\n end(_endTime?: TimeInput): void {}\n\n // isRecording always returns false for NonRecordingSpan.\n isRecording(): boolean {\n return false;\n }\n\n // By default does nothing\n recordException(_exception: Exception, _time?: TimeInput): void {}\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.d.ts new file mode 100644 index 00000000..0e059c99 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.d.ts @@ -0,0 +1,14 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +/** + * No-op implementations of {@link Tracer}. + */ +export declare class NoopTracer implements Tracer { + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan ReturnType>(name: string, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, ctx: Context | undefined, fn: F): ReturnType; +} +//# sourceMappingURL=NoopTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js new file mode 100644 index 00000000..7ad14a2d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js @@ -0,0 +1,75 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ContextAPI } from '../api/context'; +import { getSpanContext, setSpan } from '../trace/context-utils'; +import { NonRecordingSpan } from './NonRecordingSpan'; +import { isSpanContextValid } from './spancontext-utils'; +var context = ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. + */ +var NoopTracer = /** @class */ (function () { + function NoopTracer() { + } + // startSpan starts a noop span. 
+ NoopTracer.prototype.startSpan = function (name, options, context) { + var root = Boolean(options === null || options === void 0 ? void 0 : options.root); + if (root) { + return new NonRecordingSpan(); + } + var parentFromContext = context && getSpanContext(context); + if (isSpanContext(parentFromContext) && + isSpanContextValid(parentFromContext)) { + return new NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan(); + } + }; + NoopTracer.prototype.startActiveSpan = function (name, arg2, arg3, arg4) { + var opts; + var ctx; + var fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + var parentContext = ctx !== null && ctx !== void 0 ? ctx : context.active(); + var span = this.startSpan(name, opts, parentContext); + var contextWithSpanSet = setSpan(parentContext, span); + return context.with(contextWithSpanSet, fn, undefined, span); + }; + return NoopTracer; +}()); +export { NoopTracer }; +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map new file mode 100644 index 00000000..71cfd1b6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracer.js","sourceRoot":"","sources":["../../../src/trace/NoopTracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAE5C,OAAO,EAAE,cAAc,EAAE,OAAO,EAAE,MAAM,wBAAwB,CAAC;AACjE,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AAKzD,IAAM,OAAO,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC;AAEzC;;GAEG;AACH;IAAA;IAgEA,CAAC;IA/DC,gCAAgC;IAChC,8BAAS,GAAT,UAAU,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,IAAM,IAAI,GAAG,OAAO,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,IAAI,CAAC,CAAC;QACpC,IAAI,IAAI,EAAE;YACR,OAAO,IAAI,gBAAgB,EAAE,CAAC;SAC/B;QAED,IAAM,iBAAiB,GAAG,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC,CAAC;QAE7D,IACE,aAAa,CAAC,iBAAiB,CAAC;YAChC,kBAAkB,CAAC,iBAAiB,CAAC,EACrC;YACA,OAAO,IAAI,gBAAgB,CAAC,iBAAiB,CAAC,CAAC;SAChD;aAAM;YACL,OAAO,IAAI,gBAAgB,EAAE,CAAC;SAC/B;IACH,CAAC;IAiBD,oCAAe,GAAf,UACE,IAAY,EACZ,IAAsB,EACtB,IAAkB,EAClB,IAAQ;QAER,IAAI,IAA6B,CAAC;QAClC,IAAI,GAAwB,CAAC;QAC7B,IAAI,EAAK,CAAC;QAEV,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YACxB,OAAO;SACR;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,IAAI,GAAG,IAA+B,CAAC;YACvC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM;YACL,IAAI,GAAG,IAA+B,CAAC;YACvC,GAAG,GAAG,IAA2B,CAAC;YAClC,EAAE,GAAG,IAAS,CAAC;SAChB;QAED,IAAM,aAAa,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,OAAO,CAAC,MAAM,EAAE,CAAC;QAC9C,IAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,aAAa,CAAC,CAAC;QACvD,IAAM,kBAAkB,GAAG,OAAO,CAAC,aAAa,EAAE,IAAI,CAAC,CAAC;QAExD,OAAO,OAAO,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;IAC/D,CAAC;IACH,iBAAC;AAAD,CAAC,AAhED,IAgEC;;AAED,SAAS,aAAa,CAAC,WAAgB;IACrC,OAAO,CACL,OAAO,WAAW,KAAK,QAAQ;QAC/B,OAAO,WAAW,CAAC,QAAQ,CAAC,KAAK,QAAQ;QACzC,OAAO,WAAW,CAAC,SAAS,CAAC,KAAK,QAAQ;QAC1C,OAAO,WAAW,CAAC,YAAY,CAAC,KAAK,QAAQ,CAC9C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright 
The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from '../api/context';\nimport { Context } from '../context/types';\nimport { getSpanContext, setSpan } from '../trace/context-utils';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { isSpanContextValid } from './spancontext-utils';\nimport { SpanOptions } from './SpanOptions';\nimport { SpanContext } from './span_context';\nimport { Tracer } from './tracer';\n\nconst context = ContextAPI.getInstance();\n\n/**\n * No-op implementations of {@link Tracer}.\n */\nexport class NoopTracer implements Tracer {\n // startSpan starts a noop span.\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n const root = Boolean(options?.root);\n if (root) {\n return new NonRecordingSpan();\n }\n\n const parentFromContext = context && getSpanContext(context);\n\n if (\n isSpanContext(parentFromContext) &&\n isSpanContextValid(parentFromContext)\n ) {\n return new NonRecordingSpan(parentFromContext);\n } else {\n return new NonRecordingSpan();\n }\n }\n\n startActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n ctx: Context | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n arg2?: F | SpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType | undefined {\n let opts: SpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (arguments.length < 2) {\n return;\n } else if (arguments.length === 2) {\n fn = arg2 as F;\n } else if (arguments.length === 3) {\n opts = arg2 as SpanOptions | undefined;\n fn = arg3 as F;\n } else {\n opts = arg2 as SpanOptions | undefined;\n ctx = arg3 as Context | undefined;\n fn = arg4 as F;\n }\n\n const parentContext = ctx ?? 
context.active();\n const span = this.startSpan(name, opts, parentContext);\n const contextWithSpanSet = setSpan(parentContext, span);\n\n return context.with(contextWithSpanSet, fn, undefined, span);\n }\n}\n\nfunction isSpanContext(spanContext: any): spanContext is SpanContext {\n return (\n typeof spanContext === 'object' &&\n typeof spanContext['spanId'] === 'string' &&\n typeof spanContext['traceId'] === 'string' &&\n typeof spanContext['traceFlags'] === 'number'\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.d.ts new file mode 100644 index 00000000..ec0fe792 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.d.ts @@ -0,0 +1,13 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +import { TracerProvider } from './tracer_provider'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. + */ +export declare class NoopTracerProvider implements TracerProvider { + getTracer(_name?: string, _version?: string, _options?: TracerOptions): Tracer; +} +//# sourceMappingURL=NoopTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js new file mode 100644 index 00000000..14d44c22 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NoopTracer } from './NoopTracer'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. 
+ */ +var NoopTracerProvider = /** @class */ (function () { + function NoopTracerProvider() { + } + NoopTracerProvider.prototype.getTracer = function (_name, _version, _options) { + return new NoopTracer(); + }; + return NoopTracerProvider; +}()); +export { NoopTracerProvider }; +//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map new file mode 100644 index 00000000..2705ab49 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/NoopTracerProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAK1C;;;;;GAKG;AACH;IAAA;IAQA,CAAC;IAPC,sCAAS,GAAT,UACE,KAAc,EACd,QAAiB,EACjB,QAAwB;QAExB,OAAO,IAAI,UAAU,EAAE,CAAC;IAC1B,CAAC;IACH,yBAAC;AAAD,CAAC,AARD,IAQC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopTracer } from './NoopTracer';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\nimport { TracerProvider } from './tracer_provider';\n\n/**\n * An implementation of the {@link TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nexport class NoopTracerProvider implements TracerProvider {\n getTracer(\n _name?: string,\n _version?: string,\n _options?: TracerOptions\n ): Tracer {\n return new NoopTracer();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.d.ts new file mode 100644 index 00000000..116cc5c1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.d.ts @@ -0,0 +1,27 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * Proxy tracer provided by the proxy tracer provider + */ +export declare class ProxyTracer implements Tracer { + private _provider; + readonly name: string; + readonly version?: string | undefined; + readonly options?: TracerOptions | undefined; + private _delegate?; + constructor(_provider: TracerDelegator, name: string, version?: string | undefined, options?: TracerOptions | undefined); + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan unknown>(_name: string, _options: F | SpanOptions, _context?: F | Context, _fn?: F): ReturnType; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + private _getTracer; +} +export interface TracerDelegator { + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js new file mode 100644 index 00000000..341991b9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NoopTracer } from './NoopTracer'; +var NOOP_TRACER = new NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +var ProxyTracer = /** @class */ (function () { + function ProxyTracer(_provider, name, version, options) { + this._provider = _provider; + this.name = name; + this.version = version; + this.options = options; + } + ProxyTracer.prototype.startSpan = function (name, options, context) { + return this._getTracer().startSpan(name, options, context); + }; + ProxyTracer.prototype.startActiveSpan = function (_name, _options, _context, _fn) { + var tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + }; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + ProxyTracer.prototype._getTracer = function () { + if (this._delegate) { + return this._delegate; + } + var tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + }; + return ProxyTracer; +}()); +export { ProxyTracer }; +//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map new file mode 100644 index 00000000..178a519d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracer.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAM1C,IAAM,WAAW,GAAG,IAAI,UAAU,EAAE,CAAC;AAErC;;GAEG;AACH;IAIE,qBACU,SAA0B,EAClB,IAAY,EACZ,OAAgB,EAChB,OAAuB;QAH/B,cAAS,GAAT,SAAS,CAAiB;QAClB,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAS;QAChB,YAAO,GAAP,OAAO,CAAgB;IACtC,CAAC;IAEJ,+BAAS,GAAT,UAAU,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;IAED,qCAAe,GAAf,UACE,KAAa,EACb,QAAyB,EACzB,QAAsB,EACtB,GAAO;QAEP,IAAM,MAAM,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC;QACjC,OAAO,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,eAAe,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;IAClE,CAAC;IAED;;;OAGG;IACK,gCAAU,GAAlB;QACE,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,OAAO,IAAI,CAAC,SAAS,CAAC;SACvB;QAED,IAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEvF,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,WAAW,CAAC;SACpB;QAED,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC;QACxB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IACH,kBAAC;AAAD,CAAC,AA3CD,IA2CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { NoopTracer } from './NoopTracer';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER = new NoopTracer();\n\n/**\n * Proxy tracer provided by the proxy tracer provider\n */\nexport class ProxyTracer implements Tracer {\n // When a real implementation is provided, this will be it\n private _delegate?: Tracer;\n\n constructor(\n private _provider: TracerDelegator,\n public readonly name: string,\n public readonly version?: string,\n public readonly options?: TracerOptions\n ) {}\n\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n return this._getTracer().startSpan(name, options, context);\n }\n\n startActiveSpan unknown>(\n _name: string,\n _options: F | SpanOptions,\n _context?: F | Context,\n _fn?: F\n ): ReturnType {\n const tracer = this._getTracer();\n return Reflect.apply(tracer.startActiveSpan, tracer, arguments);\n }\n\n /**\n * Try to get a 
tracer from the proxy tracer provider.\n * If the proxy tracer provider has no delegate, return a noop tracer.\n */\n private _getTracer() {\n if (this._delegate) {\n return this._delegate;\n }\n\n const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options);\n\n if (!tracer) {\n return NOOP_TRACER;\n }\n\n this._delegate = tracer;\n return this._delegate;\n }\n}\n\nexport interface TracerDelegator {\n getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.d.ts new file mode 100644 index 00000000..ee7eafa9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.d.ts @@ -0,0 +1,25 @@ +import { Tracer } from './tracer'; +import { TracerProvider } from './tracer_provider'; +import { TracerOptions } from './tracer_options'; +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +export declare class ProxyTracerProvider implements TracerProvider { + private _delegate?; + /** + * Get a {@link ProxyTracer} + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; + getDelegate(): TracerProvider; + /** + * Set the delegate tracer provider + */ + setDelegate(delegate: TracerProvider): void; + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js new file mode 100644 index 00000000..3cc735c9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ProxyTracer } from './ProxyTracer'; +import { NoopTracerProvider } from './NoopTracerProvider'; +var NOOP_TRACER_PROVIDER = new NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. 
+ */ +var ProxyTracerProvider = /** @class */ (function () { + function ProxyTracerProvider() { + } + /** + * Get a {@link ProxyTracer} + */ + ProxyTracerProvider.prototype.getTracer = function (name, version, options) { + var _a; + return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer(this, name, version, options)); + }; + ProxyTracerProvider.prototype.getDelegate = function () { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + }; + /** + * Set the delegate tracer provider + */ + ProxyTracerProvider.prototype.setDelegate = function (delegate) { + this._delegate = delegate; + }; + ProxyTracerProvider.prototype.getDelegateTracer = function (name, version, options) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); + }; + return ProxyTracerProvider; +}()); +export { ProxyTracerProvider }; +//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map new file mode 100644 index 00000000..fc9179b1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracerProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAG1D,IAAM,oBAAoB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEtD;;;;;;;GAOG;AACH;IAAA;IA+BA,CAAC;IA5BC;;OAEG;IACH,uCAAS,GAAT,UAAU,IAAY,EAAE,OAAgB,EAAE,OAAuB;;QAC/D,OAAO,CACL,MAAA,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,mCAC9C,IAAI,WAAW,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAC9C,CAAC;IACJ,CAAC;IAED,yCAAW,GAAX;;QACE,OAAO,MAAA,IAAI,CAAC,SAAS,mCAAI,oBAAoB,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,yCAAW,GAAX,UAAY,QAAwB;QAClC,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAED,+CAAiB,GAAjB,UACE,IAAY,EACZ,OAAgB,EAChB,OAAuB;;QAEvB,OAAO,MAAA,IAAI,CAAC,SAAS,0CAAE,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IACH,0BAAC;AAAD,CAAC,AA/BD,IA+BC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerProvider } from './tracer_provider';\nimport { ProxyTracer } from './ProxyTracer';\nimport { NoopTracerProvider } from './NoopTracerProvider';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n\n/**\n * Tracer provider which provides {@link ProxyTracer}s.\n *\n * Before a delegate is set, tracers provided are NoOp.\n * When a delegate is set, traces are provided from the delegate.\n * When a delegate is set after tracers have already been provided,\n * all tracers already provided will use the provided delegate implementation.\n */\nexport 
class ProxyTracerProvider implements TracerProvider {\n private _delegate?: TracerProvider;\n\n /**\n * Get a {@link ProxyTracer}\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n return (\n this.getDelegateTracer(name, version, options) ??\n new ProxyTracer(this, name, version, options)\n );\n }\n\n getDelegate(): TracerProvider {\n return this._delegate ?? NOOP_TRACER_PROVIDER;\n }\n\n /**\n * Set the delegate tracer provider\n */\n setDelegate(delegate: TracerProvider) {\n this._delegate = delegate;\n }\n\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined {\n return this._delegate?.getTracer(name, version, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.d.ts new file mode 100644 index 00000000..dd8dc811 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.d.ts @@ -0,0 +1,30 @@ +import { Context } from '../context/types'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SamplingResult } from './SamplingResult'; +import { SpanKind } from './span_kind'; +/** + * This interface represent a sampler. Sampling is a mechanism to control the + * noise and overhead introduced by OpenTelemetry by reducing the number of + * samples of traces collected and sent to the backend. + */ +export interface Sampler { + /** + * Checks whether span needs to be created and tracked. + * + * @param context Parent Context which may contain a span. + * @param traceId of the span to be created. It can be different from the + * traceId in the {@link SpanContext}. Typically in situations when the + * span to be created starts a new trace. + * @param spanName of the span to be created. + * @param spanKind of the span to be created. + * @param attributes Initial set of SpanAttributes for the Span being constructed. + * @param links Collection of links that will be associated with the Span to + * be created. Typically useful for batch operations. + * @returns a {@link SamplingResult}. + */ + shouldSample(context: Context, traceId: string, spanName: string, spanKind: SpanKind, attributes: SpanAttributes, links: Link[]): SamplingResult; + /** Returns the sampler name or short description with the configuration. */ + toString(): string; +} +//# sourceMappingURL=Sampler.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js new file mode 100644 index 00000000..22a60a12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js.map b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js.map new file mode 100644 index 00000000..c88d802f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Sampler.js","sourceRoot":"","sources":["../../../src/trace/Sampler.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SamplingResult } from './SamplingResult';\nimport { SpanKind } from './span_kind';\n\n/**\n * This interface represent a sampler. Sampling is a mechanism to control the\n * noise and overhead introduced by OpenTelemetry by reducing the number of\n * samples of traces collected and sent to the backend.\n */\nexport interface Sampler {\n /**\n * Checks whether span needs to be created and tracked.\n *\n * @param context Parent Context which may contain a span.\n * @param traceId of the span to be created. It can be different from the\n * traceId in the {@link SpanContext}. Typically in situations when the\n * span to be created starts a new trace.\n * @param spanName of the span to be created.\n * @param spanKind of the span to be created.\n * @param attributes Initial set of SpanAttributes for the Span being constructed.\n * @param links Collection of links that will be associated with the Span to\n * be created. Typically useful for batch operations.\n * @returns a {@link SamplingResult}.\n */\n shouldSample(\n context: Context,\n traceId: string,\n spanName: string,\n spanKind: SpanKind,\n attributes: SpanAttributes,\n links: Link[]\n ): SamplingResult;\n\n /** Returns the sampler name or short description with the configuration. */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.d.ts new file mode 100644 index 00000000..93948465 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.d.ts @@ -0,0 +1,39 @@ +import { SpanAttributes } from './attributes'; +/** + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export declare enum SamplingDecision { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + NOT_RECORD = 0, + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + RECORD = 1, + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
+ */ + RECORD_AND_SAMPLED = 2 +} +/** + * A sampling result contains a decision for a {@link Span} and additional + * attributes the sampler would like to added to the Span. + */ +export interface SamplingResult { + /** + * A sampling decision, refer to {@link SamplingDecision} for details. + */ + decision: SamplingDecision; + /** + * The list of attributes returned by SamplingResult MUST be immutable. + * Caller may call {@link Sampler}.shouldSample any number of times and + * can safely cache the returned value. + */ + attributes?: Readonly; +} +//# sourceMappingURL=SamplingResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js new file mode 100644 index 00000000..3587532d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
+ */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision || (SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js.map b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js.map new file mode 100644 index 00000000..9c73981d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SamplingResult.js","sourceRoot":"","sources":["../../../src/trace/SamplingResult.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH;;;GAGG;AACH,MAAM,CAAN,IAAY,gBAgBX;AAhBD,WAAY,gBAAgB;IAC1B;;;OAGG;IACH,mEAAU,CAAA;IACV;;;OAGG;IACH,2DAAM,CAAA;IACN;;;OAGG;IACH,mFAAkB,CAAA;AACpB,CAAC,EAhBW,gBAAgB,KAAhB,gBAAgB,QAgB3B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\n\n/**\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nexport enum SamplingDecision {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n NOT_RECORD,\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n RECORD,\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n RECORD_AND_SAMPLED,\n}\n\n/**\n * A sampling result contains a decision for a {@link Span} and additional\n * attributes the sampler would like to added to the Span.\n */\nexport interface SamplingResult {\n /**\n * A sampling decision, refer to {@link SamplingDecision} for details.\n */\n decision: SamplingDecision;\n /**\n * The list of attributes returned by SamplingResult MUST be immutable.\n * Caller may call {@link Sampler}.shouldSample any number of times and\n * can safely cache the returned value.\n */\n attributes?: Readonly;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.d.ts new file mode 100644 index 00000000..c8045689 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.d.ts @@ -0,0 +1,23 @@ +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SpanKind } from './span_kind'; +/** + * Options needed for span creation + */ +export interface SpanOptions { + /** + * The SpanKind of a span + * @default {@link SpanKind.INTERNAL} + */ + kind?: SpanKind; + /** A span's attributes */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** A manually specified start time for the created `Span` object. */ + startTime?: TimeInput; + /** The new span should be a root span. (Ignore parent from context). 
*/ + root?: boolean; +} +//# sourceMappingURL=SpanOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js new file mode 100644 index 00000000..06b42b15 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js.map b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js.map new file mode 100644 index 00000000..9132a33e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SpanOptions.js","sourceRoot":"","sources":["../../../src/trace/SpanOptions.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SpanKind } from './span_kind';\n\n/**\n * Options needed for span creation\n */\nexport interface SpanOptions {\n /**\n * The SpanKind of a span\n * @default {@link SpanKind.INTERNAL}\n */\n kind?: SpanKind;\n\n /** A span's attributes */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /** A manually specified start time for the created `Span` object. */\n startTime?: TimeInput;\n\n /** The new span should be a root span. (Ignore parent from context). 
*/\n root?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/attributes.d.ts new file mode 100644 index 00000000..a2a5d2a2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/attributes.d.ts @@ -0,0 +1,10 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +/** + * @deprecated please use {@link Attributes} + */ +export declare type SpanAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type SpanAttributeValue = AttributeValue; +//# sourceMappingURL=attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.js b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js new file mode 100644 index 00000000..6f1b9a3f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.js.map b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js.map new file mode 100644 index 00000000..2b02be78 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"attributes.js","sourceRoot":"","sources":["../../../src/trace/attributes.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type SpanAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type SpanAttributeValue = AttributeValue;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.d.ts new file mode 100644 index 00000000..1d46619b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.d.ts @@ -0,0 +1,37 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +/** + * Return the span if one exists + * + * 
@param context context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export declare function deleteSpan(context: Context): Context; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; +//# sourceMappingURL=context-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js new file mode 100644 index 00000000..1f7525a9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js @@ -0,0 +1,66 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { createContextKey } from '../context/context'; +import { NonRecordingSpan } from './NonRecordingSpan'; +/** + * span key + */ +var SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +export function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export function deleteSpan(context) { + return context.deleteValue(SPAN_KEY); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export function setSpanContext(context, spanContext) { + return setSpan(context, new NonRecordingSpan(spanContext)); +} +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +export function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? 
void 0 : _a.spanContext(); +} +//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map new file mode 100644 index 00000000..81b36164 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-utils.js","sourceRoot":"","sources":["../../../src/trace/context-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;GAEG;AACH,IAAM,QAAQ,GAAG,gBAAgB,CAAC,gCAAgC,CAAC,CAAC;AAEpE;;;;GAIG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB;IACtC,OAAQ,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAU,IAAI,SAAS,CAAC;AAC3D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC1C,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB;IACzC,OAAO,OAAO,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;AACvC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,cAAc,CAC5B,OAAgB,EAChB,WAAwB;IAExB,OAAO,OAAO,CAAC,OAAO,EAAE,IAAI,gBAAgB,CAAC,WAAW,CAAC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB;;IAC7C,OAAO,MAAA,OAAO,CAAC,OAAO,CAAC,0CAAE,WAAW,EAAE,CAAC;AACzC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { NonRecordingSpan } from './NonRecordingSpan';\n\n/**\n * span key\n */\nconst SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN');\n\n/**\n * Return the span if one exists\n *\n * @param context context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return (context.getValue(SPAN_KEY) as Span) || undefined;\n}\n\n/**\n * Set the span on a context\n *\n * @param context context to use as parent\n * @param span span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return context.setValue(SPAN_KEY, span);\n}\n\n/**\n * Remove current span stored in the context\n *\n * @param context context to delete span from\n */\nexport function deleteSpan(context: Context): Context {\n return context.deleteValue(SPAN_KEY);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context context to set active span on\n * @param spanContext span context to be wrapped\n */\nexport function setSpanContext(\n context: Context,\n spanContext: SpanContext\n): Context {\n return setSpan(context, new NonRecordingSpan(spanContext));\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return getSpan(context)?.spanContext();\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.d.ts new file mode 100644 index 00000000..9ed5ecb7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.d.ts @@ -0,0 +1,22 @@ +import { TraceState } from '../trace_state'; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +export declare class TraceStateImpl implements TraceState { + private _internalState; + constructor(rawTraceState?: string); + set(key: string, value: string): TraceStateImpl; + unset(key: string): TraceStateImpl; + get(key: string): string | undefined; + serialize(): string; + private _parse; + private _keys; + private _clone; +} +//# sourceMappingURL=tracestate-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js new file mode 100644 index 00000000..75140693 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js @@ -0,0 +1,102 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { validateKey, validateValue } from './tracestate-validators'; +var MAX_TRACE_STATE_ITEMS = 32; +var MAX_TRACE_STATE_LEN = 512; +var LIST_MEMBERS_SEPARATOR = ','; +var LIST_MEMBER_KEY_VALUE_SPLITTER = '='; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +var TraceStateImpl = /** @class */ (function () { + function TraceStateImpl(rawTraceState) { + this._internalState = new Map(); + if (rawTraceState) + this._parse(rawTraceState); + } + TraceStateImpl.prototype.set = function (key, value) { + // TODO: Benchmark the different approaches(map vs list) and + // use the faster one. 
+ var traceState = this._clone(); + if (traceState._internalState.has(key)) { + traceState._internalState.delete(key); + } + traceState._internalState.set(key, value); + return traceState; + }; + TraceStateImpl.prototype.unset = function (key) { + var traceState = this._clone(); + traceState._internalState.delete(key); + return traceState; + }; + TraceStateImpl.prototype.get = function (key) { + return this._internalState.get(key); + }; + TraceStateImpl.prototype.serialize = function () { + var _this = this; + return this._keys() + .reduce(function (agg, key) { + agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + _this.get(key)); + return agg; + }, []) + .join(LIST_MEMBERS_SEPARATOR); + }; + TraceStateImpl.prototype._parse = function (rawTraceState) { + if (rawTraceState.length > MAX_TRACE_STATE_LEN) + return; + this._internalState = rawTraceState + .split(LIST_MEMBERS_SEPARATOR) + .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning + .reduce(function (agg, part) { + var listMember = part.trim(); // Optional Whitespace (OWS) handling + var i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); + if (i !== -1) { + var key = listMember.slice(0, i); + var value = listMember.slice(i + 1, part.length); + if (validateKey(key) && validateValue(value)) { + agg.set(key, value); + } + else { + // TODO: Consider to add warning log + } + } + return agg; + }, new Map()); + // Because of the reverse() requirement, trunc must be done after map is created + if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { + this._internalState = new Map(Array.from(this._internalState.entries()) + .reverse() // Use reverse same as original tracestate parse chain + .slice(0, MAX_TRACE_STATE_ITEMS)); + } + }; + TraceStateImpl.prototype._keys = function () { + return Array.from(this._internalState.keys()).reverse(); + }; + TraceStateImpl.prototype._clone = function () { + var traceState = new TraceStateImpl(); + traceState._internalState = new Map(this._internalState); + return traceState; + }; + return TraceStateImpl; +}()); +export { TraceStateImpl }; +//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js.map b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js.map new file mode 100644 index 00000000..102cb4f3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracestate-impl.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-impl.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAErE,IAAM,qBAAqB,GAAG,EAAE,CAAC;AACjC,IAAM,mBAAmB,GAAG,GAAG,CAAC;AAChC,IAAM,sBAAsB,GAAG,GAAG,CAAC;AACnC,IAAM,8BAA8B,GAAG,GAAG,CAAC;AAE3C;;;;;;;;GAQG;AACH;IAGE,wBAAY,aAAsB;QAF1B,mBAAc,GAAwB,IAAI,GAAG,EAAE,CAAC;QAGtD,IAAI,aAAa;YAAE,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;IAChD,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW,EAAE,KAAa;QAC5B,4DAA4D;QAC5D,sBAAsB;QACtB,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,IAAI,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;YACtC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACvC;QACD,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC1C,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,8BAAK,GAAL,UAAM,GAAW;QACf,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW;QACb,OAAO,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;IAED,kCAAS,GAAT;QAAA,iBAOC;QANC,OAAO,IAAI,CAAC,KAAK,EAAE;aAChB,MAAM,CAAC,UAAC,GAAa,EAAE,GAAG;YACzB,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,8BAA8B,GAAG,KAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YAC/D,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,EAAE,CAAC;aACL,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAClC,CAAC;IAEO,+BAAM,GAAd,UAAe,aAAqB;QAClC,IAAI,aAAa,CAAC,MAAM,GAAG,mBAAmB;YAAE,OAAO;QACvD,IAAI,CAAC,cAAc,GAAG,aAAa;aAChC,KAAK,CAAC,sBAAsB,CAAC;aAC7B,OAAO,EAAE,CAAC,2EAA2E;aACrF,MAAM,CAAC,UAAC,GAAwB,EAAE,IAAY;YAC7C,IAAM,UAAU,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,qCAAqC;YACrE,IAAM,CAAC,GAAG,UAAU,CAAC,OAAO,CAAC,8BAA8B,CAAC,CAAC;YAC7D,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;gBACZ,IAAM,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACnC,IAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;gBACnD,IAAI,WAAW,CAAC,GAAG,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,EAAE;oBAC5C,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBACrB;qBAAM;oBACL,oCAAoC;iBACrC;aACF;YACD,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC;QAEhB,gFAAgF;QAChF,IAAI,IAAI,CAAC,cAAc,CAAC,IAAI,GAAG,qBAAqB,EAAE;YACpD,IAAI,CAAC,cAAc,GAAG,IAAI,GAAG,CAC3B,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE,CAAC;iBACtC,OAAO,EAAE,CAAC,sDAAsD;iBAChE,KAAK,CAAC,CAAC,EAAE,qBAAqB,CAAC,CACnC,CAAC;SACH;IACH,CAAC;IAEO,8BAAK,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;IAC1D,CAAC;IAEO,+BAAM,GAAd;QACE,IAAM,UAAU,GAAG,IAAI,cAAc,EAAE,CAAC;QACxC,UAAU,CAAC,cAAc,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QACzD,OAAO,UAAU,CAAC;IACpB,CAAC;IACH,qBAAC;AAAD,CAAC,AA5ED,IA4EC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { validateKey, validateValue } from './tracestate-validators';\n\nconst MAX_TRACE_STATE_ITEMS = 32;\nconst MAX_TRACE_STATE_LEN = 512;\nconst LIST_MEMBERS_SEPARATOR = ',';\nconst LIST_MEMBER_KEY_VALUE_SPLITTER = '=';\n\n/**\n * TraceState must be a class and not 
a simple object type because of the spec\n * requirement (https://www.w3.org/TR/trace-context/#tracestate-field).\n *\n * Here is the list of allowed mutations:\n * - New key-value pair should be added into the beginning of the list\n * - The value of any key can be updated. Modified keys MUST be moved to the\n * beginning of the list.\n */\nexport class TraceStateImpl implements TraceState {\n private _internalState: Map = new Map();\n\n constructor(rawTraceState?: string) {\n if (rawTraceState) this._parse(rawTraceState);\n }\n\n set(key: string, value: string): TraceStateImpl {\n // TODO: Benchmark the different approaches(map vs list) and\n // use the faster one.\n const traceState = this._clone();\n if (traceState._internalState.has(key)) {\n traceState._internalState.delete(key);\n }\n traceState._internalState.set(key, value);\n return traceState;\n }\n\n unset(key: string): TraceStateImpl {\n const traceState = this._clone();\n traceState._internalState.delete(key);\n return traceState;\n }\n\n get(key: string): string | undefined {\n return this._internalState.get(key);\n }\n\n serialize(): string {\n return this._keys()\n .reduce((agg: string[], key) => {\n agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key));\n return agg;\n }, [])\n .join(LIST_MEMBERS_SEPARATOR);\n }\n\n private _parse(rawTraceState: string) {\n if (rawTraceState.length > MAX_TRACE_STATE_LEN) return;\n this._internalState = rawTraceState\n .split(LIST_MEMBERS_SEPARATOR)\n .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning\n .reduce((agg: Map, part: string) => {\n const listMember = part.trim(); // Optional Whitespace (OWS) handling\n const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER);\n if (i !== -1) {\n const key = listMember.slice(0, i);\n const value = listMember.slice(i + 1, part.length);\n if (validateKey(key) && validateValue(value)) {\n agg.set(key, value);\n } else {\n // TODO: Consider to add warning log\n }\n }\n return agg;\n }, new Map());\n\n // Because of the reverse() requirement, trunc must be done after map is created\n if (this._internalState.size > MAX_TRACE_STATE_ITEMS) {\n this._internalState = new Map(\n Array.from(this._internalState.entries())\n .reverse() // Use reverse same as original tracestate parse chain\n .slice(0, MAX_TRACE_STATE_ITEMS)\n );\n }\n }\n\n private _keys(): string[] {\n return Array.from(this._internalState.keys()).reverse();\n }\n\n private _clone(): TraceStateImpl {\n const traceState = new TraceStateImpl();\n traceState._internalState = new Map(this._internalState);\n return traceState;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.d.ts new file mode 100644 index 00000000..4917f99d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.d.ts @@ -0,0 +1,15 @@ +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. 
+ * see https://www.w3.org/TR/trace-context/#key + */ +export declare function validateKey(key: string): boolean; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. + */ +export declare function validateValue(value: string): boolean; +//# sourceMappingURL=tracestate-validators.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js new file mode 100644 index 00000000..1d3f14bc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; +var VALID_KEY = "[a-z]" + VALID_KEY_CHAR_RANGE + "{0,255}"; +var VALID_VENDOR_KEY = "[a-z0-9]" + VALID_KEY_CHAR_RANGE + "{0,240}@[a-z]" + VALID_KEY_CHAR_RANGE + "{0,13}"; +var VALID_KEY_REGEX = new RegExp("^(?:" + VALID_KEY + "|" + VALID_VENDOR_KEY + ")$"); +var VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; +var INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. + * see https://www.w3.org/TR/trace-context/#key + */ +export function validateKey(key) { + return VALID_KEY_REGEX.test(key); +} +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. 
+ */ +export function validateValue(value) { + return (VALID_VALUE_BASE_REGEX.test(value) && + !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); +} +//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js.map b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js.map new file mode 100644 index 00000000..72899fc6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracestate-validators.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-validators.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,IAAM,oBAAoB,GAAG,cAAc,CAAC;AAC5C,IAAM,SAAS,GAAG,UAAQ,oBAAoB,YAAS,CAAC;AACxD,IAAM,gBAAgB,GAAG,aAAW,oBAAoB,qBAAgB,oBAAoB,WAAQ,CAAC;AACrG,IAAM,eAAe,GAAG,IAAI,MAAM,CAAC,SAAO,SAAS,SAAI,gBAAgB,OAAI,CAAC,CAAC;AAC7E,IAAM,sBAAsB,GAAG,qBAAqB,CAAC;AACrD,IAAM,+BAA+B,GAAG,KAAK,CAAC;AAE9C;;;;;;;GAOG;AACH,MAAM,UAAU,WAAW,CAAC,GAAW;IACrC,OAAO,eAAe,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,aAAa,CAAC,KAAa;IACzC,OAAO,CACL,sBAAsB,CAAC,IAAI,CAAC,KAAK,CAAC;QAClC,CAAC,+BAA+B,CAAC,IAAI,CAAC,KAAK,CAAC,CAC7C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nconst VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]';\nconst VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`;\nconst VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`;\nconst VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`);\nconst VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/;\nconst INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/;\n\n/**\n * Key is opaque string up to 256 characters printable. It MUST begin with a\n * lowercase letter, and can only contain lowercase letters a-z, digits 0-9,\n * underscores _, dashes -, asterisks *, and forward slashes /.\n * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the\n * vendor name. 
Vendors SHOULD set the tenant ID at the beginning of the key.\n * see https://www.w3.org/TR/trace-context/#key\n */\nexport function validateKey(key: string): boolean {\n return VALID_KEY_REGEX.test(key);\n}\n\n/**\n * Value is opaque string up to 256 characters printable ASCII RFC0020\n * characters (i.e., the range 0x20 to 0x7E) except comma , and =.\n */\nexport function validateValue(value: string): boolean {\n return (\n VALID_VALUE_BASE_REGEX.test(value) &&\n !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.d.ts new file mode 100644 index 00000000..e3b51fe4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.d.ts @@ -0,0 +1,3 @@ +import { TraceState } from '../trace_state'; +export declare function createTraceState(rawTraceState?: string): TraceState; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js new file mode 100644 index 00000000..feea4691 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { TraceStateImpl } from './tracestate-impl'; +export function createTraceState(rawTraceState) { + return new TraceStateImpl(rawTraceState); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js.map b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js.map new file mode 100644 index 00000000..f1b6a146 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../../src/trace/internal/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAGnD,MAAM,UAAU,gBAAgB,CAAC,aAAsB;IACrD,OAAO,IAAI,cAAc,CAAC,aAAa,CAAC,CAAC;AAC3C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { TraceStateImpl } from './tracestate-impl';\n\n\nexport function createTraceState(rawTraceState?: string): TraceState {\n return new TraceStateImpl(rawTraceState);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.d.ts new file mode 100644 index 00000000..e32dab9d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.d.ts @@ -0,0 +1,5 @@ +import { SpanContext } from './span_context'; +export declare const INVALID_SPANID = "0000000000000000"; +export declare const INVALID_TRACEID = "00000000000000000000000000000000"; +export declare const INVALID_SPAN_CONTEXT: SpanContext; +//# sourceMappingURL=invalid-span-constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js new file mode 100644 index 00000000..36dc1d62 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js @@ -0,0 +1,24 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { TraceFlags } from './trace_flags'; +export var INVALID_SPANID = '0000000000000000'; +export var INVALID_TRACEID = '00000000000000000000000000000000'; +export var INVALID_SPAN_CONTEXT = { + traceId: INVALID_TRACEID, + spanId: INVALID_SPANID, + traceFlags: TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map new file mode 100644 index 00000000..970a3f41 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"invalid-span-constants.js","sourceRoot":"","sources":["../../../src/trace/invalid-span-constants.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAE3C,MAAM,CAAC,IAAM,cAAc,GAAG,kBAAkB,CAAC;AACjD,MAAM,CAAC,IAAM,eAAe,GAAG,kCAAkC,CAAC;AAClE,MAAM,CAAC,IAAM,oBAAoB,GAAgB;IAC/C,OAAO,EAAE,eAAe;IACxB,MAAM,EAAE,cAAc;IACtB,UAAU,EAAE,UAAU,CAAC,IAAI;CAC5B,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanContext } from './span_context';\nimport { TraceFlags } from './trace_flags';\n\nexport const INVALID_SPANID = '0000000000000000';\nexport const INVALID_TRACEID = '00000000000000000000000000000000';\nexport const INVALID_SPAN_CONTEXT: SpanContext = {\n traceId: INVALID_TRACEID,\n spanId: INVALID_SPANID,\n traceFlags: TraceFlags.NONE,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/link.d.ts new file mode 100644 index 00000000..d1326f51 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/link.d.ts @@ -0,0 +1,24 @@ +import { SpanAttributes } from './attributes'; +import { SpanContext } from './span_context'; +/** + * A pointer from the current {@link Span} to another span in the same trace or + * in a different trace. + * Few examples of Link usage. + * 1. Batch Processing: A batch of elements may contain elements associated + * with one or more traces/spans. Since there can only be one parent + * SpanContext, Link is used to keep reference to SpanContext of all + * elements in the batch. + * 2. Public Endpoint: A SpanContext in incoming client request on a public + * endpoint is untrusted from service provider perspective. In such case it + * is advisable to start a new trace with appropriate sampling decision. + * However, it is desirable to associate incoming SpanContext to new trace + * initiated on service provider side so two traces (from Client and from + * Service Provider) can be correlated. + */ +export interface Link { + /** The {@link SpanContext} of a linked span. */ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. 
*/ + attributes?: SpanAttributes; +} +//# sourceMappingURL=link.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.js b/node_modules/@opentelemetry/api/build/esm/trace/link.js new file mode 100644 index 00000000..7c8accbe --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/link.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.js.map b/node_modules/@opentelemetry/api/build/esm/trace/link.js.map new file mode 100644 index 00000000..9fb53089 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/link.js.map @@ -0,0 +1 @@ +{"version":3,"file":"link.js","sourceRoot":"","sources":["../../../src/trace/link.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { SpanContext } from './span_context';\n\n/**\n * A pointer from the current {@link Span} to another span in the same trace or\n * in a different trace.\n * Few examples of Link usage.\n * 1. Batch Processing: A batch of elements may contain elements associated\n * with one or more traces/spans. Since there can only be one parent\n * SpanContext, Link is used to keep reference to SpanContext of all\n * elements in the batch.\n * 2. Public Endpoint: A SpanContext in incoming client request on a public\n * endpoint is untrusted from service provider perspective. In such case it\n * is advisable to start a new trace with appropriate sampling decision.\n * However, it is desirable to associate incoming SpanContext to new trace\n * initiated on service provider side so two traces (from Client and from\n * Service Provider) can be correlated.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n /** A set of {@link SpanAttributes} on the link. 
*/\n attributes?: SpanAttributes;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/span.d.ts new file mode 100644 index 00000000..c5f28141 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span.d.ts @@ -0,0 +1,101 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes, SpanAttributeValue } from './attributes'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. + * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key the key for this attribute. + * @param value the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name the name of the event. + * @param [attributesOrStartTime] the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is {@type TimeInput} and 3rd param is undefined + * @param [startTime] start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status the SpanStatus to set. + */ + setStatus(status: SpanStatus): this; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name the Span name. + */ + updateName(name: string): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param [endTime] the time to set as Span's end time. If not provided, + * use the current time as the span's end time. 
+ */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception the exception the only accepted values are string or Error + * @param [time] the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; +} +//# sourceMappingURL=span.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.js b/node_modules/@opentelemetry/api/build/esm/trace/span.js new file mode 100644 index 00000000..f41c7f6f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.js.map b/node_modules/@opentelemetry/api/build/esm/trace/span.js.map new file mode 100644 index 00000000..0a4f1024 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span.js","sourceRoot":"","sources":["../../../src/trace/span.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes, SpanAttributeValue } from './attributes';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. 
Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key the key for this attribute.\n * @param value the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n\n /**\n * Sets attributes to the span.\n *\n * @param attributes the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n\n /**\n * Adds an event to the Span.\n *\n * @param name the name of the event.\n * @param [attributesOrStartTime] the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is {@type TimeInput} and 3rd param is undefined\n * @param [startTime] start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name the Span name.\n */\n updateName(name: string): this;\n\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param [endTime] the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception the exception the only accepted values are string or Error\n * @param [time] the time to set as Span's event time. If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/span_context.d.ts new file mode 100644 index 00000000..f30933a1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_context.d.ts @@ -0,0 +1,53 @@ +import { TraceState } from './trace_state'; +/** + * A SpanContext represents the portion of a {@link Span} which must be + * serialized and propagated along side of a {@link Baggage}. + */ +export interface SpanContext { + /** + * The ID of the trace that this span belongs to. 
It is worldwide unique + * with practically sufficient probability by being made as 16 randomly + * generated bytes, encoded as a 32 lowercase hex characters corresponding to + * 128 bits. + */ + traceId: string; + /** + * The ID of the Span. It is globally unique with practically sufficient + * probability by being made as 8 randomly generated bytes, encoded as a 16 + * lowercase hex characters corresponding to 64 bits. + */ + spanId: string; + /** + * Only true if the SpanContext was propagated from a remote parent. + */ + isRemote?: boolean; + /** + * Trace flags to propagate. + * + * It is represented as 1 byte (bitmap). Bit to represent whether trace is + * sampled or not. When set, the least significant bit documents that the + * caller may have recorded trace data. A caller who does not record trace + * data out-of-band leaves this flag unset. + * + * see {@link TraceFlags} for valid flag values. + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} +//# sourceMappingURL=span_context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.js b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js new file mode 100644 index 00000000..1bb88b0d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.js.map b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js.map new file mode 100644 index 00000000..dbf0bfe5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_context.js","sourceRoot":"","sources":["../../../src/trace/span_context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from './trace_state';\n\n/**\n * A SpanContext represents the portion of a {@link Span} which must be\n * serialized and propagated along side of a {@link Baggage}.\n */\nexport interface SpanContext {\n /**\n * The ID of the trace that this span belongs to. It is worldwide unique\n * with practically sufficient probability by being made as 16 randomly\n * generated bytes, encoded as a 32 lowercase hex characters corresponding to\n * 128 bits.\n */\n traceId: string;\n /**\n * The ID of the Span. It is globally unique with practically sufficient\n * probability by being made as 8 randomly generated bytes, encoded as a 16\n * lowercase hex characters corresponding to 64 bits.\n */\n spanId: string;\n /**\n * Only true if the SpanContext was propagated from a remote parent.\n */\n isRemote?: boolean;\n /**\n * Trace flags to propagate.\n *\n * It is represented as 1 byte (bitmap). Bit to represent whether trace is\n * sampled or not. When set, the least significant bit documents that the\n * caller may have recorded trace data. A caller who does not record trace\n * data out-of-band leaves this flag unset.\n *\n * see {@link TraceFlags} for valid flag values.\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.d.ts new file mode 100644 index 00000000..a89846f6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.d.ts @@ -0,0 +1,27 @@ +export declare enum SpanKind { + /** Default value. Indicates that the span is used internally. 
*/ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} +//# sourceMappingURL=span_kind.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js new file mode 100644 index 00000000..1119df92 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js @@ -0,0 +1,43 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind || (SpanKind = {})); +//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js.map b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js.map new file mode 100644 index 00000000..deb6be73 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_kind.js","sourceRoot":"","sources":["../../../src/trace/span_kind.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAN,IAAY,QA6BX;AA7BD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IAEZ;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;;OAIG;IACH,+CAAY,CAAA;IAEZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EA7BW,QAAQ,KAAR,QAAQ,QA6BnB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.d.ts new file mode 100644 index 00000000..f1911114 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.d.ts @@ -0,0 +1,17 @@ +import { Span } from './span'; +import { SpanContext } from './span_context'; +export declare function isValidTraceId(traceId: string): boolean; +export declare function isValidSpanId(spanId: string): boolean; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export declare function isSpanContextValid(spanContext: SpanContext): boolean; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export declare function wrapSpanContext(spanContext: SpanContext): Span; +//# sourceMappingURL=spancontext-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js new file mode 100644 index 00000000..88545bb5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants'; +import { NonRecordingSpan } from './NonRecordingSpan'; +var VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +var VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +export function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID; +} +export function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID; +} +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export function wrapSpanContext(spanContext) { + return new NonRecordingSpan(spanContext); +} +//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map new file mode 100644 index 00000000..a4dc6c2c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"spancontext-utils.js","sourceRoot":"","sources":["../../../src/trace/spancontext-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAC3E,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD,IAAM,mBAAmB,GAAG,mBAAmB,CAAC;AAChD,IAAM,kBAAkB,GAAG,iBAAiB,CAAC;AAE7C,MAAM,UAAU,cAAc,CAAC,OAAe;IAC5C,OAAO,mBAAmB,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,OAAO,KAAK,eAAe,CAAC;AAC1E,CAAC;AAED,MAAM,UAAU,aAAa,CAAC,MAAc;IAC1C,OAAO,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,MAAM,KAAK,cAAc,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,kBAAkB,CAAC,WAAwB;IACzD,OAAO,CACL,cAAc,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CACzE,CAAC;AACJ,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,WAAwB;IACtD,OAAO,IAAI,gBAAgB,CAAC,WAAW,CAAC,CAAC;AAC3C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\n\nconst VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i;\nconst VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i;\n\nexport function isValidTraceId(traceId: string): boolean {\n return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID;\n}\n\nexport function isValidSpanId(spanId: string): boolean {\n return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID;\n}\n\n/**\n * Returns true if this {@link SpanContext} is valid.\n * @return true if this {@link SpanContext} is valid.\n */\nexport function isSpanContextValid(spanContext: SpanContext): boolean {\n return (\n isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)\n );\n}\n\n/**\n * Wrap the given {@link SpanContext} in a new non-recording {@link Span}\n *\n * @param spanContext span context to be wrapped\n * @returns a new non-recording {@link Span} with the provided context\n */\nexport function wrapSpanContext(spanContext: SpanContext): Span {\n return new NonRecordingSpan(spanContext);\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@opentelemetry/api/build/esm/trace/status.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/status.d.ts new file mode 100644 index 00000000..ab19a68f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/status.d.ts @@ -0,0 +1,25 @@ +export interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} +/** + * An enumeration of status codes. + */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} +//# sourceMappingURL=status.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/status.js b/node_modules/@opentelemetry/api/build/esm/trace/status.js new file mode 100644 index 00000000..5ee55e42 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/status.js @@ -0,0 +1,20 @@ +/** + * An enumeration of status codes. + */ +export var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode || (SpanStatusCode = {})); +//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/status.js.map b/node_modules/@opentelemetry/api/build/esm/trace/status.js.map new file mode 100644 index 00000000..af7e7d7f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/status.js.map @@ -0,0 +1 @@ +{"version":3,"file":"status.js","sourceRoot":"","sources":["../../../src/trace/status.ts"],"names":[],"mappings":"AAsBA;;GAEG;AACH,MAAM,CAAN,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,KAAd,cAAc,QAczB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. 
*/\n message?: string;\n}\n\n/**\n * An enumeration of status codes.\n */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.d.ts new file mode 100644 index 00000000..11288ba9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.d.ts @@ -0,0 +1,7 @@ +export declare enum TraceFlags { + /** Represents no flag set. */ + NONE = 0, + /** Bit to represent whether trace is sampled in trace flags. */ + SAMPLED = 1 +} +//# sourceMappingURL=trace_flags.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js new file mode 100644 index 00000000..8a7b0007 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export var TraceFlags; +(function (TraceFlags) { + /** Represents no flag set. */ + TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; + /** Bit to represent whether trace is sampled in trace flags. */ + TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; +})(TraceFlags || (TraceFlags = {})); +//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map new file mode 100644 index 00000000..2ea8680d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_flags.js","sourceRoot":"","sources":["../../../src/trace/trace_flags.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAN,IAAY,UAKX;AALD,WAAY,UAAU;IACpB,8BAA8B;IAC9B,2CAAU,CAAA;IACV,gEAAgE;IAChE,iDAAkB,CAAA;AACpB,CAAC,EALW,UAAU,KAAV,UAAU,QAKrB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum TraceFlags {\n /** Represents no flag set. */\n NONE = 0x0,\n /** Bit to represent whether trace is sampled in trace flags. 
*/\n SAMPLED = 0x1 << 0,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.d.ts new file mode 100644 index 00000000..f275b8be --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.d.ts @@ -0,0 +1,38 @@ +export interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key key of the TraceState entry. + * @param value value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. + */ + serialize(): string; +} +//# sourceMappingURL=trace_state.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js new file mode 100644 index 00000000..a6c368f8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js.map b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js.map new file mode 100644 index 00000000..64a3d7a2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_state.js","sourceRoot":"","sources":["../../../src/trace/trace_state.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key key of the TraceState entry.\n * @param value value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n\n // TODO: Consider to add support for merging an object as well by also\n // accepting a single internalTraceState argument similar to the constructor.\n\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/tracer.d.ts new file mode 100644 index 00000000..25090899 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer.d.ts @@ -0,0 +1,71 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +/** + * Tracer provides an interface for creating {@link Span}s. + */ +export interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method do NOT modify the current Context. 
+ * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @returns Span The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally the new span gets set in context and this context is activated + * for the duration of the function call. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.startActiveSpan('op', span => { + * try { + * do some work + * span.setStatus({code: SpanStatusCode.OK}); + * return something; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } finally { + * span.end(); + * } + * }); + * + * @example + * const span = tracer.startActiveSpan('op', span => { + * try { + * do some work + * return span; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } + * }); + * do some more work + * span.end(); + */ + startActiveSpan<F extends (span: Span) => unknown>(name: string, fn: F): ReturnType<F>; + startActiveSpan<F extends (span: Span) => unknown>(name: string, options: SpanOptions, fn: F): ReturnType<F>; + startActiveSpan<F extends (span: Span) => unknown>(name: string, options: SpanOptions, context: Context, fn: F): ReturnType<F>; +} +//# sourceMappingURL=tracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js new file mode 100644 index 00000000..ad066dc3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +export {}; +//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.js.map b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js.map new file mode 100644 index 00000000..77f6ae93 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer.js","sourceRoot":"","sources":["../../../src/trace/tracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\n\n/**\n * Tracer provides an interface for creating {@link Span}s.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. Start the span without setting it on context.\n *\n * This method do NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @returns Span The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally the new span gets set in context and this context is activated\n * for the duration of the function call.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * span.setStatus({code: SpanStatusCode.OK});\n * return something;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * } finally {\n * span.end();\n * }\n * });\n *\n * @example\n * const span = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * return span;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * }\n * });\n * do some more work\n * span.end();\n */\n startActiveSpan unknown>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.d.ts new file mode 100644 index 
00000000..f3bbccfc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.d.ts @@ -0,0 +1,10 @@ +/** + * An interface describes additional metadata of a tracer. + */ +export interface TracerOptions { + /** + * The schemaUrl of the tracer or instrumentation library + */ + schemaUrl?: string; +} +//# sourceMappingURL=tracer_options.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js new file mode 100644 index 00000000..470a3a73 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=tracer_options.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js.map b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js.map new file mode 100644 index 00000000..70365afc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_options.js","sourceRoot":"","sources":["../../../src/trace/tracer_options.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * An interface describes additional metadata of a tracer.\n */\nexport interface TracerOptions {\n /**\n * The schemaUrl of the tracer or instrumentation library\n */\n schemaUrl?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.d.ts new file mode 100644 index 00000000..9b2f7a95 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.d.ts @@ -0,0 +1,21 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * A registry for creating named {@link Tracer}s. + */ +export interface TracerProvider { + /** + * Returns a Tracer, creating one if one with the given name and version is + * not already created. + * + * This function may return different Tracer types (e.g. + * {@link NoopTracerProvider} vs. a functional tracer). + * + * @param name The name of the tracer or instrumentation library. 
+ * @param version The version of the tracer or instrumentation library. + * @param options The options of the tracer or instrumentation library. + * @returns Tracer A Tracer with the given name and version + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; +} +//# sourceMappingURL=tracer_provider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js new file mode 100644 index 00000000..adf432a6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=tracer_provider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js.map b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js.map new file mode 100644 index 00000000..bfc1cbd1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_provider.js","sourceRoot":"","sources":["../../../src/trace/tracer_provider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\n/**\n * A registry for creating named {@link Tracer}s.\n */\nexport interface TracerProvider {\n /**\n * Returns a Tracer, creating one if one with the given name and version is\n * not already created.\n *\n * This function may return different Tracer types (e.g.\n * {@link NoopTracerProvider} vs. 
a functional tracer).\n *\n * @param name The name of the tracer or instrumentation library.\n * @param version The version of the tracer or instrumentation library.\n * @param options The options of the tracer or instrumentation library.\n * @returns Tracer A Tracer with the given name and version\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.d.ts b/node_modules/@opentelemetry/api/build/esm/version.d.ts new file mode 100644 index 00000000..28a7146c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/version.d.ts @@ -0,0 +1,2 @@ +export declare const VERSION = "1.1.0"; +//# sourceMappingURL=version.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.js b/node_modules/@opentelemetry/api/build/esm/version.js new file mode 100644 index 00000000..385005d3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/version.js @@ -0,0 +1,18 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// this is autogenerated file, see scripts/version-update.js +export var VERSION = '1.1.0'; +//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.js.map b/node_modules/@opentelemetry/api/build/esm/version.js.map new file mode 100644 index 00000000..fdccd0e2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/version.js.map @@ -0,0 +1 @@ +{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,4DAA4D;AAC5D,MAAM,CAAC,IAAM,OAAO,GAAG,OAAO,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// this is autogenerated file, see scripts/version-update.js\nexport const VERSION = '1.1.0';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.d.ts b/node_modules/@opentelemetry/api/build/src/api/context.d.ts new file mode 100644 index 00000000..61caee8d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/context.d.ts @@ -0,0 +1,41 @@ +import { Context, ContextManager } from '../context/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare class ContextAPI { + private static _instance?; + /** Empty 
private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Context API */ + static getInstance(): ContextAPI; + /** + * Set the current context manager. + * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager: ContextManager): boolean; + /** + * Get the currently active context + */ + active(): Context; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind(context: Context, target: T): T; + private _getContextManager; + /** Disable and remove the global context manager */ + disable(): void; +} +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.js b/node_modules/@opentelemetry/api/build/src/api/context.js new file mode 100644 index 00000000..f6580a7a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/context.js @@ -0,0 +1,93 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ContextAPI = void 0; +var NoopContextManager_1 = require("../context/NoopContextManager"); +var global_utils_1 = require("../internal/global-utils"); +var diag_1 = require("./diag"); +var API_NAME = 'context'; +var NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +var ContextAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function ContextAPI() { + } + /** Get the singleton instance of the Context API */ + ContextAPI.getInstance = function () { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + }; + /** + * Set the current context manager. 
+ * + * @returns true if the context manager was successfully registered, else false + */ + ContextAPI.prototype.setGlobalContextManager = function (contextManager) { + return global_utils_1.registerGlobal(API_NAME, contextManager, diag_1.DiagAPI.instance()); + }; + /** + * Get the currently active context + */ + ContextAPI.prototype.active = function () { + return this._getContextManager().active(); + }; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + ContextAPI.prototype.with = function (context, fn, thisArg) { + var _a; + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return (_a = this._getContextManager()).with.apply(_a, __spreadArray([context, fn, thisArg], args)); + }; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + ContextAPI.prototype.bind = function (context, target) { + return this._getContextManager().bind(context, target); + }; + ContextAPI.prototype._getContextManager = function () { + return global_utils_1.getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; + }; + /** Disable and remove the global context manager */ + ContextAPI.prototype.disable = function () { + this._getContextManager().disable(); + global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + }; + return ContextAPI; +}()); +exports.ContextAPI = ContextAPI; +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.js.map b/node_modules/@opentelemetry/api/build/src/api/context.js.map new file mode 100644 index 00000000..c6616145 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/api/context.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;AAEH,oEAAmE;AAEnE,yDAIkC;AAClC,+BAAiC;AAEjC,IAAM,QAAQ,GAAG,SAAS,CAAC;AAC3B,IAAM,oBAAoB,GAAG,IAAI,uCAAkB,EAAE,CAAC;AAEtD;;GAEG;AACH;IAGE,+FAA+F;IAC/F;IAAuB,CAAC;IAExB,oDAAoD;IACtC,sBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAuB,GAA9B,UAA+B,cAA8B;QAC3D,OAAO,6BAAc,CAAC,QAAQ,EAAE,cAAc,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACI,2BAAM,GAAb;QACE,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC5C,CAAC;IAED;;;;;;;OAOG;IACI,yBAAI,GAAX,UACE,OAAgB,EAChB,EAAK,EACL,OAA8B;;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,CAAA,KAAA,IAAI,CAAC,kBAAkB,EAAE,CAAA,CAAC,IAAI,0BAAC,OAAO,EAAE,EAAE,EAAE,OAAO,GAAK,IAAI,GAAE;IACvE,CAAC;IAED;;;;;OAKG;IACI,yBAAI,GAAX,UAAe,OAAgB,EAAE,MAAS;QACxC,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACzD,CAAC;IAEO,uCAAkB,GAA1B;QACE,OAAO,wBAAS,CAAC,QAAQ,CAAC,IAAI,oBAAoB,CAAC;IACrD,CAAC;IAED,oDAAoD;IAC7C,4BAAO,GAAd;QACE,IAAI,CAAC,kBAAkB,EAAE,CAAC,OAAO,EAAE,CAAC;QACpC,+BAAgB,CAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IACH,iBAAC;AAAD,CAAC,AAnED,IAmEC;AAnEY,gCAAU","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you 
may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopContextManager } from '../context/NoopContextManager';\nimport { Context, ContextManager } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'context';\nconst NOOP_CONTEXT_MANAGER = new NoopContextManager();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport class ContextAPI {\n private static _instance?: ContextAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Context API */\n public static getInstance(): ContextAPI {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current context manager.\n *\n * @returns true if the context manager was successfully registered, else false\n */\n public setGlobalContextManager(contextManager: ContextManager): boolean {\n return registerGlobal(API_NAME, contextManager, DiagAPI.instance());\n }\n\n /**\n * Get the currently active context\n */\n public active(): Context {\n return this._getContextManager().active();\n }\n\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n public with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return this._getContextManager().with(context, fn, thisArg, ...args);\n }\n\n /**\n * Bind a context to a target function or event emitter\n *\n * @param context context to bind to the event emitter or function. 
Defaults to the currently active context\n * @param target function or event emitter to bind\n */\n public bind(context: Context, target: T): T {\n return this._getContextManager().bind(context, target);\n }\n\n private _getContextManager(): ContextManager {\n return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER;\n }\n\n /** Disable and remove the global context manager */\n public disable() {\n this._getContextManager().disable();\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.d.ts b/node_modules/@opentelemetry/api/build/src/api/diag.d.ts new file mode 100644 index 00000000..5c8c0bef --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/diag.d.ts @@ -0,0 +1,38 @@ +import { ComponentLoggerOptions, DiagLogFunction, DiagLogger, DiagLogLevel } from '../diag/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +export declare class DiagAPI implements DiagLogger { + private static _instance?; + /** Get the singleton instance of the DiagAPI API */ + static instance(): DiagAPI; + /** + * Private internal constructor + * @private + */ + private constructor(); + /** + * Set the global DiagLogger and DiagLogLevel. + * If a global diag logger is already set, this will override it. + * + * @param logger - [Optional] The DiagLogger instance to set as the default logger. + * @param logLevel - [Optional] The DiagLogLevel used to filter logs sent to the logger. If not provided it will default to INFO. + * @returns true if the logger was successfully registered, else false + */ + setLogger: (logger: DiagLogger, logLevel?: DiagLogLevel) => boolean; + /** + * + */ + createComponentLogger: (options: ComponentLoggerOptions) => DiagLogger; + verbose: DiagLogFunction; + debug: DiagLogFunction; + info: DiagLogFunction; + warn: DiagLogFunction; + error: DiagLogFunction; + /** + * Unregister the global logger and return to Noop + */ + disable: () => void; +} +//# sourceMappingURL=diag.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.js b/node_modules/@opentelemetry/api/build/src/api/diag.js new file mode 100644 index 00000000..54c3d3cd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/diag.js @@ -0,0 +1,93 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagAPI = void 0; +var ComponentLogger_1 = require("../diag/ComponentLogger"); +var logLevelLogger_1 = require("../diag/internal/logLevelLogger"); +var types_1 = require("../diag/types"); +var global_utils_1 = require("../internal/global-utils"); +var API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +var DiagAPI = /** @class */ (function () { + /** + * Private internal constructor + * @private + */ + function DiagAPI() { + function _logProxy(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + var logger = global_utils_1.getGlobal('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName].apply(logger, args); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + var self = this; + // DiagAPI specific functions + self.setLogger = function (logger, logLevel) { + var _a, _b; + if (logLevel === void 0) { logLevel = types_1.DiagLogLevel.INFO; } + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + var oldLogger = global_utils_1.getGlobal('diag'); + var newLogger = logLevelLogger_1.createLogLevelDiagLogger(logLevel, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger) { + var stack = (_b = new Error().stack) !== null && _b !== void 0 ? 
_b : ''; + oldLogger.warn("Current logger will be overwritten from " + stack); + newLogger.warn("Current logger will overwrite one already registered from " + stack); + } + return global_utils_1.registerGlobal('diag', newLogger, self, true); + }; + self.disable = function () { + global_utils_1.unregisterGlobal(API_NAME, self); + }; + self.createComponentLogger = function (options) { + return new ComponentLogger_1.DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + DiagAPI.instance = function () { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + }; + return DiagAPI; +}()); +exports.DiagAPI = DiagAPI; +//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.js.map b/node_modules/@opentelemetry/api/build/src/api/diag.js.map new file mode 100644 index 00000000..8c9eebd8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/diag.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag.js","sourceRoot":"","sources":["../../../src/api/diag.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,2DAA8D;AAC9D,kEAA2E;AAC3E,uCAKuB;AACvB,yDAIkC;AAElC,IAAM,QAAQ,GAAG,MAAM,CAAC;AAExB;;;GAGG;AACH;IAYE;;;OAGG;IACH;QACE,SAAS,SAAS,CAAC,QAA0B;YAC3C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAM,MAAM,GAAG,wBAAS,CAAC,MAAM,CAAC,CAAC;gBACjC,6BAA6B;gBAC7B,IAAI,CAAC,MAAM;oBAAE,OAAO;gBACpB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,EAAc,IAAI,EAAE;YACnC,CAAC,CAAC;QACJ,CAAC;QAED,mFAAmF;QACnF,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,6BAA6B;QAE7B,IAAI,CAAC,SAAS,GAAG,UACf,MAAkB,EAClB,QAA0C;;YAA1C,yBAAA,EAAA,WAAyB,oBAAY,CAAC,IAAI;YAE1C,IAAI,MAAM,KAAK,IAAI,EAAE;gBACnB,mCAAmC;gBACnC,2DAA2D;gBAC3D,qFAAqF;gBACrF,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,oIAAoI,CACrI,CAAC;gBACF,IAAI,CAAC,KAAK,CAAC,MAAA,GAAG,CAAC,KAAK,mCAAI,GAAG,CAAC,OAAO,CAAC,CAAC;gBACrC,OAAO,KAAK,CAAC;aACd;YAED,IAAM,SAAS,GAAG,wBAAS,CAAC,MAAM,CAAC,CAAC;YACpC,IAAM,SAAS,GAAG,yCAAwB,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;YAC7D,kFAAkF;YAClF,IAAI,SAAS,EAAE;gBACb,IAAM,KAAK,GAAG,MAAA,IAAI,KAAK,EAAE,CAAC,KAAK,mCAAI,iCAAiC,CAAC;gBACrE,SAAS,CAAC,IAAI,CAAC,6CAA2C,KAAO,CAAC,CAAC;gBACnE,SAAS,CAAC,IAAI,CACZ,+DAA6D,KAAO,CACrE,CAAC;aACH;YAED,OAAO,6BAAc,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QACvD,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG;YACb,+BAAgB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QACnC,CAAC,CAAC;QAEF,IAAI,CAAC,qBAAqB,GAAG,UAAC,OAA+B;YAC3D,OAAO,IAAI,qCAAmB,CAAC,OAAO,CAAC,CAAC;QAC1C,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC;QACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;QAChC,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAtED,oDAAoD;IACtC,gBAAQ,GAAtB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,OAAO,EAAE,CAAC;SAChC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IA4FH,cAAC;AAAD,CAAC,AAtGD,IAsGC;AAtGY,0BAAO","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License 
is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagComponentLogger } from '../diag/ComponentLogger';\nimport { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger';\nimport {\n ComponentLoggerOptions,\n DiagLogFunction,\n DiagLogger,\n DiagLogLevel,\n} from '../diag/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\n\nconst API_NAME = 'diag';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry internal\n * diagnostic API\n */\nexport class DiagAPI implements DiagLogger {\n private static _instance?: DiagAPI;\n\n /** Get the singleton instance of the DiagAPI API */\n public static instance(): DiagAPI {\n if (!this._instance) {\n this._instance = new DiagAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Private internal constructor\n * @private\n */\n private constructor() {\n function _logProxy(funcName: keyof DiagLogger): DiagLogFunction {\n return function (...args) {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) return;\n return logger[funcName](...args);\n };\n }\n\n // Using self local variable for minification purposes as 'this' cannot be minified\n const self = this;\n\n // DiagAPI specific functions\n\n self.setLogger = (\n logger: DiagLogger,\n logLevel: DiagLogLevel = DiagLogLevel.INFO\n ) => {\n if (logger === self) {\n // There isn't much we can do here.\n // Logging to the console might break the user application.\n // Try to log to self. If a logger was previously registered it will receive the log.\n const err = new Error(\n 'Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'\n );\n self.error(err.stack ?? err.message);\n return false;\n }\n\n const oldLogger = getGlobal('diag');\n const newLogger = createLogLevelDiagLogger(logLevel, logger);\n // There already is an logger registered. We'll let it know before overwriting it.\n if (oldLogger) {\n const stack = new Error().stack ?? '';\n oldLogger.warn(`Current logger will be overwritten from ${stack}`);\n newLogger.warn(\n `Current logger will overwrite one already registered from ${stack}`\n );\n }\n\n return registerGlobal('diag', newLogger, self, true);\n };\n\n self.disable = () => {\n unregisterGlobal(API_NAME, self);\n };\n\n self.createComponentLogger = (options: ComponentLoggerOptions) => {\n return new DiagComponentLogger(options);\n };\n\n self.verbose = _logProxy('verbose');\n self.debug = _logProxy('debug');\n self.info = _logProxy('info');\n self.warn = _logProxy('warn');\n self.error = _logProxy('error');\n }\n\n /**\n * Set the global DiagLogger and DiagLogLevel.\n * If a global diag logger is already set, this will override it.\n *\n * @param logger - [Optional] The DiagLogger instance to set as the default logger.\n * @param logLevel - [Optional] The DiagLogLevel used to filter logs sent to the logger. 
If not provided it will default to INFO.\n * @returns true if the logger was successfully registered, else false\n */\n public setLogger!: (logger: DiagLogger, logLevel?: DiagLogLevel) => boolean;\n /**\n *\n */\n public createComponentLogger!: (\n options: ComponentLoggerOptions\n ) => DiagLogger;\n\n // DiagLogger implementation\n public verbose!: DiagLogFunction;\n public debug!: DiagLogFunction;\n public info!: DiagLogFunction;\n public warn!: DiagLogFunction;\n public error!: DiagLogFunction;\n\n /**\n * Unregister the global logger and return to Noop\n */\n public disable!: () => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.d.ts b/node_modules/@opentelemetry/api/build/src/api/propagation.d.ts new file mode 100644 index 00000000..cf372cf2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/propagation.d.ts @@ -0,0 +1,48 @@ +import { Context } from '../context/types'; +import { TextMapGetter, TextMapPropagator, TextMapSetter } from '../propagation/TextMapPropagator'; +import { getBaggage, setBaggage, deleteBaggage } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +export declare class PropagationAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Propagator API */ + static getInstance(): PropagationAPI; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator: TextMapPropagator): boolean; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context: Context, carrier: Carrier, setter?: TextMapSetter): void; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context: Context, carrier: Carrier, getter?: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; + /** Remove the global propagator */ + disable(): void; + createBaggage: typeof createBaggage; + getBaggage: typeof getBaggage; + setBaggage: typeof setBaggage; + deleteBaggage: typeof deleteBaggage; + private _getGlobalPropagator; +} +//# sourceMappingURL=propagation.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.js b/node_modules/@opentelemetry/api/build/src/api/propagation.js new file mode 100644 index 00000000..f8ef9343 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/propagation.js @@ -0,0 +1,91 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PropagationAPI = void 0; +var global_utils_1 = require("../internal/global-utils"); +var NoopTextMapPropagator_1 = require("../propagation/NoopTextMapPropagator"); +var TextMapPropagator_1 = require("../propagation/TextMapPropagator"); +var context_helpers_1 = require("../baggage/context-helpers"); +var utils_1 = require("../baggage/utils"); +var diag_1 = require("./diag"); +var API_NAME = 'propagation'; +var NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +var PropagationAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function PropagationAPI() { + this.createBaggage = utils_1.createBaggage; + this.getBaggage = context_helpers_1.getBaggage; + this.setBaggage = context_helpers_1.setBaggage; + this.deleteBaggage = context_helpers_1.deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + PropagationAPI.getInstance = function () { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + }; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + PropagationAPI.prototype.setGlobalPropagator = function (propagator) { + return global_utils_1.registerGlobal(API_NAME, propagator, diag_1.DiagAPI.instance()); + }; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + PropagationAPI.prototype.inject = function (context, carrier, setter) { + if (setter === void 0) { setter = TextMapPropagator_1.defaultTextMapSetter; } + return this._getGlobalPropagator().inject(context, carrier, setter); + }; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + PropagationAPI.prototype.extract = function (context, carrier, getter) { + if (getter === void 0) { getter = TextMapPropagator_1.defaultTextMapGetter; } + return this._getGlobalPropagator().extract(context, carrier, getter); + }; + /** + * Return a list of all fields which may be used by the propagator. 
+ */ + PropagationAPI.prototype.fields = function () { + return this._getGlobalPropagator().fields(); + }; + /** Remove the global propagator */ + PropagationAPI.prototype.disable = function () { + global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + }; + PropagationAPI.prototype._getGlobalPropagator = function () { + return global_utils_1.getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + }; + return PropagationAPI; +}()); +exports.PropagationAPI = PropagationAPI; +//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.js.map b/node_modules/@opentelemetry/api/build/src/api/propagation.js.map new file mode 100644 index 00000000..31abb05f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/propagation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"propagation.js","sourceRoot":"","sources":["../../../src/api/propagation.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,yDAIkC;AAClC,8EAA6E;AAC7E,sEAM0C;AAC1C,8DAIoC;AACpC,0CAAiD;AACjD,+BAAiC;AAEjC,IAAM,QAAQ,GAAG,aAAa,CAAC;AAC/B,IAAM,wBAAwB,GAAG,IAAI,6CAAqB,EAAE,CAAC;AAE7D;;GAEG;AACH;IAGE,+FAA+F;IAC/F;QA8DO,kBAAa,GAAG,qBAAa,CAAC;QAE9B,eAAU,GAAG,4BAAU,CAAC;QAExB,eAAU,GAAG,4BAAU,CAAC;QAExB,kBAAa,GAAG,+BAAa,CAAC;IApEd,CAAC;IAExB,uDAAuD;IACzC,0BAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,cAAc,EAAE,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAmB,GAA1B,UAA2B,UAA6B;QACtD,OAAO,6BAAc,CAAC,QAAQ,EAAE,UAAU,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;OAMG;IACI,+BAAM,GAAb,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,SAAiC,wCAAoB;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;OAMG;IACI,gCAAO,GAAd,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,SAAiC,wCAAoB;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,+BAAM,GAAb;QACE,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC9C,CAAC;IAED,mCAAmC;IAC5B,gCAAO,GAAd;QACE,+BAAgB,CAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IAUO,6CAAoB,GAA5B;QACE,OAAO,wBAAS,CAAC,QAAQ,CAAC,IAAI,wBAAwB,CAAC;IACzD,CAAC;IACH,qBAAC;AAAD,CAAC,AA7ED,IA6EC;AA7EY,wCAAc","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator';\nimport {\n defaultTextMapGetter,\n defaultTextMapSetter,\n TextMapGetter,\n TextMapPropagator,\n TextMapSetter,\n} from '../propagation/TextMapPropagator';\nimport {\n getBaggage,\n setBaggage,\n deleteBaggage,\n} from '../baggage/context-helpers';\nimport { createBaggage } from '../baggage/utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'propagation';\nconst NOOP_TEXT_MAP_PROPAGATOR = new 
NoopTextMapPropagator();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nexport class PropagationAPI {\n private static _instance?: PropagationAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Propagator API */\n public static getInstance(): PropagationAPI {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current propagator.\n *\n * @returns true if the propagator was successfully registered, else false\n */\n public setGlobalPropagator(propagator: TextMapPropagator): boolean {\n return registerGlobal(API_NAME, propagator, DiagAPI.instance());\n }\n\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param context Context carrying tracing data to inject\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n */\n public inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter = defaultTextMapSetter\n ): void {\n return this._getGlobalPropagator().inject(context, carrier, setter);\n }\n\n /**\n * Extract context from a carrier\n *\n * @param context Context which the newly created context will inherit from\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n */\n public extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter = defaultTextMapGetter\n ): Context {\n return this._getGlobalPropagator().extract(context, carrier, getter);\n }\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n public fields(): string[] {\n return this._getGlobalPropagator().fields();\n }\n\n /** Remove the global propagator */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n\n public createBaggage = createBaggage;\n\n public getBaggage = getBaggage;\n\n public setBaggage = setBaggage;\n\n public deleteBaggage = deleteBaggage;\n\n private _getGlobalPropagator(): TextMapPropagator {\n return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.d.ts b/node_modules/@opentelemetry/api/build/src/api/trace.d.ts new file mode 100644 index 00000000..9135b74b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/trace.d.ts @@ -0,0 +1,39 @@ +import { isSpanContextValid, wrapSpanContext } from '../trace/spancontext-utils'; +import { Tracer } from '../trace/tracer'; +import { TracerProvider } from '../trace/tracer_provider'; +import { deleteSpan, getSpan, getSpanContext, setSpan, setSpanContext } from '../trace/context-utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +export declare class TraceAPI { + private static _instance?; + private _proxyTracerProvider; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Trace API */ + static getInstance(): TraceAPI; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider: TracerProvider): boolean; + /** + * Returns the global tracer provider. 
+ */ + getTracerProvider(): TracerProvider; + /** + * Returns a tracer from the global tracer provider. + */ + getTracer(name: string, version?: string): Tracer; + /** Remove the global tracer provider */ + disable(): void; + wrapSpanContext: typeof wrapSpanContext; + isSpanContextValid: typeof isSpanContextValid; + deleteSpan: typeof deleteSpan; + getSpan: typeof getSpan; + getSpanContext: typeof getSpanContext; + setSpan: typeof setSpan; + setSpanContext: typeof setSpanContext; +} +//# sourceMappingURL=trace.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.js b/node_modules/@opentelemetry/api/build/src/api/trace.js new file mode 100644 index 00000000..6adb3bd7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/trace.js @@ -0,0 +1,79 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TraceAPI = void 0; +var global_utils_1 = require("../internal/global-utils"); +var ProxyTracerProvider_1 = require("../trace/ProxyTracerProvider"); +var spancontext_utils_1 = require("../trace/spancontext-utils"); +var context_utils_1 = require("../trace/context-utils"); +var diag_1 = require("./diag"); +var API_NAME = 'trace'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +var TraceAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function TraceAPI() { + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; + this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; + this.deleteSpan = context_utils_1.deleteSpan; + this.getSpan = context_utils_1.getSpan; + this.getSpanContext = context_utils_1.getSpanContext; + this.setSpan = context_utils_1.setSpan; + this.setSpanContext = context_utils_1.setSpanContext; + } + /** Get the singleton instance of the Trace API */ + TraceAPI.getInstance = function () { + if (!this._instance) { + this._instance = new TraceAPI(); + } + return this._instance; + }; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + TraceAPI.prototype.setGlobalTracerProvider = function (provider) { + var success = global_utils_1.registerGlobal(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); + if (success) { + this._proxyTracerProvider.setDelegate(provider); + } + return success; + }; + /** + * Returns the global tracer provider. + */ + TraceAPI.prototype.getTracerProvider = function () { + return global_utils_1.getGlobal(API_NAME) || this._proxyTracerProvider; + }; + /** + * Returns a tracer from the global tracer provider. 
+ */ + TraceAPI.prototype.getTracer = function (name, version) { + return this.getTracerProvider().getTracer(name, version); + }; + /** Remove the global tracer provider */ + TraceAPI.prototype.disable = function () { + global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + }; + return TraceAPI; +}()); +exports.TraceAPI = TraceAPI; +//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.js.map b/node_modules/@opentelemetry/api/build/src/api/trace.js.map new file mode 100644 index 00000000..2a49947a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/trace.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace.js","sourceRoot":"","sources":["../../../src/api/trace.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,yDAIkC;AAClC,oEAAmE;AACnE,gEAGoC;AAGpC,wDAMgC;AAChC,+BAAiC;AAEjC,IAAM,QAAQ,GAAG,OAAO,CAAC;AAEzB;;GAEG;AACH;IAKE,+FAA+F;IAC/F;QAHQ,yBAAoB,GAAG,IAAI,yCAAmB,EAAE,CAAC;QAmDlD,oBAAe,GAAG,mCAAe,CAAC;QAElC,uBAAkB,GAAG,sCAAkB,CAAC;QAExC,eAAU,GAAG,0BAAU,CAAC;QAExB,YAAO,GAAG,uBAAO,CAAC;QAElB,mBAAc,GAAG,8BAAc,CAAC;QAEhC,YAAO,GAAG,uBAAO,CAAC;QAElB,mBAAc,GAAG,8BAAc,CAAC;IA5DhB,CAAC;IAExB,kDAAkD;IACpC,oBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,QAAQ,EAAE,CAAC;SACjC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,0CAAuB,GAA9B,UAA+B,QAAwB;QACrD,IAAM,OAAO,GAAG,6BAAc,CAC5B,QAAQ,EACR,IAAI,CAAC,oBAAoB,EACzB,cAAO,CAAC,QAAQ,EAAE,CACnB,CAAC;QACF,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;SACjD;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,oCAAiB,GAAxB;QACE,OAAO,wBAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,oBAAoB,CAAC;IAC1D,CAAC;IAED;;OAEG;IACI,4BAAS,GAAhB,UAAiB,IAAY,EAAE,OAAgB;QAC7C,OAAO,IAAI,CAAC,iBAAiB,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IAED,wCAAwC;IACjC,0BAAO,GAAd;QACE,+BAAgB,CAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;QAC/C,IAAI,CAAC,oBAAoB,GAAG,IAAI,yCAAmB,EAAE,CAAC;IACxD,CAAC;IAeH,eAAC;AAAD,CAAC,AAnED,IAmEC;AAnEY,4BAAQ","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { ProxyTracerProvider } from '../trace/ProxyTracerProvider';\nimport {\n isSpanContextValid,\n wrapSpanContext,\n} from '../trace/spancontext-utils';\nimport { Tracer } from '../trace/tracer';\nimport { TracerProvider } from '../trace/tracer_provider';\nimport {\n deleteSpan,\n getSpan,\n getSpanContext,\n setSpan,\n setSpanContext,\n} from '../trace/context-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'trace';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Tracing API\n */\nexport class TraceAPI {\n private static _instance?: TraceAPI;\n\n private _proxyTracerProvider = new ProxyTracerProvider();\n\n /** Empty private constructor 
prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Trace API */\n public static getInstance(): TraceAPI {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global tracer.\n *\n * @returns true if the tracer provider was successfully registered, else false\n */\n public setGlobalTracerProvider(provider: TracerProvider): boolean {\n const success = registerGlobal(\n API_NAME,\n this._proxyTracerProvider,\n DiagAPI.instance()\n );\n if (success) {\n this._proxyTracerProvider.setDelegate(provider);\n }\n return success;\n }\n\n /**\n * Returns the global tracer provider.\n */\n public getTracerProvider(): TracerProvider {\n return getGlobal(API_NAME) || this._proxyTracerProvider;\n }\n\n /**\n * Returns a tracer from the global tracer provider.\n */\n public getTracer(name: string, version?: string): Tracer {\n return this.getTracerProvider().getTracer(name, version);\n }\n\n /** Remove the global tracer provider */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n this._proxyTracerProvider = new ProxyTracerProvider();\n }\n\n public wrapSpanContext = wrapSpanContext;\n\n public isSpanContextValid = isSpanContextValid;\n\n public deleteSpan = deleteSpan;\n\n public getSpan = getSpan;\n\n public getSpanContext = getSpanContext;\n\n public setSpan = setSpan;\n\n public setSpanContext = setSpanContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.d.ts new file mode 100644 index 00000000..a387685b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.d.ts @@ -0,0 +1,23 @@ +import { Context } from '../context/types'; +import { Baggage } from './types'; +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getBaggage(context: Context): Baggage | undefined; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export declare function setBaggage(context: Context, baggage: Baggage): Context; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export declare function deleteBaggage(context: Context): Context; +//# sourceMappingURL=context-helpers.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js new file mode 100644 index 00000000..d8adc8cf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js @@ -0,0 +1,53 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.deleteBaggage = exports.setBaggage = exports.getBaggage = void 0; +var context_1 = require("../context/context"); +/** + * Baggage key + */ +var BAGGAGE_KEY = context_1.createContextKey('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +exports.getBaggage = getBaggage; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +exports.setBaggage = setBaggage; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +exports.deleteBaggage = deleteBaggage; +//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js.map b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js.map new file mode 100644 index 00000000..069c377b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-helpers.js","sourceRoot":"","sources":["../../../src/baggage/context-helpers.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,8CAAsD;AAItD;;GAEG;AACH,IAAM,WAAW,GAAG,0BAAgB,CAAC,2BAA2B,CAAC,CAAC;AAElE;;;;;GAKG;AACH,SAAgB,UAAU,CAAC,OAAgB;IACzC,OAAQ,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa,IAAI,SAAS,CAAC;AACjE,CAAC;AAFD,gCAEC;AAED;;;;;GAKG;AACH,SAAgB,UAAU,CAAC,OAAgB,EAAE,OAAgB;IAC3D,OAAO,OAAO,CAAC,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;AAChD,CAAC;AAFD,gCAEC;AAED;;;;GAIG;AACH,SAAgB,aAAa,CAAC,OAAgB;IAC5C,OAAO,OAAO,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAC1C,CAAC;AAFD,sCAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Baggage } from './types';\n\n/**\n * Baggage key\n */\nconst BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key');\n\n/**\n * Retrieve the current baggage from the given context\n *\n * @param {Context} Context that manage all context values\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getBaggage(context: Context): Baggage | undefined {\n return (context.getValue(BAGGAGE_KEY) as Baggage) || undefined;\n}\n\n/**\n * Store a baggage in the given context\n *\n * @param {Context} Context that manage all context 
values\n * @param {Baggage} baggage that will be set in the actual context\n */\nexport function setBaggage(context: Context, baggage: Baggage): Context {\n return context.setValue(BAGGAGE_KEY, baggage);\n}\n\n/**\n * Delete the baggage stored in the given context\n *\n * @param {Context} Context that manage all context values\n */\nexport function deleteBaggage(context: Context): Context {\n return context.deleteValue(BAGGAGE_KEY);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.d.ts new file mode 100644 index 00000000..e6b45540 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.d.ts @@ -0,0 +1,12 @@ +import type { Baggage, BaggageEntry } from '../types'; +export declare class BaggageImpl implements Baggage { + private _entries; + constructor(entries?: Map); + getEntry(key: string): BaggageEntry | undefined; + getAllEntries(): [string, BaggageEntry][]; + setEntry(key: string, entry: BaggageEntry): BaggageImpl; + removeEntry(key: string): BaggageImpl; + removeEntries(...keys: string[]): BaggageImpl; + clear(): BaggageImpl; +} +//# sourceMappingURL=baggage-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js new file mode 100644 index 00000000..47bfe655 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js @@ -0,0 +1,64 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaggageImpl = void 0; +var BaggageImpl = /** @class */ (function () { + function BaggageImpl(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + BaggageImpl.prototype.getEntry = function (key) { + var entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + }; + BaggageImpl.prototype.getAllEntries = function () { + return Array.from(this._entries.entries()).map(function (_a) { + var k = _a[0], v = _a[1]; + return [k, v]; + }); + }; + BaggageImpl.prototype.setEntry = function (key, entry) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + }; + BaggageImpl.prototype.removeEntry = function (key) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + }; + BaggageImpl.prototype.removeEntries = function () { + var keys = []; + for (var _i = 0; _i < arguments.length; _i++) { + keys[_i] = arguments[_i]; + } + var newBaggage = new BaggageImpl(this._entries); + for (var _a = 0, keys_1 = keys; _a < keys_1.length; _a++) { + var key = keys_1[_a]; + newBaggage._entries.delete(key); + } + return newBaggage; + }; + BaggageImpl.prototype.clear = function () { + return new BaggageImpl(); + }; + return BaggageImpl; +}()); +exports.BaggageImpl = BaggageImpl; +//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js.map b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js.map new file mode 100644 index 00000000..e3a998dd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js.map @@ -0,0 +1 @@ +{"version":3,"file":"baggage-impl.js","sourceRoot":"","sources":["../../../../src/baggage/internal/baggage-impl.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH;IAGE,qBAAY,OAAmC;QAC7C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;IACzD,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW;QAClB,IAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;IAClC,CAAC;IAED,mCAAa,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,UAAC,EAAM;gBAAL,CAAC,QAAA,EAAE,CAAC,QAAA;YAAM,OAAA,CAAC,CAAC,EAAE,CAAC,CAAC;QAAN,CAAM,CAAC,CAAC;IACrE,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW,EAAE,KAAmB;QACvC,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QACpC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,iCAAW,GAAX,UAAY,GAAW;QACrB,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAChC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,mCAAa,GAAb;QAAc,cAAiB;aAAjB,UAAiB,EAAjB,qBAAiB,EAAjB,IAAiB;YAAjB,yBAAiB;;QAC7B,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,KAAkB,UAAI,EAAJ,aAAI,EAAJ,kBAAI,EAAJ,IAAI,EAAE;YAAnB,IAAM,GAAG,aAAA;YACZ,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACjC;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,2BAAK,GAAL;QACE,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;IACH,kBAAC;AAAD,CAAC,AA3CD,IA2CC;AA3CY,kCAAW","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" 
BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport type { Baggage, BaggageEntry } from '../types';\n\nexport class BaggageImpl implements Baggage {\n private _entries: Map;\n\n constructor(entries?: Map) {\n this._entries = entries ? new Map(entries) : new Map();\n }\n\n getEntry(key: string): BaggageEntry | undefined {\n const entry = this._entries.get(key);\n if (!entry) {\n return undefined;\n }\n\n return Object.assign({}, entry);\n }\n\n getAllEntries(): [string, BaggageEntry][] {\n return Array.from(this._entries.entries()).map(([k, v]) => [k, v]);\n }\n\n setEntry(key: string, entry: BaggageEntry): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.set(key, entry);\n return newBaggage;\n }\n\n removeEntry(key: string): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.delete(key);\n return newBaggage;\n }\n\n removeEntries(...keys: string[]): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n for (const key of keys) {\n newBaggage._entries.delete(key);\n }\n return newBaggage;\n }\n\n clear(): BaggageImpl {\n return new BaggageImpl();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.d.ts new file mode 100644 index 00000000..9cd991c1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.d.ts @@ -0,0 +1,5 @@ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export declare const baggageEntryMetadataSymbol: unique symbol; +//# sourceMappingURL=symbol.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js new file mode 100644 index 00000000..324c216d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js @@ -0,0 +1,23 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataSymbol = void 0; +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js.map b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js.map new file mode 100644 index 00000000..497f3627 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js.map @@ -0,0 +1 @@ +{"version":3,"file":"symbol.js","sourceRoot":"","sources":["../../../../src/baggage/internal/symbol.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH;;GAEG;AACU,QAAA,0BAA0B,GAAG,MAAM,CAAC,sBAAsB,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Symbol used to make BaggageEntryMetadata an opaque type\n */\nexport const baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata');\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/types.d.ts new file mode 100644 index 00000000..32fa0ec6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/types.d.ts @@ -0,0 +1,60 @@ +import { baggageEntryMetadataSymbol } from './internal/symbol'; +export interface BaggageEntry { + /** `String` value of the `BaggageEntry`. */ + value: string; + /** + * Metadata is an optional string property defined by the W3C baggage specification. + * It currently has no special meaning defined by the specification. + */ + metadata?: BaggageEntryMetadata; +} +/** + * Serializable Metadata defined by the W3C baggage specification. + * It currently has no special meaning defined by the OpenTelemetry or W3C. + */ +export declare type BaggageEntryMetadata = { + toString(): string; +} & { + __TYPE__: typeof baggageEntryMetadataSymbol; +}; +/** + * Baggage represents collection of key-value pairs with optional metadata. + * Each key of Baggage is associated with exactly one value. + * Baggage may be used to annotate and enrich telemetry data. 
+ */ +export interface Baggage { + /** + * Get an entry from Baggage if it exists + * + * @param key The key which identifies the BaggageEntry + */ + getEntry(key: string): BaggageEntry | undefined; + /** + * Get a list of all entries in the Baggage + */ + getAllEntries(): [string, BaggageEntry][]; + /** + * Returns a new baggage with the entries from the current bag and the specified entry + * + * @param key string which identifies the baggage entry + * @param entry BaggageEntry for the given key + */ + setEntry(key: string, entry: BaggageEntry): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entry + * + * @param key key identifying the entry to be removed + */ + removeEntry(key: string): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entries + * + * @param key keys identifying the entries to be removed + */ + removeEntries(...key: string[]): Baggage; + /** + * Returns a new baggage with no entries + */ + clear(): Baggage; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.js b/node_modules/@opentelemetry/api/build/src/baggage/types.js new file mode 100644 index 00000000..c428c6d0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/types.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.js.map b/node_modules/@opentelemetry/api/build/src/baggage/types.js.map new file mode 100644 index 00000000..2a00da49 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/baggage/types.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\n\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface BaggageEntry {\n /** `String` value of the `BaggageEntry`. 
*/\n value: string;\n /**\n * Metadata is an optional string property defined by the W3C baggage specification.\n * It currently has no special meaning defined by the specification.\n */\n metadata?: BaggageEntryMetadata;\n}\n\n/**\n * Serializable Metadata defined by the W3C baggage specification.\n * It currently has no special meaning defined by the OpenTelemetry or W3C.\n */\nexport type BaggageEntryMetadata = { toString(): string } & {\n __TYPE__: typeof baggageEntryMetadataSymbol;\n};\n\n/**\n * Baggage represents collection of key-value pairs with optional metadata.\n * Each key of Baggage is associated with exactly one value.\n * Baggage may be used to annotate and enrich telemetry data.\n */\nexport interface Baggage {\n /**\n * Get an entry from Baggage if it exists\n *\n * @param key The key which identifies the BaggageEntry\n */\n getEntry(key: string): BaggageEntry | undefined;\n\n /**\n * Get a list of all entries in the Baggage\n */\n getAllEntries(): [string, BaggageEntry][];\n\n /**\n * Returns a new baggage with the entries from the current bag and the specified entry\n *\n * @param key string which identifies the baggage entry\n * @param entry BaggageEntry for the given key\n */\n setEntry(key: string, entry: BaggageEntry): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entry\n *\n * @param key key identifying the entry to be removed\n */\n removeEntry(key: string): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entries\n *\n * @param key keys identifying the entries to be removed\n */\n removeEntries(...key: string[]): Baggage;\n\n /**\n * Returns a new baggage with no entries\n */\n clear(): Baggage;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/utils.d.ts new file mode 100644 index 00000000..9955d9e2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/utils.d.ts @@ -0,0 +1,15 @@ +import { Baggage, BaggageEntry, BaggageEntryMetadata } from './types'; +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export declare function createBaggage(entries?: Record): Baggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export declare function baggageEntryMetadataFromString(str: string): BaggageEntryMetadata; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.js b/node_modules/@opentelemetry/api/build/src/baggage/utils.js new file mode 100644 index 00000000..782aaf70 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/utils.js @@ -0,0 +1,52 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; +var diag_1 = require("../api/diag"); +var baggage_impl_1 = require("./internal/baggage-impl"); +var symbol_1 = require("./internal/symbol"); +var diag = diag_1.DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +function createBaggage(entries) { + if (entries === void 0) { entries = {}; } + return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); +} +exports.createBaggage = createBaggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error("Cannot create baggage metadata from unknown type: " + typeof str); + str = ''; + } + return { + __TYPE__: symbol_1.baggageEntryMetadataSymbol, + toString: function () { + return str; + }, + }; +} +exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.js.map b/node_modules/@opentelemetry/api/build/src/baggage/utils.js.map new file mode 100644 index 00000000..f25f316a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/baggage/utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,oCAAsC;AACtC,wDAAsD;AACtD,4CAA+D;AAG/D,IAAM,IAAI,GAAG,cAAO,CAAC,QAAQ,EAAE,CAAC;AAEhC;;;;GAIG;AACH,SAAgB,aAAa,CAC3B,OAA0C;IAA1C,wBAAA,EAAA,YAA0C;IAE1C,OAAO,IAAI,0BAAW,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC3D,CAAC;AAJD,sCAIC;AAED;;;;;GAKG;AACH,SAAgB,8BAA8B,CAC5C,GAAW;IAEX,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,IAAI,CAAC,KAAK,CACR,uDAAqD,OAAO,GAAK,CAClE,CAAC;QACF,GAAG,GAAG,EAAE,CAAC;KACV;IAED,OAAO;QACL,QAAQ,EAAE,mCAA0B;QACpC,QAAQ;YACN,OAAO,GAAG,CAAC;QACb,CAAC;KACF,CAAC;AACJ,CAAC;AAhBD,wEAgBC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagAPI } from '../api/diag';\nimport { BaggageImpl } from './internal/baggage-impl';\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\nimport { Baggage, BaggageEntry, BaggageEntryMetadata } from './types';\n\nconst diag = DiagAPI.instance();\n\n/**\n * Create a new Baggage with optional entries\n *\n * @param entries An array of baggage entries the new baggage should contain\n */\nexport function createBaggage(\n entries: Record = {}\n): Baggage {\n return new BaggageImpl(new Map(Object.entries(entries)));\n}\n\n/**\n * Create a serializable 
BaggageEntryMetadata object from a string.\n *\n * @param str string metadata. Format is currently not defined by the spec and has no special meaning.\n *\n */\nexport function baggageEntryMetadataFromString(\n str: string\n): BaggageEntryMetadata {\n if (typeof str !== 'string') {\n diag.error(\n `Cannot create baggage metadata from unknown type: ${typeof str}`\n );\n str = '';\n }\n\n return {\n __TYPE__: baggageEntryMetadataSymbol,\n toString() {\n return str;\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.d.ts b/node_modules/@opentelemetry/api/build/src/common/Attributes.d.ts new file mode 100644 index 00000000..19994fb2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Attributes.d.ts @@ -0,0 +1,15 @@ +/** + * Attributes is a map from string to attribute values. + * + * Note: only the own enumerable keys are counted as valid attribute keys. + */ +export interface Attributes { + [attributeKey: string]: AttributeValue | undefined; +} +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type AttributeValue = string | number | boolean | Array | Array | Array; +//# sourceMappingURL=Attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.js b/node_modules/@opentelemetry/api/build/src/common/Attributes.js new file mode 100644 index 00000000..684c93db --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Attributes.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.js.map b/node_modules/@opentelemetry/api/build/src/common/Attributes.js.map new file mode 100644 index 00000000..ff80e8e5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Attributes.js","sourceRoot":"","sources":["../../../src/common/Attributes.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Attributes is a map from string to attribute values.\n *\n * Note: only the own enumerable keys are counted as valid attribute keys.\n */\nexport interface Attributes {\n [attributeKey: string]: AttributeValue | undefined;\n}\n\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport type AttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.d.ts b/node_modules/@opentelemetry/api/build/src/common/Exception.d.ts new file mode 100644 index 00000000..e175a7fd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Exception.d.ts @@ -0,0 +1,26 @@ +interface ExceptionWithCode { + code: string | number; + name?: string; + message?: string; + stack?: string; +} +interface ExceptionWithMessage { + code?: string | number; + message: string; + name?: string; + stack?: string; +} +interface ExceptionWithName { + code?: string | number; + message?: string; + name: string; + stack?: string; +} +/** + * Defines Exception. + * + * string or an object with one of (message or name or code) and optional stack + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; +export {}; +//# sourceMappingURL=Exception.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.js b/node_modules/@opentelemetry/api/build/src/common/Exception.js new file mode 100644 index 00000000..ed450aef --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Exception.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.js.map b/node_modules/@opentelemetry/api/build/src/common/Exception.js.map new file mode 100644 index 00000000..459c350a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Exception.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Exception.js","sourceRoot":"","sources":["../../../src/common/Exception.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\ninterface ExceptionWithCode {\n code: string | number;\n name?: string;\n message?: string;\n stack?: string;\n}\n\ninterface ExceptionWithMessage {\n code?: string | number;\n message: string;\n name?: string;\n stack?: string;\n}\n\ninterface ExceptionWithName {\n code?: string | number;\n message?: string;\n name: string;\n stack?: string;\n}\n\n/**\n * Defines Exception.\n *\n * string or an object with one of (message or name or code) and optional stack\n */\nexport type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.d.ts b/node_modules/@opentelemetry/api/build/src/common/Time.d.ts new file mode 100644 index 00000000..cc3c502c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Time.d.ts @@ -0,0 +1,20 @@ +/** + * Defines High-Resolution Time. + * + * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970. + * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds. + * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150. + * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds: + * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210. + * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds: + * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000. + * This is represented in HrTime format as [1609504210, 150000000]. + */ +export declare type HrTime = [number, number]; +/** + * Defines TimeInput. 
+ * + * hrtime, epoch milliseconds, performance.now() or Date + */ +export declare type TimeInput = HrTime | number | Date; +//# sourceMappingURL=Time.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.js b/node_modules/@opentelemetry/api/build/src/common/Time.js new file mode 100644 index 00000000..1faaf698 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Time.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.js.map b/node_modules/@opentelemetry/api/build/src/common/Time.js.map new file mode 100644 index 00000000..ae124f03 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Time.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Time.js","sourceRoot":"","sources":["../../../src/common/Time.ts"],"names":[],"mappings":"","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Defines High-Resolution Time.\n *\n * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970.\n * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds.\n * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150.\n * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds:\n * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210.\n * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds:\n * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000.\n * This is represented in HrTime format as [1609504210, 150000000].\n */\nexport type HrTime = [number, number];\n\n/**\n * Defines TimeInput.\n *\n * hrtime, epoch milliseconds, performance.now() or Date\n */\nexport type TimeInput = HrTime | number | Date;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.d.ts b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.d.ts new file mode 100644 index 00000000..48a16597 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.d.ts @@ -0,0 +1,9 @@ +import * as types from './types'; +export declare class NoopContextManager implements types.ContextManager { + active(): types.Context; + with ReturnType>(_context: types.Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + bind(_context: types.Context, target: T): T; + enable(): this; + disable(): this; +} +//# sourceMappingURL=NoopContextManager.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js 
b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js new file mode 100644 index 00000000..b1e96dd7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js @@ -0,0 +1,50 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopContextManager = void 0; +var context_1 = require("./context"); +var NoopContextManager = /** @class */ (function () { + function NoopContextManager() { + } + NoopContextManager.prototype.active = function () { + return context_1.ROOT_CONTEXT; + }; + NoopContextManager.prototype.with = function (_context, fn, thisArg) { + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return fn.call.apply(fn, __spreadArray([thisArg], args)); + }; + NoopContextManager.prototype.bind = function (_context, target) { + return target; + }; + NoopContextManager.prototype.enable = function () { + return this; + }; + NoopContextManager.prototype.disable = function () { + return this; + }; + return NoopContextManager; +}()); +exports.NoopContextManager = NoopContextManager; +//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js.map b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js.map new file mode 100644 index 00000000..3c8e2fb5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopContextManager.js","sourceRoot":"","sources":["../../../src/context/NoopContextManager.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;AAEH,qCAAyC;AAGzC;IAAA;IAyBA,CAAC;IAxBC,mCAAM,GAAN;QACE,OAAO,sBAAY,CAAC;IACtB,CAAC;IAED,iCAAI,GAAJ,UACE,QAAuB,EACvB,EAAK,EACL,OAA8B;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,EAAE,CAAC,IAAI,OAAP,EAAE,iBAAM,OAAO,GAAK,IAAI,GAAE;IACnC,CAAC;IAED,iCAAI,GAAJ,UAAQ,QAAuB,EAAE,MAAS;QACxC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,mCAAM,GAAN;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IAED,oCAAO,GAAP;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IACH,yBAAC;AAAD,CAAC,AAzBD,IAyBC;AAzBY,gDAAkB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing 
permissions and\n * limitations under the License.\n */\n\nimport { ROOT_CONTEXT } from './context';\nimport * as types from './types';\n\nexport class NoopContextManager implements types.ContextManager {\n active(): types.Context {\n return ROOT_CONTEXT;\n }\n\n with ReturnType>(\n _context: types.Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return fn.call(thisArg, ...args);\n }\n\n bind(_context: types.Context, target: T): T {\n return target;\n }\n\n enable(): this {\n return this;\n }\n\n disable(): this {\n return this;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.d.ts b/node_modules/@opentelemetry/api/build/src/context/context.d.ts new file mode 100644 index 00000000..8be02594 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/context.d.ts @@ -0,0 +1,6 @@ +import { Context } from './types'; +/** Get a key to uniquely identify a context value */ +export declare function createContextKey(description: string): symbol; +/** The root context is used as the default parent context when there is no active context */ +export declare const ROOT_CONTEXT: Context; +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.js b/node_modules/@opentelemetry/api/build/src/context/context.js new file mode 100644 index 00000000..67334f56 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/context.js @@ -0,0 +1,56 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ROOT_CONTEXT = exports.createContextKey = void 0; +/** Get a key to uniquely identify a context value */ +function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +exports.createContextKey = createContextKey; +var BaseContext = /** @class */ (function () { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + function BaseContext(parentContext) { + // for minification + var self = this; + self._currentContext = parentContext ? 
new Map(parentContext) : new Map(); + self.getValue = function (key) { return self._currentContext.get(key); }; + self.setValue = function (key, value) { + var context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = function (key) { + var context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } + return BaseContext; +}()); +/** The root context is used as the default parent context when there is no active context */ +exports.ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.js.map b/node_modules/@opentelemetry/api/build/src/context/context.js.map new file mode 100644 index 00000000..7f5dc892 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/context/context.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,qDAAqD;AACrD,SAAgB,gBAAgB,CAAC,WAAmB;IAClD,0EAA0E;IAC1E,2EAA2E;IAC3E,wEAAwE;IACxE,mDAAmD;IACnD,EAAE;IACF,8EAA8E;IAC9E,OAAO,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AACjC,CAAC;AARD,4CAQC;AAED;IAGE;;;;OAIG;IACH,qBAAY,aAAoC;QAC9C,mBAAmB;QACnB,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;QAE1E,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,IAAK,OAAA,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,EAA7B,CAA6B,CAAC;QAE/D,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,EAAE,KAAc;YAC1C,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;YACxC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;QAEF,IAAI,CAAC,WAAW,GAAG,UAAC,GAAW;YAC7B,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YACpC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;IACJ,CAAC;IAyBH,kBAAC;AAAD,CAAC,AApDD,IAoDC;AAED,6FAA6F;AAChF,QAAA,YAAY,GAAY,IAAI,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from './types';\n\n/** Get a key to uniquely identify a context value */\nexport function createContextKey(description: string) {\n // The specification states that for the same input, multiple calls should\n // return different keys. 
Due to the nature of the JS dependency management\n // system, this creates problems where multiple versions of some package\n // could hold different keys for the same property.\n //\n // Therefore, we use Symbol.for which returns the same key for the same input.\n return Symbol.for(description);\n}\n\nclass BaseContext implements Context {\n private _currentContext!: Map;\n\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n constructor(parentContext?: Map) {\n // for minification\n const self = this;\n\n self._currentContext = parentContext ? new Map(parentContext) : new Map();\n\n self.getValue = (key: symbol) => self._currentContext.get(key);\n\n self.setValue = (key: symbol, value: unknown): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n\n self.deleteValue = (key: symbol): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n }\n\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n public getValue!: (key: symbol) => unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n public setValue!: (key: symbol, value: unknown) => Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n public deleteValue!: (key: symbol) => Context;\n}\n\n/** The root context is used as the default parent context when there is no active context */\nexport const ROOT_CONTEXT: Context = new BaseContext();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.d.ts b/node_modules/@opentelemetry/api/build/src/context/types.d.ts new file mode 100644 index 00000000..7e866320 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/types.d.ts @@ -0,0 +1,52 @@ +export interface Context { + /** + * Get a value from the context. + * + * @param key key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key context key for which to set the value + * @param value value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. 
+ * + * @param key context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} +export interface ContextManager { + /** + * Get the current active context + */ + active(): Context; + /** + * Run the fn callback with object set as the current active context + * @param context Any object to set as the current active context + * @param fn A callback to be immediately run within a specific context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind an object as the current context (or a specific one) + * @param [context] Optionally specify the context which you want to assign + * @param target Any object to which a context need to be set + */ + bind(context: Context, target: T): T; + /** + * Enable context management + */ + enable(): this; + /** + * Disable context management + */ + disable(): this; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.js b/node_modules/@opentelemetry/api/build/src/context/types.js new file mode 100644 index 00000000..c428c6d0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/types.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.js.map b/node_modules/@opentelemetry/api/build/src/context/types.js.map new file mode 100644 index 00000000..8bde9ce3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/context/types.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n getValue(key: symbol): unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n\nexport interface ContextManager {\n /**\n * Get the current active context\n */\n active(): Context;\n\n /**\n * Run the fn callback with object set as the current active context\n * @param context Any object to set as the current active context\n * @param fn A callback to be immediately run within a specific context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType;\n\n /**\n * Bind an object as the current context (or a specific one)\n * @param [context] Optionally specify the context which you want to assign\n * @param target Any object to which a context need to be set\n */\n bind(context: Context, target: T): T;\n\n /**\n * Enable context management\n */\n enable(): this;\n\n /**\n * Disable context management\n */\n disable(): this;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.d.ts new file mode 100644 index 00000000..f0609503 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.d.ts @@ -0,0 +1,20 @@ +import { ComponentLoggerOptions, DiagLogger } from './types'; +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. 
+ * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +export declare class DiagComponentLogger implements DiagLogger { + private _namespace; + constructor(props: ComponentLoggerOptions); + debug(...args: any[]): void; + error(...args: any[]): void; + info(...args: any[]): void; + warn(...args: any[]): void; + verbose(...args: any[]): void; +} +//# sourceMappingURL=ComponentLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js new file mode 100644 index 00000000..0a6a0e0b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js @@ -0,0 +1,80 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagComponentLogger = void 0; +var global_utils_1 = require("../internal/global-utils"); +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. 
+ * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +var DiagComponentLogger = /** @class */ (function () { + function DiagComponentLogger(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + DiagComponentLogger.prototype.debug = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('debug', this._namespace, args); + }; + DiagComponentLogger.prototype.error = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('error', this._namespace, args); + }; + DiagComponentLogger.prototype.info = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('info', this._namespace, args); + }; + DiagComponentLogger.prototype.warn = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('warn', this._namespace, args); + }; + DiagComponentLogger.prototype.verbose = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('verbose', this._namespace, args); + }; + return DiagComponentLogger; +}()); +exports.DiagComponentLogger = DiagComponentLogger; +function logProxy(funcName, namespace, args) { + var logger = global_utils_1.getGlobal('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName].apply(logger, args); +} +//# sourceMappingURL=ComponentLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js.map new file mode 100644 index 00000000..4a876170 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ComponentLogger.js","sourceRoot":"","sources":["../../../src/diag/ComponentLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,yDAAqD;AAGrD;;;;;;;;GAQG;AACH;IAGE,6BAAY,KAA6B;QACvC,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,SAAS,IAAI,qBAAqB,CAAC;IAC7D,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,qCAAO,GAAd;QAAe,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QAC3B,OAAO,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACpD,CAAC;IACH,0BAAC;AAAD,CAAC,AA1BD,IA0BC;AA1BY,kDAAmB;AA4BhC,SAAS,QAAQ,CACf,QAA0B,EAC1B,SAAiB,EACjB,IAAS;IAET,IAAM,MAAM,GAAG,wBAAS,CAAC,MAAM,CAAC,CAAC;IACjC,6BAA6B;IAC7B,IAAI,CAAC,MAAM,EAAE;QACX,OAAO;KACR;IAED,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,EAAe,IAAoC,EAAE;AACpE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * 
Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getGlobal } from '../internal/global-utils';\nimport { ComponentLoggerOptions, DiagLogger, DiagLogFunction } from './types';\n\n/**\n * Component Logger which is meant to be used as part of any component which\n * will add automatically additional namespace in front of the log message.\n * It will then forward all message to global diag logger\n * @example\n * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' });\n * cLogger.debug('test');\n * // @opentelemetry/instrumentation-http test\n */\nexport class DiagComponentLogger implements DiagLogger {\n private _namespace: string;\n\n constructor(props: ComponentLoggerOptions) {\n this._namespace = props.namespace || 'DiagComponentLogger';\n }\n\n public debug(...args: any[]): void {\n return logProxy('debug', this._namespace, args);\n }\n\n public error(...args: any[]): void {\n return logProxy('error', this._namespace, args);\n }\n\n public info(...args: any[]): void {\n return logProxy('info', this._namespace, args);\n }\n\n public warn(...args: any[]): void {\n return logProxy('warn', this._namespace, args);\n }\n\n public verbose(...args: any[]): void {\n return logProxy('verbose', this._namespace, args);\n }\n}\n\nfunction logProxy(\n funcName: keyof DiagLogger,\n namespace: string,\n args: any\n): void {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) {\n return;\n }\n\n args.unshift(namespace);\n return logger[funcName](...(args as Parameters));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.d.ts new file mode 100644 index 00000000..fa3db1e3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.d.ts @@ -0,0 +1,38 @@ +import { DiagLogger, DiagLogFunction } from './types'; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. + * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +export declare class DiagConsoleLogger implements DiagLogger { + constructor(); + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. 
Useful scenarios would be to log the execution + * order of async operations + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +//# sourceMappingURL=consoleLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js new file mode 100644 index 00000000..f0ea94ef --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js @@ -0,0 +1,62 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagConsoleLogger = void 0; +var consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
+ * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +var DiagConsoleLogger = /** @class */ (function () { + function DiagConsoleLogger() { + function _consoleFunc(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + var theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (var i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } + return DiagConsoleLogger; +}()); +exports.DiagConsoleLogger = DiagConsoleLogger; +//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js.map new file mode 100644 index 00000000..3b71b2f7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"consoleLogger.js","sourceRoot":"","sources":["../../../src/diag/consoleLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH,IAAM,UAAU,GAAiD;IAC/D,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,OAAO,EAAE;CAC7B,CAAC;AAEF;;;;GAIG;AACH;IACE;QACE,SAAS,YAAY,CAAC,QAAwB;YAC5C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAI,OAAO,EAAE;oBACX,mFAAmF;oBACnF,sCAAsC;oBACtC,IAAI,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;oBAChC,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,6CAA6C;wBAC7C,sCAAsC;wBACtC,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC;qBACvB;oBAED,uBAAuB;oBACvB,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,OAAO,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACrC;iBACF;YACH,CAAC,CAAC;QACJ,CAAC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACvD;IACH,CAAC;IAkCH,wBAAC;AAAD,CAAC,AA3DD,IA2DC;AA3DY,8CAAiB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger, DiagLogFunction } from './types';\n\ntype ConsoleMapKeys = 'error' | 'warn' | 'info' | 'debug' | 'trace';\nconst consoleMap: { n: keyof DiagLogger; c: ConsoleMapKeys }[] = [\n { n: 'error', c: 'error' },\n { n: 'warn', c: 'warn' },\n { n: 'info', c: 'info' },\n { n: 'debug', c: 'debug' },\n { n: 'verbose', c: 'trace' },\n];\n\n/**\n * A 
simple Immutable Console based diagnostic logger which will output any messages to the Console.\n * If you want to limit the amount of logging to a specific level or lower use the\n * {@link createLogLevelDiagLogger}\n */\nexport class DiagConsoleLogger implements DiagLogger {\n constructor() {\n function _consoleFunc(funcName: ConsoleMapKeys): DiagLogFunction {\n return function (...args) {\n if (console) {\n // Some environments only expose the console when the F12 developer console is open\n // eslint-disable-next-line no-console\n let theFunc = console[funcName];\n if (typeof theFunc !== 'function') {\n // Not all environments support all functions\n // eslint-disable-next-line no-console\n theFunc = console.log;\n }\n\n // One last final check\n if (typeof theFunc === 'function') {\n return theFunc.apply(console, args);\n }\n }\n };\n }\n\n for (let i = 0; i < consoleMap.length; i++) {\n this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c);\n }\n }\n\n /** Log an error scenario that was not expected and caused the requested operation to fail. */\n public error!: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n public warn!: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n public info!: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario. Useful scenarios would be to log the execution\n * order of async operations\n */\n public debug!: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n public verbose!: DiagLogFunction;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/index.d.ts b/node_modules/@opentelemetry/api/build/src/diag/index.d.ts new file mode 100644 index 00000000..11b35740 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/index.d.ts @@ -0,0 +1,3 @@ +export * from './consoleLogger'; +export * from './types'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/index.js b/node_modules/@opentelemetry/api/build/src/diag/index.js new file mode 100644 index 00000000..45c50d90 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/index.js @@ -0,0 +1,30 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./consoleLogger"), exports); +__exportStar(require("./types"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/index.js.map b/node_modules/@opentelemetry/api/build/src/diag/index.js.map new file mode 100644 index 00000000..c0a526c1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/diag/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,kDAAgC;AAChC,0CAAwB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './consoleLogger';\nexport * from './types';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.d.ts new file mode 100644 index 00000000..890b9f1e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.d.ts @@ -0,0 +1,3 @@ +import { DiagLogger, DiagLogLevel } from '../types'; +export declare function createLogLevelDiagLogger(maxLevel: DiagLogLevel, logger: DiagLogger): DiagLogger; +//# sourceMappingURL=logLevelLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js new file mode 100644 index 00000000..3adc6659 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js @@ -0,0 +1,45 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createLogLevelDiagLogger = void 0; +var types_1 = require("../types"); +function createLogLevelDiagLogger(maxLevel, logger) { + if (maxLevel < types_1.DiagLogLevel.NONE) { + maxLevel = types_1.DiagLogLevel.NONE; + } + else if (maxLevel > types_1.DiagLogLevel.ALL) { + maxLevel = types_1.DiagLogLevel.ALL; + } + // In case the logger is null or undefined + logger = logger || {}; + function _filterFunc(funcName, theLevel) { + var theFunc = logger[funcName]; + if (typeof theFunc === 'function' && maxLevel >= theLevel) { + return theFunc.bind(logger); + } + return function () { }; + } + return { + error: _filterFunc('error', types_1.DiagLogLevel.ERROR), + warn: _filterFunc('warn', types_1.DiagLogLevel.WARN), + info: _filterFunc('info', types_1.DiagLogLevel.INFO), + debug: _filterFunc('debug', types_1.DiagLogLevel.DEBUG), + verbose: _filterFunc('verbose', types_1.DiagLogLevel.VERBOSE), + }; +} +exports.createLogLevelDiagLogger = createLogLevelDiagLogger; +//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js.map new file mode 100644 index 00000000..b56d1b23 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logLevelLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/logLevelLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,kCAAqE;AAErE,SAAgB,wBAAwB,CACtC,QAAsB,EACtB,MAAkB;IAElB,IAAI,QAAQ,GAAG,oBAAY,CAAC,IAAI,EAAE;QAChC,QAAQ,GAAG,oBAAY,CAAC,IAAI,CAAC;KAC9B;SAAM,IAAI,QAAQ,GAAG,oBAAY,CAAC,GAAG,EAAE;QACtC,QAAQ,GAAG,oBAAY,CAAC,GAAG,CAAC;KAC7B;IAED,0CAA0C;IAC1C,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IAEtB,SAAS,WAAW,CAClB,QAA0B,EAC1B,QAAsB;QAEtB,IAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC;QAEjC,IAAI,OAAO,OAAO,KAAK,UAAU,IAAI,QAAQ,IAAI,QAAQ,EAAE;YACzD,OAAO,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC7B;QACD,OAAO,cAAa,CAAC,CAAC;IACxB,CAAC;IAED,OAAO;QACL,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,oBAAY,CAAC,KAAK,CAAC;QAC/C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,oBAAY,CAAC,IAAI,CAAC;QAC5C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,oBAAY,CAAC,IAAI,CAAC;QAC5C,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,oBAAY,CAAC,KAAK,CAAC;QAC/C,OAAO,EAAE,WAAW,CAAC,SAAS,EAAE,oBAAY,CAAC,OAAO,CAAC;KACtD,CAAC;AACJ,CAAC;AAhCD,4DAgCC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogFunction, DiagLogger, DiagLogLevel } from '../types';\n\nexport function createLogLevelDiagLogger(\n maxLevel: DiagLogLevel,\n logger: DiagLogger\n): DiagLogger {\n if (maxLevel < DiagLogLevel.NONE) {\n maxLevel = DiagLogLevel.NONE;\n } else if (maxLevel > DiagLogLevel.ALL) {\n maxLevel = DiagLogLevel.ALL;\n }\n\n // In case the logger is null or undefined\n logger = logger || {};\n\n function _filterFunc(\n funcName: keyof DiagLogger,\n theLevel: 
DiagLogLevel\n ): DiagLogFunction {\n const theFunc = logger[funcName];\n\n if (typeof theFunc === 'function' && maxLevel >= theLevel) {\n return theFunc.bind(logger);\n }\n return function () {};\n }\n\n return {\n error: _filterFunc('error', DiagLogLevel.ERROR),\n warn: _filterFunc('warn', DiagLogLevel.WARN),\n info: _filterFunc('info', DiagLogLevel.INFO),\n debug: _filterFunc('debug', DiagLogLevel.DEBUG),\n verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE),\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.d.ts new file mode 100644 index 00000000..ac71ee3b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.d.ts @@ -0,0 +1,8 @@ +import { DiagLogger } from '../types'; +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. + * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export declare function createNoopDiagLogger(): DiagLogger; +//# sourceMappingURL=noopLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js new file mode 100644 index 00000000..40916314 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js @@ -0,0 +1,35 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createNoopDiagLogger = void 0; +function noopLogFunction() { } +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. 
+ * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +function createNoopDiagLogger() { + return { + verbose: noopLogFunction, + debug: noopLogFunction, + info: noopLogFunction, + warn: noopLogFunction, + error: noopLogFunction, + }; +} +exports.createNoopDiagLogger = createNoopDiagLogger; +//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js.map new file mode 100644 index 00000000..20e0e81e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"noopLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/noopLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,SAAS,eAAe,KAAI,CAAC;AAE7B;;;;GAIG;AACH,SAAgB,oBAAoB;IAClC,OAAO;QACL,OAAO,EAAE,eAAe;QACxB,KAAK,EAAE,eAAe;QACtB,IAAI,EAAE,eAAe;QACrB,IAAI,EAAE,eAAe;QACrB,KAAK,EAAE,eAAe;KACvB,CAAC;AACJ,CAAC;AARD,oDAQC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger } from '../types';\n\nfunction noopLogFunction() {}\n\n/**\n * Returns a No-Op Diagnostic logger where all messages do nothing.\n * @implements {@link DiagLogger}\n * @returns {DiagLogger}\n */\nexport function createNoopDiagLogger(): DiagLogger {\n return {\n verbose: noopLogFunction,\n debug: noopLogFunction,\n info: noopLogFunction,\n warn: noopLogFunction,\n error: noopLogFunction,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.d.ts b/node_modules/@opentelemetry/api/build/src/diag/types.d.ts new file mode 100644 index 00000000..92b0c578 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/types.d.ts @@ -0,0 +1,70 @@ +export declare type DiagLogFunction = (message: string, ...args: unknown[]) => void; +/** + * Defines an internal diagnostic logger interface which is used to log internal diagnostic + * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function. + * API provided implementations include :- + * - a No-Op {@link createNoopDiagLogger} + * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger} + * - a general Console {@link DiagConsoleLogger} version. + */ +export interface DiagLogger { + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. 
+ */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. + * For example: Logging the order of execution of async operations. + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +export declare enum DiagLogLevel { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + NONE = 0, + /** Identifies an error scenario */ + ERROR = 30, + /** Identifies a warning scenario */ + WARN = 50, + /** General informational log message */ + INFO = 60, + /** General debug log message */ + DEBUG = 70, + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + VERBOSE = 80, + /** Used to set the logging level to include all logging */ + ALL = 9999 +} +/** + * Defines options for ComponentLogger + */ +export interface ComponentLoggerOptions { + namespace: string; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.js b/node_modules/@opentelemetry/api/build/src/diag/types.js new file mode 100644 index 00000000..c195e45e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/types.js @@ -0,0 +1,44 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagLogLevel = void 0; +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. 
+ */ +var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.js.map b/node_modules/@opentelemetry/api/build/src/diag/types.js.map new file mode 100644 index 00000000..22dc8e21 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/diag/types.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AA+CH;;;;GAIG;AACH,IAAY,YAwBX;AAxBD,WAAY,YAAY;IACtB,uFAAuF;IACvF,+CAAQ,CAAA;IAER,mCAAmC;IACnC,kDAAU,CAAA;IAEV,oCAAoC;IACpC,gDAAS,CAAA;IAET,wCAAwC;IACxC,gDAAS,CAAA;IAET,gCAAgC;IAChC,kDAAU,CAAA;IAEV;;;OAGG;IACH,sDAAY,CAAA;IAEZ,2DAA2D;IAC3D,gDAAU,CAAA;AACZ,CAAC,EAxBW,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAwBvB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport type DiagLogFunction = (message: string, ...args: unknown[]) => void;\n\n/**\n * Defines an internal diagnostic logger interface which is used to log internal diagnostic\n * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function.\n * API provided implementations include :-\n * - a No-Op {@link createNoopDiagLogger}\n * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger}\n * - a general Console {@link DiagConsoleLogger} version.\n */\nexport interface DiagLogger {\n /** Log an error scenario that was not expected and caused the requested operation to fail. 
*/\n error: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n warn: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n info: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario.\n * For example: Logging the order of execution of async operations.\n */\n debug: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n verbose: DiagLogFunction;\n}\n\n/**\n * Defines the available internal logging levels for the diagnostic logger, the numeric values\n * of the levels are defined to match the original values from the initial LogLevel to avoid\n * compatibility/migration issues for any implementation that assume the numeric ordering.\n */\nexport enum DiagLogLevel {\n /** Diagnostic Logging level setting to disable all logging (except and forced logs) */\n NONE = 0,\n\n /** Identifies an error scenario */\n ERROR = 30,\n\n /** Identifies a warning scenario */\n WARN = 50,\n\n /** General informational log message */\n INFO = 60,\n\n /** General debug log message */\n DEBUG = 70,\n\n /**\n * Detailed trace level logging should only be used for development, should only be set\n * in a development environment.\n */\n VERBOSE = 80,\n\n /** Used to set the logging level to include all logging */\n ALL = 9999,\n}\n\n/**\n * Defines options for ComponentLogger\n */\nexport interface ComponentLoggerOptions {\n namespace: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.d.ts b/node_modules/@opentelemetry/api/build/src/index.d.ts new file mode 100644 index 00000000..33ff2300 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/index.d.ts @@ -0,0 +1,57 @@ +export * from './baggage/types'; +export { baggageEntryMetadataFromString } from './baggage/utils'; +export * from './common/Exception'; +export * from './common/Time'; +export * from './common/Attributes'; +export * from './diag'; +export * from './propagation/TextMapPropagator'; +export * from './trace/attributes'; +export * from './trace/link'; +export * from './trace/ProxyTracer'; +export * from './trace/ProxyTracerProvider'; +export * from './trace/Sampler'; +export * from './trace/SamplingResult'; +export * from './trace/span_context'; +export * from './trace/span_kind'; +export * from './trace/span'; +export * from './trace/SpanOptions'; +export * from './trace/status'; +export * from './trace/trace_flags'; +export * from './trace/trace_state'; +export { createTraceState } from './trace/internal/utils'; +export * from './trace/tracer_provider'; +export * from './trace/tracer'; +export * from './trace/tracer_options'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, 
INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +export * from './context/context'; +export * from './context/types'; +import { ContextAPI } from './api/context'; +export type { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export declare const context: ContextAPI; +import { TraceAPI } from './api/trace'; +export type { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export declare const trace: TraceAPI; +import { PropagationAPI } from './api/propagation'; +export type { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export declare const propagation: PropagationAPI; +import { DiagAPI } from './api/diag'; +export type { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +export declare const diag: DiagAPI; +declare const _default: { + trace: TraceAPI; + context: ContextAPI; + propagation: PropagationAPI; + diag: DiagAPI; +}; +export default _default; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.js b/node_modules/@opentelemetry/api/build/src/index.js new file mode 100644 index 00000000..189bb606 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/index.js @@ -0,0 +1,88 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.diag = exports.propagation = exports.trace = exports.context = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.createTraceState = exports.baggageEntryMetadataFromString = void 0; +__exportStar(require("./baggage/types"), exports); +var utils_1 = require("./baggage/utils"); +Object.defineProperty(exports, "baggageEntryMetadataFromString", { enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } }); +__exportStar(require("./common/Exception"), exports); +__exportStar(require("./common/Time"), exports); +__exportStar(require("./common/Attributes"), exports); +__exportStar(require("./diag"), exports); +__exportStar(require("./propagation/TextMapPropagator"), exports); +__exportStar(require("./trace/attributes"), exports); +__exportStar(require("./trace/link"), exports); +__exportStar(require("./trace/ProxyTracer"), exports); +__exportStar(require("./trace/ProxyTracerProvider"), exports); +__exportStar(require("./trace/Sampler"), exports); +__exportStar(require("./trace/SamplingResult"), exports); +__exportStar(require("./trace/span_context"), exports); +__exportStar(require("./trace/span_kind"), exports); +__exportStar(require("./trace/span"), exports); +__exportStar(require("./trace/SpanOptions"), exports); +__exportStar(require("./trace/status"), exports); +__exportStar(require("./trace/trace_flags"), exports); +__exportStar(require("./trace/trace_state"), exports); +var utils_2 = require("./trace/internal/utils"); +Object.defineProperty(exports, "createTraceState", { enumerable: true, get: function () { return utils_2.createTraceState; } }); +__exportStar(require("./trace/tracer_provider"), exports); +__exportStar(require("./trace/tracer"), exports); +__exportStar(require("./trace/tracer_options"), exports); +var spancontext_utils_1 = require("./trace/spancontext-utils"); +Object.defineProperty(exports, "isSpanContextValid", { enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } }); +Object.defineProperty(exports, "isValidTraceId", { enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } }); +Object.defineProperty(exports, "isValidSpanId", { enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } }); +var invalid_span_constants_1 = require("./trace/invalid-span-constants"); +Object.defineProperty(exports, "INVALID_SPANID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPANID; } }); +Object.defineProperty(exports, "INVALID_TRACEID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } }); +Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } }); +__exportStar(require("./context/context"), exports); +__exportStar(require("./context/types"), exports); +var context_1 = require("./api/context"); +/** 
Entrypoint for context API */ +exports.context = context_1.ContextAPI.getInstance(); +var trace_1 = require("./api/trace"); +/** Entrypoint for trace API */ +exports.trace = trace_1.TraceAPI.getInstance(); +var propagation_1 = require("./api/propagation"); +/** Entrypoint for propagation API */ +exports.propagation = propagation_1.PropagationAPI.getInstance(); +var diag_1 = require("./api/diag"); +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +exports.diag = diag_1.DiagAPI.instance(); +exports.default = { + trace: exports.trace, + context: exports.context, + propagation: exports.propagation, + diag: exports.diag, +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.js.map b/node_modules/@opentelemetry/api/build/src/index.js.map new file mode 100644 index 00000000..9f89a0c5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;;AAEH,kDAAgC;AAChC,yCAAiE;AAAxD,uHAAA,8BAA8B,OAAA;AACvC,qDAAmC;AACnC,gDAA8B;AAC9B,sDAAoC;AACpC,yCAAuB;AACvB,kEAAgD;AAChD,qDAAmC;AACnC,+CAA6B;AAC7B,sDAAoC;AACpC,8DAA4C;AAC5C,kDAAgC;AAChC,yDAAuC;AACvC,uDAAqC;AACrC,oDAAkC;AAClC,+CAA6B;AAC7B,sDAAoC;AACpC,iDAA+B;AAC/B,sDAAoC;AACpC,sDAAoC;AACpC,gDAA0D;AAAjD,yGAAA,gBAAgB,OAAA;AACzB,0DAAwC;AACxC,iDAA+B;AAC/B,yDAAuC;AAEvC,+DAImC;AAHjC,uHAAA,kBAAkB,OAAA;AAClB,mHAAA,cAAc,OAAA;AACd,kHAAA,aAAa,OAAA;AAGf,yEAIwC;AAHtC,wHAAA,cAAc,OAAA;AACd,yHAAA,eAAe,OAAA;AACf,8HAAA,oBAAoB,OAAA;AAGtB,oDAAkC;AAClC,kDAAgC;AAEhC,yCAA2C;AAE3C,iCAAiC;AACpB,QAAA,OAAO,GAAG,oBAAU,CAAC,WAAW,EAAE,CAAC;AAEhD,qCAAuC;AAEvC,+BAA+B;AAClB,QAAA,KAAK,GAAG,gBAAQ,CAAC,WAAW,EAAE,CAAC;AAE5C,iDAAmD;AAEnD,qCAAqC;AACxB,QAAA,WAAW,GAAG,4BAAc,CAAC,WAAW,EAAE,CAAC;AAExD,mCAAqC;AAGrC;;;;;GAKG;AACU,QAAA,IAAI,GAAG,cAAO,CAAC,QAAQ,EAAE,CAAC;AAEvC,kBAAe;IACb,KAAK,eAAA;IACL,OAAO,iBAAA;IACP,WAAW,qBAAA;IACX,IAAI,cAAA;CACL,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './baggage/types';\nexport { baggageEntryMetadataFromString } from './baggage/utils';\nexport * from './common/Exception';\nexport * from './common/Time';\nexport * from './common/Attributes';\nexport * from './diag';\nexport * from './propagation/TextMapPropagator';\nexport * from './trace/attributes';\nexport * from './trace/link';\nexport * from './trace/ProxyTracer';\nexport * from './trace/ProxyTracerProvider';\nexport * from './trace/Sampler';\nexport * from './trace/SamplingResult';\nexport * from './trace/span_context';\nexport * from './trace/span_kind';\nexport * from './trace/span';\nexport * from './trace/SpanOptions';\nexport * from './trace/status';\nexport * 
from './trace/trace_flags';\nexport * from './trace/trace_state';\nexport { createTraceState } from './trace/internal/utils';\nexport * from './trace/tracer_provider';\nexport * from './trace/tracer';\nexport * from './trace/tracer_options';\n\nexport {\n isSpanContextValid,\n isValidTraceId,\n isValidSpanId,\n} from './trace/spancontext-utils';\n\nexport {\n INVALID_SPANID,\n INVALID_TRACEID,\n INVALID_SPAN_CONTEXT,\n} from './trace/invalid-span-constants';\n\nexport * from './context/context';\nexport * from './context/types';\n\nimport { ContextAPI } from './api/context';\nexport type { ContextAPI } from './api/context';\n/** Entrypoint for context API */\nexport const context = ContextAPI.getInstance();\n\nimport { TraceAPI } from './api/trace';\nexport type { TraceAPI } from './api/trace';\n/** Entrypoint for trace API */\nexport const trace = TraceAPI.getInstance();\n\nimport { PropagationAPI } from './api/propagation';\nexport type { PropagationAPI } from './api/propagation';\n/** Entrypoint for propagation API */\nexport const propagation = PropagationAPI.getInstance();\n\nimport { DiagAPI } from './api/diag';\nexport type { DiagAPI } from './api/diag';\n\n/**\n * Entrypoint for Diag API.\n * Defines Diagnostic handler used for internal diagnostic logging operations.\n * The default provides a Noop DiagLogger implementation which may be changed via the\n * diag.setLogger(logger: DiagLogger) function.\n */\nexport const diag = DiagAPI.instance();\n\nexport default {\n trace,\n context,\n propagation,\n diag,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.d.ts b/node_modules/@opentelemetry/api/build/src/internal/global-utils.d.ts new file mode 100644 index 00000000..bb90097f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/global-utils.d.ts @@ -0,0 +1,16 @@ +import { ContextManager } from '../context/types'; +import { DiagLogger } from '../diag'; +import { TextMapPropagator } from '../propagation/TextMapPropagator'; +import type { TracerProvider } from '../trace/tracer_provider'; +export declare function registerGlobal(type: Type, instance: OTelGlobalAPI[Type], diag: DiagLogger, allowOverride?: boolean): boolean; +export declare function getGlobal(type: Type): OTelGlobalAPI[Type] | undefined; +export declare function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger): void; +declare type OTelGlobalAPI = { + version: string; + diag?: DiagLogger; + trace?: TracerProvider; + context?: ContextManager; + propagation?: TextMapPropagator; +}; +export {}; +//# sourceMappingURL=global-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.js b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js new file mode 100644 index 00000000..4ce231ff --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js @@ -0,0 +1,65 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; +var platform_1 = require("../platform"); +var version_1 = require("../version"); +var semver_1 = require("./semver"); +var major = version_1.VERSION.split('.')[0]; +var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." + major); +var _global = platform_1._globalThis; +function registerGlobal(type, instance, diag, allowOverride) { + var _a; + if (allowOverride === void 0) { allowOverride = false; } + var api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + version: version_1.VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + var err = new Error("@opentelemetry/api: Attempted duplicate registration of API: " + type); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== version_1.VERSION) { + // All registered APIs must be of the same version exactly + var err = new Error('@opentelemetry/api: All API registration versions must match'); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug("@opentelemetry/api: Registered a global for " + type + " v" + version_1.VERSION + "."); + return true; +} +exports.registerGlobal = registerGlobal; +function getGlobal(type) { + var _a, _b; + var globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !semver_1.isCompatible(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? 
void 0 : _b[type]; +} +exports.getGlobal = getGlobal; +function unregisterGlobal(type, diag) { + diag.debug("@opentelemetry/api: Unregistering a global for " + type + " v" + version_1.VERSION + "."); + var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +exports.unregisterGlobal = unregisterGlobal; +//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.js.map b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js.map new file mode 100644 index 00000000..d8aca3b4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"global-utils.js","sourceRoot":"","sources":["../../../src/internal/global-utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,wCAA0C;AAG1C,sCAAqC;AACrC,mCAAwC;AAExC,IAAM,KAAK,GAAG,iBAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,IAAM,4BAA4B,GAAG,MAAM,CAAC,GAAG,CAC7C,0BAAwB,KAAO,CAChC,CAAC;AAEF,IAAM,OAAO,GAAG,sBAAyB,CAAC;AAE1C,SAAgB,cAAc,CAC5B,IAAU,EACV,QAA6B,EAC7B,IAAgB,EAChB,aAAqB;;IAArB,8BAAA,EAAA,qBAAqB;IAErB,IAAM,GAAG,GAAG,CAAC,OAAO,CAAC,4BAA4B,CAAC,GAAG,MAAA,OAAO,CAC1D,4BAA4B,CAC7B,mCAAI;QACH,OAAO,EAAE,iBAAO;KACjB,CAAC,CAAC;IAEH,IAAI,CAAC,aAAa,IAAI,GAAG,CAAC,IAAI,CAAC,EAAE;QAC/B,yCAAyC;QACzC,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,kEAAgE,IAAM,CACvE,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,IAAI,GAAG,CAAC,OAAO,KAAK,iBAAO,EAAE;QAC3B,0DAA0D;QAC1D,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,8DAA8D,CAC/D,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,GAAG,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;IACrB,IAAI,CAAC,KAAK,CACR,iDAA+C,IAAI,UAAK,iBAAO,MAAG,CACnE,CAAC;IAEF,OAAO,IAAI,CAAC;AACd,CAAC;AApCD,wCAoCC;AAED,SAAgB,SAAS,CACvB,IAAU;;IAEV,IAAM,aAAa,GAAG,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAE,OAAO,CAAC;IACrE,IAAI,CAAC,aAAa,IAAI,CAAC,qBAAY,CAAC,aAAa,CAAC,EAAE;QAClD,OAAO;KACR;IACD,OAAO,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAG,IAAI,CAAC,CAAC;AACvD,CAAC;AARD,8BAQC;AAED,SAAgB,gBAAgB,CAAC,IAAyB,EAAE,IAAgB;IAC1E,IAAI,CAAC,KAAK,CACR,oDAAkD,IAAI,UAAK,iBAAO,MAAG,CACtE,CAAC;IACF,IAAM,GAAG,GAAG,OAAO,CAAC,4BAA4B,CAAC,CAAC;IAElD,IAAI,GAAG,EAAE;QACP,OAAO,GAAG,CAAC,IAAI,CAAC,CAAC;KAClB;AACH,CAAC;AATD,4CASC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextManager } from '../context/types';\nimport { DiagLogger } from '../diag';\nimport { _globalThis } from '../platform';\nimport { TextMapPropagator } from '../propagation/TextMapPropagator';\nimport type { TracerProvider } from '../trace/tracer_provider';\nimport { VERSION } from '../version';\nimport { isCompatible } from './semver';\n\nconst major = VERSION.split('.')[0];\nconst GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(\n `opentelemetry.js.api.${major}`\n);\n\nconst _global = _globalThis as OTelGlobal;\n\nexport function registerGlobal(\n type: 
Type,\n instance: OTelGlobalAPI[Type],\n diag: DiagLogger,\n allowOverride = false\n): boolean {\n const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = _global[\n GLOBAL_OPENTELEMETRY_API_KEY\n ] ?? {\n version: VERSION,\n });\n\n if (!allowOverride && api[type]) {\n // already registered an API of this type\n const err = new Error(\n `@opentelemetry/api: Attempted duplicate registration of API: ${type}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n if (api.version !== VERSION) {\n // All registered APIs must be of the same version exactly\n const err = new Error(\n '@opentelemetry/api: All API registration versions must match'\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n api[type] = instance;\n diag.debug(\n `@opentelemetry/api: Registered a global for ${type} v${VERSION}.`\n );\n\n return true;\n}\n\nexport function getGlobal(\n type: Type\n): OTelGlobalAPI[Type] | undefined {\n const globalVersion = _global[GLOBAL_OPENTELEMETRY_API_KEY]?.version;\n if (!globalVersion || !isCompatible(globalVersion)) {\n return;\n }\n return _global[GLOBAL_OPENTELEMETRY_API_KEY]?.[type];\n}\n\nexport function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger) {\n diag.debug(\n `@opentelemetry/api: Unregistering a global for ${type} v${VERSION}.`\n );\n const api = _global[GLOBAL_OPENTELEMETRY_API_KEY];\n\n if (api) {\n delete api[type];\n }\n}\n\ntype OTelGlobal = {\n [GLOBAL_OPENTELEMETRY_API_KEY]?: OTelGlobalAPI;\n};\n\ntype OTelGlobalAPI = {\n version: string;\n\n diag?: DiagLogger;\n trace?: TracerProvider;\n context?: ContextManager;\n propagation?: TextMapPropagator;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.d.ts b/node_modules/@opentelemetry/api/build/src/internal/semver.d.ts new file mode 100644 index 00000000..d9f4259e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/semver.d.ts @@ -0,0 +1,34 @@ +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. + * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export declare function _makeCompatibilityCheck(ownVersion: string): (globalVersion: string) => boolean; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export declare const isCompatible: (globalVersion: string) => boolean; +//# sourceMappingURL=semver.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.js b/node_modules/@opentelemetry/api/build/src/internal/semver.js new file mode 100644 index 00000000..b56392ae --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/semver.js @@ -0,0 +1,122 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isCompatible = exports._makeCompatibilityCheck = void 0; +var version_1 = require("../version"); +var re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +function _makeCompatibilityCheck(ownVersion) { + var acceptedVersions = new Set([ownVersion]); + var rejectedVersions = new Set(); + var myVersionMatch = ownVersion.match(re); + if (!myVersionMatch) { + // we cannot guarantee compatibility so we always return noop + return function () { return false; }; + } + var ownVersionParsed = { + major: +myVersionMatch[1], + minor: +myVersionMatch[2], + patch: +myVersionMatch[3], + prerelease: myVersionMatch[4], + }; + // if ownVersion has a prerelease tag, versions must match exactly + if (ownVersionParsed.prerelease != null) { + return function isExactmatch(globalVersion) { + return globalVersion === ownVersion; + }; + } + function _reject(v) { + rejectedVersions.add(v); + return false; + } + function _accept(v) { + acceptedVersions.add(v); + return true; + } + return function isCompatible(globalVersion) { + if (acceptedVersions.has(globalVersion)) { + return true; + } + if (rejectedVersions.has(globalVersion)) { + return false; + } + var globalVersionMatch = globalVersion.match(re); + if (!globalVersionMatch) { + // cannot parse other version + // we cannot guarantee compatibility so we always noop + return _reject(globalVersion); + } + var globalVersionParsed = { + major: +globalVersionMatch[1], + minor: +globalVersionMatch[2], + patch: +globalVersionMatch[3], + prerelease: globalVersionMatch[4], + }; + // if globalVersion has a prerelease tag, versions must match exactly + if (globalVersionParsed.prerelease != null) { + return _reject(globalVersion); + } + // major versions must match + if (ownVersionParsed.major !== globalVersionParsed.major) { + return _reject(globalVersion); + } + if (ownVersionParsed.major === 0) { + if (ownVersionParsed.minor === globalVersionParsed.minor && + ownVersionParsed.patch <= globalVersionParsed.patch) { + return _accept(globalVersion); + } + return _reject(globalVersion); + } + if (ownVersionParsed.minor <= globalVersionParsed.minor) { + return _accept(globalVersion); + } + return _reject(globalVersion); + }; +} +exports._makeCompatibilityCheck = _makeCompatibilityCheck; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION); +//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.js.map b/node_modules/@opentelemetry/api/build/src/internal/semver.js.map new file mode 100644 index 00000000..c90c00dd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/semver.js.map @@ -0,0 +1 @@ +{"version":3,"file":"semver.js","sourceRoot":"","sources":["../../../src/internal/semver.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,sCAAqC;AAErC,IAAM,EAAE,GAAG,+BAA+B,CAAC;AAE3C;;;;;;;;;;;;;;;GAeG;AACH,SAAgB,uBAAuB,CACrC,UAAkB;IAElB,IAAM,gBAAgB,GAAG,IAAI,GAAG,CAAS,CAAC,UAAU,CAAC,CAAC,CAAC;IACvD,IAAM,gBAAgB,GAAG,IAAI,GAAG,EAAU,CAAC;IAE3C,IAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAC5C,IAAI,CAAC,cAAc,EAAE;QACnB,6DAA6D;QAC7D,OAAO,cAAM,OAAA,KAAK,EAAL,CAAK,CAAC;KACpB;IAED,IAAM,gBAAgB,GAAG;QACvB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,UAAU,EAAE,cAAc,CAAC,CAAC,CAAC;KAC9B,CAAC;IAEF,kEAAkE;IAClE,IAAI,gBAAgB,CAAC,UAAU,IAAI,IAAI,EAAE;QACvC,OAAO,SAAS,YAAY,CAAC,aAAqB;YAChD,OAAO,aAAa,KAAK,UAAU,CAAC;QACtC,CAAC,CAAC;KACH;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO,SAAS,YAAY,CAAC,aAAqB;QAChD,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,IAAI,CAAC;SACb;QAED,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;QAED,IAAM,kBAAkB,GAAG,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QACnD,IAAI,CAAC,kBAAkB,EAAE;YACvB,6BAA6B;YAC7B,sDAAsD;YACtD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAM,mBAAmB,GAAG;YAC1B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,UAAU,EAAE,kBAAkB,CAAC,CAAC,CAAC;SAClC,CAAC;QAEF,qEAAqE;QACrE,IAAI,mBAAmB,CAAC,UAAU,IAAI,IAAI,EAAE;YAC1C,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,4BAA4B;QAC5B,IAAI,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK,EAAE;YACxD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,KAAK,CAAC,EAAE;YAChC,IACE,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK;gBACpD,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EACnD;gBACA,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EAAE;YACvD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;IAChC,CAAC,CAAC;AACJ,CAAC;AAtFD,0DAsFC;AAED;;;;;;;;;;;;;;GAcG;AACU,QAAA,YAAY,GAAG,uBAAuB,CAAC,iBAAO,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 
(the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { VERSION } from '../version';\n\nconst re = /^(\\d+)\\.(\\d+)\\.(\\d+)(-(.+))?$/;\n\n/**\n * Create a function to test an API version to see if it is compatible with the provided ownVersion.\n *\n * The returned function has the following semantics:\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param ownVersion version which should be checked against\n */\nexport function _makeCompatibilityCheck(\n ownVersion: string\n): (globalVersion: string) => boolean {\n const acceptedVersions = new Set([ownVersion]);\n const rejectedVersions = new Set();\n\n const myVersionMatch = ownVersion.match(re);\n if (!myVersionMatch) {\n // we cannot guarantee compatibility so we always return noop\n return () => false;\n }\n\n const ownVersionParsed = {\n major: +myVersionMatch[1],\n minor: +myVersionMatch[2],\n patch: +myVersionMatch[3],\n prerelease: myVersionMatch[4],\n };\n\n // if ownVersion has a prerelease tag, versions must match exactly\n if (ownVersionParsed.prerelease != null) {\n return function isExactmatch(globalVersion: string): boolean {\n return globalVersion === ownVersion;\n };\n }\n\n function _reject(v: string) {\n rejectedVersions.add(v);\n return false;\n }\n\n function _accept(v: string) {\n acceptedVersions.add(v);\n return true;\n }\n\n return function isCompatible(globalVersion: string): boolean {\n if (acceptedVersions.has(globalVersion)) {\n return true;\n }\n\n if (rejectedVersions.has(globalVersion)) {\n return false;\n }\n\n const globalVersionMatch = globalVersion.match(re);\n if (!globalVersionMatch) {\n // cannot parse other version\n // we cannot guarantee compatibility so we always noop\n return _reject(globalVersion);\n }\n\n const globalVersionParsed = {\n major: +globalVersionMatch[1],\n minor: +globalVersionMatch[2],\n patch: +globalVersionMatch[3],\n prerelease: globalVersionMatch[4],\n };\n\n // if globalVersion has a prerelease tag, versions must match exactly\n if (globalVersionParsed.prerelease != null) {\n return _reject(globalVersion);\n }\n\n // major versions must match\n if (ownVersionParsed.major !== globalVersionParsed.major) {\n return _reject(globalVersion);\n }\n\n if (ownVersionParsed.major === 0) {\n if (\n ownVersionParsed.minor === globalVersionParsed.minor &&\n ownVersionParsed.patch <= globalVersionParsed.patch\n ) {\n return _accept(globalVersion);\n }\n\n return 
_reject(globalVersion);\n }\n\n if (ownVersionParsed.minor <= globalVersionParsed.minor) {\n return _accept(globalVersion);\n }\n\n return _reject(globalVersion);\n };\n}\n\n/**\n * Test an API version to see if it is compatible with this API.\n *\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param version version of the API requesting an instance of the global API\n */\nexport const isCompatible = _makeCompatibilityCheck(VERSION);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.d.ts b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.d.ts new file mode 100644 index 00000000..e73fd73e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.d.ts @@ -0,0 +1,10 @@ +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js new file mode 100644 index 00000000..747610cd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js @@ -0,0 +1,35 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports._globalThis = void 0; +// Updates to this file should also be replicated to @opentelemetry/api-metrics and +// @opentelemetry/core too. +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef +exports._globalThis = typeof globalThis === 'object' ? globalThis : + typeof self === 'object' ? 
self : + typeof window === 'object' ? window : + typeof global === 'object' ? global : + {}; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js.map b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js.map new file mode 100644 index 00000000..6a0e64ea --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/browser/globalThis.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,mFAAmF;AACnF,2BAA2B;AAE3B;;;;;;GAMG;AAEH,gEAAgE;AAChE,8EAA8E;AACjE,QAAA,WAAW,GACtB,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC;IAC7C,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QACjC,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;YACrC,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;gBACrC,EAAuB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Updates to this file should also be replicated to @opentelemetry/api-metrics and\n// @opentelemetry/core too.\n\n/**\n * - globalThis (New standard)\n * - self (Will return the current window instance for supported browsers)\n * - window (fallback for older browser implementations)\n * - global (NodeJS implementation)\n * - (When all else fails)\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef\nexport const _globalThis: typeof globalThis =\n typeof globalThis === 'object' ? globalThis :\n typeof self === 'object' ? self :\n typeof window === 'object' ? window :\n typeof global === 'object' ? global :\n {} as typeof globalThis;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.d.ts b/node_modules/@opentelemetry/api/build/src/platform/browser/index.d.ts new file mode 100644 index 00000000..ba20e123 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.js b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js new file mode 100644 index 00000000..99fd57c8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./globalThis"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.js.map b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js.map new file mode 100644 index 00000000..5a406a99 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/browser/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,+CAA6B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.d.ts b/node_modules/@opentelemetry/api/build/src/platform/index.d.ts new file mode 100644 index 00000000..90595da9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/index.d.ts @@ -0,0 +1,2 @@ +export * from './node'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.js b/node_modules/@opentelemetry/api/build/src/platform/index.js new file mode 100644 index 00000000..33b834db --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/index.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./node"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.js.map b/node_modules/@opentelemetry/api/build/src/platform/index.js.map new file mode 100644 index 00000000..bc13701f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/platform/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,yCAAuB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './node';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.d.ts b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.d.ts new file mode 100644 index 00000000..e3c83e5d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.d.ts @@ -0,0 +1,3 @@ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js new file mode 100644 index 00000000..82c4e394 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js @@ -0,0 +1,22 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports._globalThis = void 0; +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +exports._globalThis = typeof globalThis === 'object' ? globalThis : global; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js.map b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js.map new file mode 100644 index 00000000..2f2ca822 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/node/globalThis.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,gEAAgE;AAChE,oEAAoE;AACvD,QAAA,WAAW,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexport const _globalThis = typeof globalThis === 'object' ? globalThis : global;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.d.ts b/node_modules/@opentelemetry/api/build/src/platform/node/index.d.ts new file mode 100644 index 00000000..ba20e123 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.js b/node_modules/@opentelemetry/api/build/src/platform/node/index.js new file mode 100644 index 00000000..99fd57c8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/index.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./globalThis"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.js.map b/node_modules/@opentelemetry/api/build/src/platform/node/index.js.map new file mode 100644 index 00000000..95561e98 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/node/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,+CAA6B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.d.ts new file mode 100644 index 00000000..398021f3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.d.ts @@ -0,0 +1,13 @@ +import { Context } from '../context/types'; +import { TextMapPropagator } from './TextMapPropagator'; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +export declare class NoopTextMapPropagator implements TextMapPropagator { + /** Noop inject function does nothing */ + inject(_context: Context, _carrier: unknown): void; + /** Noop extract function does nothing and returns the input context */ + extract(context: Context, _carrier: unknown): Context; + fields(): string[]; +} +//# sourceMappingURL=NoopTextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js new file mode 100644 index 00000000..123adedd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js @@ -0,0 +1,37 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTextMapPropagator = void 0; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +var NoopTextMapPropagator = /** @class */ (function () { + function NoopTextMapPropagator() { + } + /** Noop inject function does nothing */ + NoopTextMapPropagator.prototype.inject = function (_context, _carrier) { }; + /** Noop extract function does nothing and returns the input context */ + NoopTextMapPropagator.prototype.extract = function (context, _carrier) { + return context; + }; + NoopTextMapPropagator.prototype.fields = function () { + return []; + }; + return NoopTextMapPropagator; +}()); +exports.NoopTextMapPropagator = NoopTextMapPropagator; +//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js.map new file mode 100644 index 00000000..b893e15d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/NoopTextMapPropagator.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH;;GAEG;AACH;IAAA;IAUA,CAAC;IATC,wCAAwC;IACxC,sCAAM,GAAN,UAAO,QAAiB,EAAE,QAAiB,IAAS,CAAC;IACrD,uEAAuE;IACvE,uCAAO,GAAP,UAAQ,OAAgB,EAAE,QAAiB;QACzC,OAAO,OAAO,CAAC;IACjB,CAAC;IACD,sCAAM,GAAN;QACE,OAAO,EAAE,CAAC;IACZ,CAAC;IACH,4BAAC;AAAD,CAAC,AAVD,IAUC;AAVY,sDAAqB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { TextMapPropagator } from './TextMapPropagator';\n\n/**\n * No-op implementations of {@link TextMapPropagator}.\n */\nexport class NoopTextMapPropagator implements TextMapPropagator {\n /** Noop inject function does nothing */\n inject(_context: Context, _carrier: unknown): void {}\n /** Noop extract function does nothing and returns the input context */\n extract(context: Context, _carrier: unknown): Context {\n return context;\n }\n fields(): string[] {\n return [];\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.d.ts new file mode 100644 index 00000000..dc39367b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.d.ts @@ 
-0,0 +1,84 @@ +import { Context } from '../context/types'; +/** + * Injects `Context` into and extracts it from carriers that travel + * in-band across process boundaries. Encoding is expected to conform to the + * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request + * headers. + * + * The carrier of propagated data on both the client (injector) and server + * (extractor) side is usually an object such as http headers. Propagation is + * usually implemented via library-specific request interceptors, where the + * client-side injects values and the server-side extracts them. + */ +export interface TextMapPropagator { + /** + * Injects values from a given `Context` into a carrier. + * + * OpenTelemetry defines a common set of format values (TextMapPropagator), + * and each has an expected `carrier` type. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param setter an optional {@link TextMapSetter}. If undefined, values will be + * set by direct object assignment. + */ + inject(context: Context, carrier: Carrier, setter: TextMapSetter): void; + /** + * Given a `Context` and a carrier, extract context values from a + * carrier and return a new context, created from the old context, with the + * extracted values. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all + * own properties, and keys will be accessed by direct object access. + */ + extract(context: Context, carrier: Carrier, getter: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; +} +/** + * A setter is specified by the caller to define a specific method + * to set key/value pairs on the carrier within a propagator. + */ +export interface TextMapSetter { + /** + * Callback used to set a key/value pair on an object. + * + * Should be called by the propagator each time a key/value pair + * should be set, and should set that key/value pair on the propagator. + * + * @param carrier object or class which carries key/value pairs + * @param key string key to modify + * @param value value to be set to the key on the carrier + */ + set(carrier: Carrier, key: string, value: string): void; +} +/** + * A getter is specified by the caller to define a specific method + * to get the value of a key from a carrier. + */ +export interface TextMapGetter { + /** + * Get a list of all keys available on the carrier. + * + * @param carrier + */ + keys(carrier: Carrier): string[]; + /** + * Get the value of a specific key from the carrier. 
+ * + * @param carrier + * @param key + */ + get(carrier: Carrier, key: string): undefined | string | string[]; +} +export declare const defaultTextMapGetter: TextMapGetter; +export declare const defaultTextMapSetter: TextMapSetter; +//# sourceMappingURL=TextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js new file mode 100644 index 00000000..88073415 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js @@ -0,0 +1,41 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultTextMapSetter = exports.defaultTextMapGetter = void 0; +exports.defaultTextMapGetter = { + get: function (carrier, key) { + if (carrier == null) { + return undefined; + } + return carrier[key]; + }, + keys: function (carrier) { + if (carrier == null) { + return []; + } + return Object.keys(carrier); + }, +}; +exports.defaultTextMapSetter = { + set: function (carrier, key, value) { + if (carrier == null) { + return; + } + carrier[key] = value; + }, +}; +//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js.map new file mode 100644 index 00000000..06abd5a0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/TextMapPropagator.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAkGU,QAAA,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG;QACd,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC;IACtB,CAAC;IAED,IAAI,YAAC,OAAO;QACV,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,EAAE,CAAC;SACX;QACD,OAAO,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;CACF,CAAC;AAEW,QAAA,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG,EAAE,KAAK;QACrB,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO;SACR;QAED,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IACvB,CAAC;CACF,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\n\n/**\n * Injects 
`Context` into and extracts it from carriers that travel\n * in-band across process boundaries. Encoding is expected to conform to the\n * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request\n * headers.\n *\n * The carrier of propagated data on both the client (injector) and server\n * (extractor) side is usually an object such as http headers. Propagation is\n * usually implemented via library-specific request interceptors, where the\n * client-side injects values and the server-side extracts them.\n */\nexport interface TextMapPropagator {\n /**\n * Injects values from a given `Context` into a carrier.\n *\n * OpenTelemetry defines a common set of format values (TextMapPropagator),\n * and each has an expected `carrier` type.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param setter an optional {@link TextMapSetter}. If undefined, values will be\n * set by direct object assignment.\n */\n inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter\n ): void;\n\n /**\n * Given a `Context` and a carrier, extract context values from a\n * carrier and return a new context, created from the old context, with the\n * extracted values.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all\n * own properties, and keys will be accessed by direct object access.\n */\n extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter\n ): Context;\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n fields(): string[];\n}\n\n/**\n * A setter is specified by the caller to define a specific method\n * to set key/value pairs on the carrier within a propagator.\n */\nexport interface TextMapSetter {\n /**\n * Callback used to set a key/value pair on an object.\n *\n * Should be called by the propagator each time a key/value pair\n * should be set, and should set that key/value pair on the propagator.\n *\n * @param carrier object or class which carries key/value pairs\n * @param key string key to modify\n * @param value value to be set to the key on the carrier\n */\n set(carrier: Carrier, key: string, value: string): void;\n}\n\n/**\n * A getter is specified by the caller to define a specific method\n * to get the value of a key from a carrier.\n */\nexport interface TextMapGetter {\n /**\n * Get a list of all keys available on the carrier.\n *\n * @param carrier\n */\n keys(carrier: Carrier): string[];\n\n /**\n * Get the value of a specific key from the carrier.\n *\n * @param carrier\n * @param key\n */\n get(carrier: Carrier, key: string): undefined | string | string[];\n}\n\nexport const defaultTextMapGetter: TextMapGetter = {\n get(carrier, key) {\n if (carrier == null) {\n return undefined;\n }\n return carrier[key];\n },\n\n keys(carrier) {\n if (carrier == null) {\n return [];\n }\n return Object.keys(carrier);\n },\n};\n\nexport const defaultTextMapSetter: TextMapSetter = {\n set(carrier, key, value) {\n if (carrier == null) {\n return;\n }\n\n carrier[key] = value;\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.d.ts b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.d.ts new file 
mode 100644 index 00000000..883cf914 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.d.ts @@ -0,0 +1,25 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +export declare class NonRecordingSpan implements Span { + private readonly _spanContext; + constructor(_spanContext?: SpanContext); + spanContext(): SpanContext; + setAttribute(_key: string, _value: unknown): this; + setAttributes(_attributes: SpanAttributes): this; + addEvent(_name: string, _attributes?: SpanAttributes): this; + setStatus(_status: SpanStatus): this; + updateName(_name: string): this; + end(_endTime?: TimeInput): void; + isRecording(): boolean; + recordException(_exception: Exception, _time?: TimeInput): void; +} +//# sourceMappingURL=NonRecordingSpan.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js new file mode 100644 index 00000000..d5f537a3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js @@ -0,0 +1,65 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NonRecordingSpan = void 0; +var invalid_span_constants_1 = require("./invalid-span-constants"); +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +var NonRecordingSpan = /** @class */ (function () { + function NonRecordingSpan(_spanContext) { + if (_spanContext === void 0) { _spanContext = invalid_span_constants_1.INVALID_SPAN_CONTEXT; } + this._spanContext = _spanContext; + } + // Returns a SpanContext. 
+ NonRecordingSpan.prototype.spanContext = function () { + return this._spanContext; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setStatus = function (_status) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.updateName = function (_name) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.end = function (_endTime) { }; + // isRecording always returns false for NonRecordingSpan. + NonRecordingSpan.prototype.isRecording = function () { + return false; + }; + // By default does nothing + NonRecordingSpan.prototype.recordException = function (_exception, _time) { }; + return NonRecordingSpan; +}()); +exports.NonRecordingSpan = NonRecordingSpan; +//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js.map b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js.map new file mode 100644 index 00000000..7a105a14 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NonRecordingSpan.js","sourceRoot":"","sources":["../../../src/trace/NonRecordingSpan.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH,mEAAgE;AAKhE;;;;GAIG;AACH;IACE,0BACmB,YAAgD;QAAhD,6BAAA,EAAA,eAA4B,6CAAoB;QAAhD,iBAAY,GAAZ,YAAY,CAAoC;IAChE,CAAC;IAEJ,yBAAyB;IACzB,sCAAW,GAAX;QACE,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED,0BAA0B;IAC1B,uCAAY,GAAZ,UAAa,IAAY,EAAE,MAAe;QACxC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,wCAAa,GAAb,UAAc,WAA2B;QACvC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,mCAAQ,GAAR,UAAS,KAAa,EAAE,WAA4B;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,oCAAS,GAAT,UAAU,OAAmB;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,qCAAU,GAAV,UAAW,KAAa;QACtB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,8BAAG,GAAH,UAAI,QAAoB,IAAS,CAAC;IAElC,yDAAyD;IACzD,sCAAW,GAAX;QACE,OAAO,KAAK,CAAC;IACf,CAAC;IAED,0BAA0B;IAC1B,0CAAe,GAAf,UAAgB,UAAqB,EAAE,KAAiB,IAAS,CAAC;IACpE,uBAAC;AAAD,CAAC,AA7CD,IA6CC;AA7CY,4CAAgB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { INVALID_SPAN_CONTEXT } from './invalid-span-constants';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * The NonRecordingSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nexport class NonRecordingSpan implements Span {\n constructor(\n private readonly _spanContext: SpanContext = INVALID_SPAN_CONTEXT\n ) {}\n\n // Returns a SpanContext.\n spanContext(): SpanContext {\n return this._spanContext;\n }\n\n // By default does nothing\n setAttribute(_key: string, _value: unknown): this {\n return this;\n }\n\n // By default does nothing\n setAttributes(_attributes: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n addEvent(_name: string, _attributes?: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n setStatus(_status: SpanStatus): this {\n return this;\n }\n\n // By default does nothing\n updateName(_name: string): this {\n return this;\n }\n\n // By default does nothing\n end(_endTime?: TimeInput): void {}\n\n // isRecording always returns false for NonRecordingSpan.\n isRecording(): boolean {\n return false;\n }\n\n // By default does nothing\n recordException(_exception: Exception, _time?: TimeInput): void {}\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.d.ts b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.d.ts new file mode 100644 index 00000000..0e059c99 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.d.ts @@ -0,0 +1,14 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +/** + * No-op implementations of {@link Tracer}. + */ +export declare class NoopTracer implements Tracer { + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan ReturnType>(name: string, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, ctx: Context | undefined, fn: F): ReturnType; +} +//# sourceMappingURL=NoopTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js new file mode 100644 index 00000000..5c1b26fa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js @@ -0,0 +1,78 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTracer = void 0; +var context_1 = require("../api/context"); +var context_utils_1 = require("../trace/context-utils"); +var NonRecordingSpan_1 = require("./NonRecordingSpan"); +var spancontext_utils_1 = require("./spancontext-utils"); +var context = context_1.ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. + */ +var NoopTracer = /** @class */ (function () { + function NoopTracer() { + } + // startSpan starts a noop span. 
+ NoopTracer.prototype.startSpan = function (name, options, context) { + var root = Boolean(options === null || options === void 0 ? void 0 : options.root); + if (root) { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + var parentFromContext = context && context_utils_1.getSpanContext(context); + if (isSpanContext(parentFromContext) && + spancontext_utils_1.isSpanContextValid(parentFromContext)) { + return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + }; + NoopTracer.prototype.startActiveSpan = function (name, arg2, arg3, arg4) { + var opts; + var ctx; + var fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + var parentContext = ctx !== null && ctx !== void 0 ? ctx : context.active(); + var span = this.startSpan(name, opts, parentContext); + var contextWithSpanSet = context_utils_1.setSpan(parentContext, span); + return context.with(contextWithSpanSet, fn, undefined, span); + }; + return NoopTracer; +}()); +exports.NoopTracer = NoopTracer; +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js.map b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js.map new file mode 100644 index 00000000..8559ee79 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracer.js","sourceRoot":"","sources":["../../../src/trace/NoopTracer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,0CAA4C;AAE5C,wDAAiE;AACjE,uDAAsD;AAEtD,yDAAyD;AAKzD,IAAM,OAAO,GAAG,oBAAU,CAAC,WAAW,EAAE,CAAC;AAEzC;;GAEG;AACH;IAAA;IAgEA,CAAC;IA/DC,gCAAgC;IAChC,8BAAS,GAAT,UAAU,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,IAAM,IAAI,GAAG,OAAO,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,IAAI,CAAC,CAAC;QACpC,IAAI,IAAI,EAAE;YACR,OAAO,IAAI,mCAAgB,EAAE,CAAC;SAC/B;QAED,IAAM,iBAAiB,GAAG,OAAO,IAAI,8BAAc,CAAC,OAAO,CAAC,CAAC;QAE7D,IACE,aAAa,CAAC,iBAAiB,CAAC;YAChC,sCAAkB,CAAC,iBAAiB,CAAC,EACrC;YACA,OAAO,IAAI,mCAAgB,CAAC,iBAAiB,CAAC,CAAC;SAChD;aAAM;YACL,OAAO,IAAI,mCAAgB,EAAE,CAAC;SAC/B;IACH,CAAC;IAiBD,oCAAe,GAAf,UACE,IAAY,EACZ,IAAsB,EACtB,IAAkB,EAClB,IAAQ;QAER,IAAI,IAA6B,CAAC;QAClC,IAAI,GAAwB,CAAC;QAC7B,IAAI,EAAK,CAAC;QAEV,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YACxB,OAAO;SACR;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,IAAI,GAAG,IAA+B,CAAC;YACvC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM;YACL,IAAI,GAAG,IAA+B,CAAC;YACvC,GAAG,GAAG,IAA2B,CAAC;YAClC,EAAE,GAAG,IAAS,CAAC;SAChB;QAED,IAAM,aAAa,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,OAAO,CAAC,MAAM,EAAE,CAAC;QAC9C,IAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,aAAa,CAAC,CAAC;QACvD,IAAM,kBAAkB,GAAG,uBAAO,CAAC,aAAa,EAAE,IAAI,CAAC,CAAC;QAExD,OAAO,OAAO,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;IAC/D,CAAC;IACH,iBAAC;AAAD,CAAC,AAhED,IAgEC;AAhEY,gCAAU;AAkEvB,SAAS,aAAa,CAAC,WAAgB;IACrC,OAAO,CACL,OAAO,WAAW,KAAK,QAAQ;QAC/B,OAAO,WAAW,CAAC,QAAQ,CAAC,KAAK,QAAQ;QACzC,OAAO,WAAW,CAAC,SAAS,CAAC,KAAK,QAAQ;QAC1C,OAAO,WAAW,CAAC,YAAY,CAAC,KAAK,QAAQ,CAC9C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * 
Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from '../api/context';\nimport { Context } from '../context/types';\nimport { getSpanContext, setSpan } from '../trace/context-utils';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { isSpanContextValid } from './spancontext-utils';\nimport { SpanOptions } from './SpanOptions';\nimport { SpanContext } from './span_context';\nimport { Tracer } from './tracer';\n\nconst context = ContextAPI.getInstance();\n\n/**\n * No-op implementations of {@link Tracer}.\n */\nexport class NoopTracer implements Tracer {\n // startSpan starts a noop span.\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n const root = Boolean(options?.root);\n if (root) {\n return new NonRecordingSpan();\n }\n\n const parentFromContext = context && getSpanContext(context);\n\n if (\n isSpanContext(parentFromContext) &&\n isSpanContextValid(parentFromContext)\n ) {\n return new NonRecordingSpan(parentFromContext);\n } else {\n return new NonRecordingSpan();\n }\n }\n\n startActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n ctx: Context | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n arg2?: F | SpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType | undefined {\n let opts: SpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (arguments.length < 2) {\n return;\n } else if (arguments.length === 2) {\n fn = arg2 as F;\n } else if (arguments.length === 3) {\n opts = arg2 as SpanOptions | undefined;\n fn = arg3 as F;\n } else {\n opts = arg2 as SpanOptions | undefined;\n ctx = arg3 as Context | undefined;\n fn = arg4 as F;\n }\n\n const parentContext = ctx ?? 
context.active();\n const span = this.startSpan(name, opts, parentContext);\n const contextWithSpanSet = setSpan(parentContext, span);\n\n return context.with(contextWithSpanSet, fn, undefined, span);\n }\n}\n\nfunction isSpanContext(spanContext: any): spanContext is SpanContext {\n return (\n typeof spanContext === 'object' &&\n typeof spanContext['spanId'] === 'string' &&\n typeof spanContext['traceId'] === 'string' &&\n typeof spanContext['traceFlags'] === 'number'\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.d.ts new file mode 100644 index 00000000..ec0fe792 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.d.ts @@ -0,0 +1,13 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +import { TracerProvider } from './tracer_provider'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. + */ +export declare class NoopTracerProvider implements TracerProvider { + getTracer(_name?: string, _version?: string, _options?: TracerOptions): Tracer; +} +//# sourceMappingURL=NoopTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js new file mode 100644 index 00000000..d08649dd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js @@ -0,0 +1,35 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTracerProvider = void 0; +var NoopTracer_1 = require("./NoopTracer"); +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. 
+ */ +var NoopTracerProvider = /** @class */ (function () { + function NoopTracerProvider() { + } + NoopTracerProvider.prototype.getTracer = function (_name, _version, _options) { + return new NoopTracer_1.NoopTracer(); + }; + return NoopTracerProvider; +}()); +exports.NoopTracerProvider = NoopTracerProvider; +//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js.map b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js.map new file mode 100644 index 00000000..a379756d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/NoopTracerProvider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,2CAA0C;AAK1C;;;;;GAKG;AACH;IAAA;IAQA,CAAC;IAPC,sCAAS,GAAT,UACE,KAAc,EACd,QAAiB,EACjB,QAAwB;QAExB,OAAO,IAAI,uBAAU,EAAE,CAAC;IAC1B,CAAC;IACH,yBAAC;AAAD,CAAC,AARD,IAQC;AARY,gDAAkB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopTracer } from './NoopTracer';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\nimport { TracerProvider } from './tracer_provider';\n\n/**\n * An implementation of the {@link TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nexport class NoopTracerProvider implements TracerProvider {\n getTracer(\n _name?: string,\n _version?: string,\n _options?: TracerOptions\n ): Tracer {\n return new NoopTracer();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.d.ts b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.d.ts new file mode 100644 index 00000000..116cc5c1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.d.ts @@ -0,0 +1,27 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * Proxy tracer provided by the proxy tracer provider + */ +export declare class ProxyTracer implements Tracer { + private _provider; + readonly name: string; + readonly version?: string | undefined; + readonly options?: TracerOptions | undefined; + private _delegate?; + constructor(_provider: TracerDelegator, name: string, version?: string | undefined, options?: TracerOptions | undefined); + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan unknown>(_name: string, _options: F | SpanOptions, _context?: F | Context, _fn?: F): ReturnType; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + private _getTracer; +} +export interface TracerDelegator { + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js new file mode 100644 index 00000000..530a3790 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js @@ -0,0 +1,56 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ProxyTracer = void 0; +var NoopTracer_1 = require("./NoopTracer"); +var NOOP_TRACER = new NoopTracer_1.NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +var ProxyTracer = /** @class */ (function () { + function ProxyTracer(_provider, name, version, options) { + this._provider = _provider; + this.name = name; + this.version = version; + this.options = options; + } + ProxyTracer.prototype.startSpan = function (name, options, context) { + return this._getTracer().startSpan(name, options, context); + }; + ProxyTracer.prototype.startActiveSpan = function (_name, _options, _context, _fn) { + var tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + }; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + ProxyTracer.prototype._getTracer = function () { + if (this._delegate) { + return this._delegate; + } + var tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + }; + return ProxyTracer; +}()); +exports.ProxyTracer = ProxyTracer; +//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js.map b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js.map new file mode 100644 index 00000000..b8e4b53f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracer.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,2CAA0C;AAM1C,IAAM,WAAW,GAAG,IAAI,uBAAU,EAAE,CAAC;AAErC;;GAEG;AACH;IAIE,qBACU,SAA0B,EAClB,IAAY,EACZ,OAAgB,EAChB,OAAuB;QAH/B,cAAS,GAAT,SAAS,CAAiB;QAClB,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAS;QAChB,YAAO,GAAP,OAAO,CAAgB;IACtC,CAAC;IAEJ,+BAAS,GAAT,UAAU,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;IAED,qCAAe,GAAf,UACE,KAAa,EACb,QAAyB,EACzB,QAAsB,EACtB,GAAO;QAEP,IAAM,MAAM,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC;QACjC,OAAO,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,eAAe,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;IAClE,CAAC;IAED;;;OAGG;IACK,gCAAU,GAAlB;QACE,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,OAAO,IAAI,CAAC,SAAS,CAAC;SACvB;QAED,IAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEvF,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,WAAW,CAAC;SACpB;QAED,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC;QACxB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IACH,kBAAC;AAAD,CAAC,AA3CD,IA2CC;AA3CY,kCAAW","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { NoopTracer } from './NoopTracer';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER = new NoopTracer();\n\n/**\n * Proxy tracer provided by the proxy tracer provider\n */\nexport class ProxyTracer implements Tracer {\n // When a real implementation is provided, this will be it\n private _delegate?: Tracer;\n\n constructor(\n private _provider: TracerDelegator,\n public readonly name: string,\n public readonly version?: string,\n public readonly options?: TracerOptions\n ) {}\n\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n return this._getTracer().startSpan(name, options, context);\n }\n\n startActiveSpan unknown>(\n _name: string,\n _options: F | SpanOptions,\n _context?: F | Context,\n _fn?: F\n ): ReturnType {\n const tracer = this._getTracer();\n return Reflect.apply(tracer.startActiveSpan, tracer, arguments);\n }\n\n /**\n * Try to get a 
tracer from the proxy tracer provider.\n * If the proxy tracer provider has no delegate, return a noop tracer.\n */\n private _getTracer() {\n if (this._delegate) {\n return this._delegate;\n }\n\n const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options);\n\n if (!tracer) {\n return NOOP_TRACER;\n }\n\n this._delegate = tracer;\n return this._delegate;\n }\n}\n\nexport interface TracerDelegator {\n getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.d.ts new file mode 100644 index 00000000..ee7eafa9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.d.ts @@ -0,0 +1,25 @@ +import { Tracer } from './tracer'; +import { TracerProvider } from './tracer_provider'; +import { TracerOptions } from './tracer_options'; +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +export declare class ProxyTracerProvider implements TracerProvider { + private _delegate?; + /** + * Get a {@link ProxyTracer} + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; + getDelegate(): TracerProvider; + /** + * Set the delegate tracer provider + */ + setDelegate(delegate: TracerProvider): void; + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js new file mode 100644 index 00000000..32debbba --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js @@ -0,0 +1,57 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ProxyTracerProvider = void 0; +var ProxyTracer_1 = require("./ProxyTracer"); +var NoopTracerProvider_1 = require("./NoopTracerProvider"); +var NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. 
+ */ +var ProxyTracerProvider = /** @class */ (function () { + function ProxyTracerProvider() { + } + /** + * Get a {@link ProxyTracer} + */ + ProxyTracerProvider.prototype.getTracer = function (name, version, options) { + var _a; + return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version, options)); + }; + ProxyTracerProvider.prototype.getDelegate = function () { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + }; + /** + * Set the delegate tracer provider + */ + ProxyTracerProvider.prototype.setDelegate = function (delegate) { + this._delegate = delegate; + }; + ProxyTracerProvider.prototype.getDelegateTracer = function (name, version, options) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); + }; + return ProxyTracerProvider; +}()); +exports.ProxyTracerProvider = ProxyTracerProvider; +//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js.map b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js.map new file mode 100644 index 00000000..4140662c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracerProvider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,6CAA4C;AAC5C,2DAA0D;AAG1D,IAAM,oBAAoB,GAAG,IAAI,uCAAkB,EAAE,CAAC;AAEtD;;;;;;;GAOG;AACH;IAAA;IA+BA,CAAC;IA5BC;;OAEG;IACH,uCAAS,GAAT,UAAU,IAAY,EAAE,OAAgB,EAAE,OAAuB;;QAC/D,OAAO,CACL,MAAA,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,mCAC9C,IAAI,yBAAW,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAC9C,CAAC;IACJ,CAAC;IAED,yCAAW,GAAX;;QACE,OAAO,MAAA,IAAI,CAAC,SAAS,mCAAI,oBAAoB,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,yCAAW,GAAX,UAAY,QAAwB;QAClC,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAED,+CAAiB,GAAjB,UACE,IAAY,EACZ,OAAgB,EAChB,OAAuB;;QAEvB,OAAO,MAAA,IAAI,CAAC,SAAS,0CAAE,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IACH,0BAAC;AAAD,CAAC,AA/BD,IA+BC;AA/BY,kDAAmB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerProvider } from './tracer_provider';\nimport { ProxyTracer } from './ProxyTracer';\nimport { NoopTracerProvider } from './NoopTracerProvider';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n\n/**\n * Tracer provider which provides {@link ProxyTracer}s.\n *\n * Before a delegate is set, tracers provided are NoOp.\n * When a delegate is set, traces are provided from the delegate.\n * When a delegate is set after tracers have already been provided,\n * all tracers already provided will use the provided delegate implementation.\n */\nexport class 
ProxyTracerProvider implements TracerProvider {\n private _delegate?: TracerProvider;\n\n /**\n * Get a {@link ProxyTracer}\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n return (\n this.getDelegateTracer(name, version, options) ??\n new ProxyTracer(this, name, version, options)\n );\n }\n\n getDelegate(): TracerProvider {\n return this._delegate ?? NOOP_TRACER_PROVIDER;\n }\n\n /**\n * Set the delegate tracer provider\n */\n setDelegate(delegate: TracerProvider) {\n this._delegate = delegate;\n }\n\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined {\n return this._delegate?.getTracer(name, version, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.d.ts b/node_modules/@opentelemetry/api/build/src/trace/Sampler.d.ts new file mode 100644 index 00000000..dd8dc811 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/Sampler.d.ts @@ -0,0 +1,30 @@ +import { Context } from '../context/types'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SamplingResult } from './SamplingResult'; +import { SpanKind } from './span_kind'; +/** + * This interface represent a sampler. Sampling is a mechanism to control the + * noise and overhead introduced by OpenTelemetry by reducing the number of + * samples of traces collected and sent to the backend. + */ +export interface Sampler { + /** + * Checks whether span needs to be created and tracked. + * + * @param context Parent Context which may contain a span. + * @param traceId of the span to be created. It can be different from the + * traceId in the {@link SpanContext}. Typically in situations when the + * span to be created starts a new trace. + * @param spanName of the span to be created. + * @param spanKind of the span to be created. + * @param attributes Initial set of SpanAttributes for the Span being constructed. + * @param links Collection of links that will be associated with the Span to + * be created. Typically useful for batch operations. + * @returns a {@link SamplingResult}. + */ + shouldSample(context: Context, traceId: string, spanName: string, spanKind: SpanKind, attributes: SpanAttributes, links: Link[]): SamplingResult; + /** Returns the sampler name or short description with the configuration. */ + toString(): string; +} +//# sourceMappingURL=Sampler.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.js b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js new file mode 100644 index 00000000..6034482e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.js.map b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js.map new file mode 100644 index 00000000..bb6528e1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Sampler.js","sourceRoot":"","sources":["../../../src/trace/Sampler.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SamplingResult } from './SamplingResult';\nimport { SpanKind } from './span_kind';\n\n/**\n * This interface represent a sampler. Sampling is a mechanism to control the\n * noise and overhead introduced by OpenTelemetry by reducing the number of\n * samples of traces collected and sent to the backend.\n */\nexport interface Sampler {\n /**\n * Checks whether span needs to be created and tracked.\n *\n * @param context Parent Context which may contain a span.\n * @param traceId of the span to be created. It can be different from the\n * traceId in the {@link SpanContext}. Typically in situations when the\n * span to be created starts a new trace.\n * @param spanName of the span to be created.\n * @param spanKind of the span to be created.\n * @param attributes Initial set of SpanAttributes for the Span being constructed.\n * @param links Collection of links that will be associated with the Span to\n * be created. Typically useful for batch operations.\n * @returns a {@link SamplingResult}.\n */\n shouldSample(\n context: Context,\n traceId: string,\n spanName: string,\n spanKind: SpanKind,\n attributes: SpanAttributes,\n links: Link[]\n ): SamplingResult;\n\n /** Returns the sampler name or short description with the configuration. */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.d.ts b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.d.ts new file mode 100644 index 00000000..93948465 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.d.ts @@ -0,0 +1,39 @@ +import { SpanAttributes } from './attributes'; +/** + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export declare enum SamplingDecision { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + NOT_RECORD = 0, + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + RECORD = 1, + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
+ */ + RECORD_AND_SAMPLED = 2 +} +/** + * A sampling result contains a decision for a {@link Span} and additional + * attributes the sampler would like to added to the Span. + */ +export interface SamplingResult { + /** + * A sampling decision, refer to {@link SamplingDecision} for details. + */ + decision: SamplingDecision; + /** + * The list of attributes returned by SamplingResult MUST be immutable. + * Caller may call {@link Sampler}.shouldSample any number of times and + * can safely cache the returned value. + */ + attributes?: Readonly; +} +//# sourceMappingURL=SamplingResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js new file mode 100644 index 00000000..d102e82b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js @@ -0,0 +1,41 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SamplingDecision = void 0; +/** + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
+ */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js.map b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js.map new file mode 100644 index 00000000..7a850e99 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SamplingResult.js","sourceRoot":"","sources":["../../../src/trace/SamplingResult.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH;;;GAGG;AACH,IAAY,gBAgBX;AAhBD,WAAY,gBAAgB;IAC1B;;;OAGG;IACH,mEAAU,CAAA;IACV;;;OAGG;IACH,2DAAM,CAAA;IACN;;;OAGG;IACH,mFAAkB,CAAA;AACpB,CAAC,EAhBW,gBAAgB,GAAhB,wBAAgB,KAAhB,wBAAgB,QAgB3B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\n\n/**\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nexport enum SamplingDecision {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n NOT_RECORD,\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n RECORD,\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n RECORD_AND_SAMPLED,\n}\n\n/**\n * A sampling result contains a decision for a {@link Span} and additional\n * attributes the sampler would like to added to the Span.\n */\nexport interface SamplingResult {\n /**\n * A sampling decision, refer to {@link SamplingDecision} for details.\n */\n decision: SamplingDecision;\n /**\n * The list of attributes returned by SamplingResult MUST be immutable.\n * Caller may call {@link Sampler}.shouldSample any number of times and\n * can safely cache the returned value.\n */\n attributes?: Readonly;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.d.ts b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.d.ts new file mode 100644 index 00000000..c8045689 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.d.ts @@ -0,0 +1,23 @@ +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SpanKind } from './span_kind'; +/** + * Options needed for span creation + */ +export interface SpanOptions { + /** + * The SpanKind of a span + * @default {@link SpanKind.INTERNAL} + */ + kind?: SpanKind; + /** A span's attributes */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** A manually specified start time for the created `Span` object. 
*/ + startTime?: TimeInput; + /** The new span should be a root span. (Ignore parent from context). */ + root?: boolean; +} +//# sourceMappingURL=SpanOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js new file mode 100644 index 00000000..cb582305 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js.map b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js.map new file mode 100644 index 00000000..049f6858 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SpanOptions.js","sourceRoot":"","sources":["../../../src/trace/SpanOptions.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SpanKind } from './span_kind';\n\n/**\n * Options needed for span creation\n */\nexport interface SpanOptions {\n /**\n * The SpanKind of a span\n * @default {@link SpanKind.INTERNAL}\n */\n kind?: SpanKind;\n\n /** A span's attributes */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /** A manually specified start time for the created `Span` object. */\n startTime?: TimeInput;\n\n /** The new span should be a root span. (Ignore parent from context). 
*/\n root?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.d.ts b/node_modules/@opentelemetry/api/build/src/trace/attributes.d.ts new file mode 100644 index 00000000..a2a5d2a2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/attributes.d.ts @@ -0,0 +1,10 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +/** + * @deprecated please use {@link Attributes} + */ +export declare type SpanAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type SpanAttributeValue = AttributeValue; +//# sourceMappingURL=attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.js b/node_modules/@opentelemetry/api/build/src/trace/attributes.js new file mode 100644 index 00000000..c6eb97a3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/attributes.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.js.map b/node_modules/@opentelemetry/api/build/src/trace/attributes.js.map new file mode 100644 index 00000000..4de58c42 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"attributes.js","sourceRoot":"","sources":["../../../src/trace/attributes.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type SpanAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type SpanAttributeValue = AttributeValue;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.d.ts b/node_modules/@opentelemetry/api/build/src/trace/context-utils.d.ts new file mode 100644 index 00000000..1d46619b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/context-utils.d.ts @@ -0,0 +1,37 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanContext } from 
'./span_context'; +/** + * Return the span if one exists + * + * @param context context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export declare function deleteSpan(context: Context): Context; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; +//# sourceMappingURL=context-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.js b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js new file mode 100644 index 00000000..a56c73d0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js @@ -0,0 +1,74 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getSpan = void 0; +var context_1 = require("../context/context"); +var NonRecordingSpan_1 = require("./NonRecordingSpan"); +/** + * span key + */ +var SPAN_KEY = context_1.createContextKey('OpenTelemetry Context Key SPAN'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +exports.getSpan = getSpan; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +exports.setSpan = setSpan; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +function deleteSpan(context) { + return context.deleteValue(SPAN_KEY); +} +exports.deleteSpan = deleteSpan; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext)); +} +exports.setSpanContext = setSpanContext; +/** + * Get the span context of the span if it exists. 
+ * + * @param context context to get values from + */ +function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.spanContext(); +} +exports.getSpanContext = getSpanContext; +//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.js.map b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js.map new file mode 100644 index 00000000..d9775855 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-utils.js","sourceRoot":"","sources":["../../../src/trace/context-utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,8CAAsD;AAItD,uDAAsD;AAEtD;;GAEG;AACH,IAAM,QAAQ,GAAG,0BAAgB,CAAC,gCAAgC,CAAC,CAAC;AAEpE;;;;GAIG;AACH,SAAgB,OAAO,CAAC,OAAgB;IACtC,OAAQ,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAU,IAAI,SAAS,CAAC;AAC3D,CAAC;AAFD,0BAEC;AAED;;;;;GAKG;AACH,SAAgB,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC1C,CAAC;AAFD,0BAEC;AAED;;;;GAIG;AACH,SAAgB,UAAU,CAAC,OAAgB;IACzC,OAAO,OAAO,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;AACvC,CAAC;AAFD,gCAEC;AAED;;;;;;GAMG;AACH,SAAgB,cAAc,CAC5B,OAAgB,EAChB,WAAwB;IAExB,OAAO,OAAO,CAAC,OAAO,EAAE,IAAI,mCAAgB,CAAC,WAAW,CAAC,CAAC,CAAC;AAC7D,CAAC;AALD,wCAKC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;;IAC7C,OAAO,MAAA,OAAO,CAAC,OAAO,CAAC,0CAAE,WAAW,EAAE,CAAC;AACzC,CAAC;AAFD,wCAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { NonRecordingSpan } from './NonRecordingSpan';\n\n/**\n * span key\n */\nconst SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN');\n\n/**\n * Return the span if one exists\n *\n * @param context context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return (context.getValue(SPAN_KEY) as Span) || undefined;\n}\n\n/**\n * Set the span on a context\n *\n * @param context context to use as parent\n * @param span span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return context.setValue(SPAN_KEY, span);\n}\n\n/**\n * Remove current span stored in the context\n *\n * @param context context to delete span from\n */\nexport function deleteSpan(context: Context): Context {\n return context.deleteValue(SPAN_KEY);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context context to set active span on\n * @param spanContext span context to be wrapped\n */\nexport function setSpanContext(\n context: Context,\n spanContext: SpanContext\n): Context {\n return setSpan(context, new NonRecordingSpan(spanContext));\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context context to get 
values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return getSpan(context)?.spanContext();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.d.ts b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.d.ts new file mode 100644 index 00000000..9ed5ecb7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.d.ts @@ -0,0 +1,22 @@ +import { TraceState } from '../trace_state'; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +export declare class TraceStateImpl implements TraceState { + private _internalState; + constructor(rawTraceState?: string); + set(key: string, value: string): TraceStateImpl; + unset(key: string): TraceStateImpl; + get(key: string): string | undefined; + serialize(): string; + private _parse; + private _keys; + private _clone; +} +//# sourceMappingURL=tracestate-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js new file mode 100644 index 00000000..576cb89a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js @@ -0,0 +1,105 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TraceStateImpl = void 0; +var tracestate_validators_1 = require("./tracestate-validators"); +var MAX_TRACE_STATE_ITEMS = 32; +var MAX_TRACE_STATE_LEN = 512; +var LIST_MEMBERS_SEPARATOR = ','; +var LIST_MEMBER_KEY_VALUE_SPLITTER = '='; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +var TraceStateImpl = /** @class */ (function () { + function TraceStateImpl(rawTraceState) { + this._internalState = new Map(); + if (rawTraceState) + this._parse(rawTraceState); + } + TraceStateImpl.prototype.set = function (key, value) { + // TODO: Benchmark the different approaches(map vs list) and + // use the faster one. 
+ var traceState = this._clone(); + if (traceState._internalState.has(key)) { + traceState._internalState.delete(key); + } + traceState._internalState.set(key, value); + return traceState; + }; + TraceStateImpl.prototype.unset = function (key) { + var traceState = this._clone(); + traceState._internalState.delete(key); + return traceState; + }; + TraceStateImpl.prototype.get = function (key) { + return this._internalState.get(key); + }; + TraceStateImpl.prototype.serialize = function () { + var _this = this; + return this._keys() + .reduce(function (agg, key) { + agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + _this.get(key)); + return agg; + }, []) + .join(LIST_MEMBERS_SEPARATOR); + }; + TraceStateImpl.prototype._parse = function (rawTraceState) { + if (rawTraceState.length > MAX_TRACE_STATE_LEN) + return; + this._internalState = rawTraceState + .split(LIST_MEMBERS_SEPARATOR) + .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning + .reduce(function (agg, part) { + var listMember = part.trim(); // Optional Whitespace (OWS) handling + var i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); + if (i !== -1) { + var key = listMember.slice(0, i); + var value = listMember.slice(i + 1, part.length); + if (tracestate_validators_1.validateKey(key) && tracestate_validators_1.validateValue(value)) { + agg.set(key, value); + } + else { + // TODO: Consider to add warning log + } + } + return agg; + }, new Map()); + // Because of the reverse() requirement, trunc must be done after map is created + if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { + this._internalState = new Map(Array.from(this._internalState.entries()) + .reverse() // Use reverse same as original tracestate parse chain + .slice(0, MAX_TRACE_STATE_ITEMS)); + } + }; + TraceStateImpl.prototype._keys = function () { + return Array.from(this._internalState.keys()).reverse(); + }; + TraceStateImpl.prototype._clone = function () { + var traceState = new TraceStateImpl(); + traceState._internalState = new Map(this._internalState); + return traceState; + }; + return TraceStateImpl; +}()); +exports.TraceStateImpl = TraceStateImpl; +//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js.map b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js.map new file mode 100644 index 00000000..dccaa908 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracestate-impl.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-impl.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,iEAAqE;AAErE,IAAM,qBAAqB,GAAG,EAAE,CAAC;AACjC,IAAM,mBAAmB,GAAG,GAAG,CAAC;AAChC,IAAM,sBAAsB,GAAG,GAAG,CAAC;AACnC,IAAM,8BAA8B,GAAG,GAAG,CAAC;AAE3C;;;;;;;;GAQG;AACH;IAGE,wBAAY,aAAsB;QAF1B,mBAAc,GAAwB,IAAI,GAAG,EAAE,CAAC;QAGtD,IAAI,aAAa;YAAE,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;IAChD,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW,EAAE,KAAa;QAC5B,4DAA4D;QAC5D,sBAAsB;QACtB,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,IAAI,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;YACtC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACvC;QACD,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC1C,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,8BAAK,GAAL,UAAM,GAAW;QACf,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW;QACb,OAAO,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;IAED,kCAAS,GAAT;QAAA,iBAOC;QANC,OAAO,IAAI,CAAC,KAAK,EAAE;aAChB,MAAM,CAAC,UAAC,GAAa,EAAE,GAAG;YACzB,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,8BAA8B,GAAG,KAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YAC/D,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,EAAE,CAAC;aACL,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAClC,CAAC;IAEO,+BAAM,GAAd,UAAe,aAAqB;QAClC,IAAI,aAAa,CAAC,MAAM,GAAG,mBAAmB;YAAE,OAAO;QACvD,IAAI,CAAC,cAAc,GAAG,aAAa;aAChC,KAAK,CAAC,sBAAsB,CAAC;aAC7B,OAAO,EAAE,CAAC,2EAA2E;aACrF,MAAM,CAAC,UAAC,GAAwB,EAAE,IAAY;YAC7C,IAAM,UAAU,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,qCAAqC;YACrE,IAAM,CAAC,GAAG,UAAU,CAAC,OAAO,CAAC,8BAA8B,CAAC,CAAC;YAC7D,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;gBACZ,IAAM,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACnC,IAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;gBACnD,IAAI,mCAAW,CAAC,GAAG,CAAC,IAAI,qCAAa,CAAC,KAAK,CAAC,EAAE;oBAC5C,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBACrB;qBAAM;oBACL,oCAAoC;iBACrC;aACF;YACD,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC;QAEhB,gFAAgF;QAChF,IAAI,IAAI,CAAC,cAAc,CAAC,IAAI,GAAG,qBAAqB,EAAE;YACpD,IAAI,CAAC,cAAc,GAAG,IAAI,GAAG,CAC3B,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE,CAAC;iBACtC,OAAO,EAAE,CAAC,sDAAsD;iBAChE,KAAK,CAAC,CAAC,EAAE,qBAAqB,CAAC,CACnC,CAAC;SACH;IACH,CAAC;IAEO,8BAAK,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;IAC1D,CAAC;IAEO,+BAAM,GAAd;QACE,IAAM,UAAU,GAAG,IAAI,cAAc,EAAE,CAAC;QACxC,UAAU,CAAC,cAAc,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QACzD,OAAO,UAAU,CAAC;IACpB,CAAC;IACH,qBAAC;AAAD,CAAC,AA5ED,IA4EC;AA5EY,wCAAc","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { validateKey, validateValue } from './tracestate-validators';\n\nconst MAX_TRACE_STATE_ITEMS = 32;\nconst MAX_TRACE_STATE_LEN = 512;\nconst LIST_MEMBERS_SEPARATOR = ',';\nconst LIST_MEMBER_KEY_VALUE_SPLITTER = '=';\n\n/**\n * TraceState must be a class and not a simple object type 
because of the spec\n * requirement (https://www.w3.org/TR/trace-context/#tracestate-field).\n *\n * Here is the list of allowed mutations:\n * - New key-value pair should be added into the beginning of the list\n * - The value of any key can be updated. Modified keys MUST be moved to the\n * beginning of the list.\n */\nexport class TraceStateImpl implements TraceState {\n private _internalState: Map = new Map();\n\n constructor(rawTraceState?: string) {\n if (rawTraceState) this._parse(rawTraceState);\n }\n\n set(key: string, value: string): TraceStateImpl {\n // TODO: Benchmark the different approaches(map vs list) and\n // use the faster one.\n const traceState = this._clone();\n if (traceState._internalState.has(key)) {\n traceState._internalState.delete(key);\n }\n traceState._internalState.set(key, value);\n return traceState;\n }\n\n unset(key: string): TraceStateImpl {\n const traceState = this._clone();\n traceState._internalState.delete(key);\n return traceState;\n }\n\n get(key: string): string | undefined {\n return this._internalState.get(key);\n }\n\n serialize(): string {\n return this._keys()\n .reduce((agg: string[], key) => {\n agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key));\n return agg;\n }, [])\n .join(LIST_MEMBERS_SEPARATOR);\n }\n\n private _parse(rawTraceState: string) {\n if (rawTraceState.length > MAX_TRACE_STATE_LEN) return;\n this._internalState = rawTraceState\n .split(LIST_MEMBERS_SEPARATOR)\n .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning\n .reduce((agg: Map, part: string) => {\n const listMember = part.trim(); // Optional Whitespace (OWS) handling\n const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER);\n if (i !== -1) {\n const key = listMember.slice(0, i);\n const value = listMember.slice(i + 1, part.length);\n if (validateKey(key) && validateValue(value)) {\n agg.set(key, value);\n } else {\n // TODO: Consider to add warning log\n }\n }\n return agg;\n }, new Map());\n\n // Because of the reverse() requirement, trunc must be done after map is created\n if (this._internalState.size > MAX_TRACE_STATE_ITEMS) {\n this._internalState = new Map(\n Array.from(this._internalState.entries())\n .reverse() // Use reverse same as original tracestate parse chain\n .slice(0, MAX_TRACE_STATE_ITEMS)\n );\n }\n }\n\n private _keys(): string[] {\n return Array.from(this._internalState.keys()).reverse();\n }\n\n private _clone(): TraceStateImpl {\n const traceState = new TraceStateImpl();\n traceState._internalState = new Map(this._internalState);\n return traceState;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.d.ts b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.d.ts new file mode 100644 index 00000000..4917f99d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.d.ts @@ -0,0 +1,15 @@ +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. 
+ * see https://www.w3.org/TR/trace-context/#key + */ +export declare function validateKey(key: string): boolean; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. + */ +export declare function validateValue(value: string): boolean; +//# sourceMappingURL=tracestate-validators.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js new file mode 100644 index 00000000..008a4fde --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js @@ -0,0 +1,46 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.validateValue = exports.validateKey = void 0; +var VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; +var VALID_KEY = "[a-z]" + VALID_KEY_CHAR_RANGE + "{0,255}"; +var VALID_VENDOR_KEY = "[a-z0-9]" + VALID_KEY_CHAR_RANGE + "{0,240}@[a-z]" + VALID_KEY_CHAR_RANGE + "{0,13}"; +var VALID_KEY_REGEX = new RegExp("^(?:" + VALID_KEY + "|" + VALID_VENDOR_KEY + ")$"); +var VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; +var INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. + * see https://www.w3.org/TR/trace-context/#key + */ +function validateKey(key) { + return VALID_KEY_REGEX.test(key); +} +exports.validateKey = validateKey; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. 
+ */ +function validateValue(value) { + return (VALID_VALUE_BASE_REGEX.test(value) && + !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); +} +exports.validateValue = validateValue; +//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js.map b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js.map new file mode 100644 index 00000000..fc5cee18 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracestate-validators.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-validators.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,IAAM,oBAAoB,GAAG,cAAc,CAAC;AAC5C,IAAM,SAAS,GAAG,UAAQ,oBAAoB,YAAS,CAAC;AACxD,IAAM,gBAAgB,GAAG,aAAW,oBAAoB,qBAAgB,oBAAoB,WAAQ,CAAC;AACrG,IAAM,eAAe,GAAG,IAAI,MAAM,CAAC,SAAO,SAAS,SAAI,gBAAgB,OAAI,CAAC,CAAC;AAC7E,IAAM,sBAAsB,GAAG,qBAAqB,CAAC;AACrD,IAAM,+BAA+B,GAAG,KAAK,CAAC;AAE9C;;;;;;;GAOG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,eAAe,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,CAAC;AAFD,kCAEC;AAED;;;GAGG;AACH,SAAgB,aAAa,CAAC,KAAa;IACzC,OAAO,CACL,sBAAsB,CAAC,IAAI,CAAC,KAAK,CAAC;QAClC,CAAC,+BAA+B,CAAC,IAAI,CAAC,KAAK,CAAC,CAC7C,CAAC;AACJ,CAAC;AALD,sCAKC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nconst VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]';\nconst VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`;\nconst VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`;\nconst VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`);\nconst VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/;\nconst INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/;\n\n/**\n * Key is opaque string up to 256 characters printable. It MUST begin with a\n * lowercase letter, and can only contain lowercase letters a-z, digits 0-9,\n * underscores _, dashes -, asterisks *, and forward slashes /.\n * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the\n * vendor name. 
Vendors SHOULD set the tenant ID at the beginning of the key.\n * see https://www.w3.org/TR/trace-context/#key\n */\nexport function validateKey(key: string): boolean {\n return VALID_KEY_REGEX.test(key);\n}\n\n/**\n * Value is opaque string up to 256 characters printable ASCII RFC0020\n * characters (i.e., the range 0x20 to 0x7E) except comma , and =.\n */\nexport function validateValue(value: string): boolean {\n return (\n VALID_VALUE_BASE_REGEX.test(value) &&\n !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.d.ts b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.d.ts new file mode 100644 index 00000000..e3b51fe4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.d.ts @@ -0,0 +1,3 @@ +import { TraceState } from '../trace_state'; +export declare function createTraceState(rawTraceState?: string): TraceState; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js new file mode 100644 index 00000000..93c9b310 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js @@ -0,0 +1,24 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createTraceState = void 0; +var tracestate_impl_1 = require("./tracestate-impl"); +function createTraceState(rawTraceState) { + return new tracestate_impl_1.TraceStateImpl(rawTraceState); +} +exports.createTraceState = createTraceState; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js.map b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js.map new file mode 100644 index 00000000..b543e48f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../../src/trace/internal/utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,qDAAmD;AAGnD,SAAgB,gBAAgB,CAAC,aAAsB;IACrD,OAAO,IAAI,gCAAc,CAAC,aAAa,CAAC,CAAC;AAC3C,CAAC;AAFD,4CAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { TraceStateImpl } from './tracestate-impl';\n\n\nexport function createTraceState(rawTraceState?: string): TraceState {\n return new TraceStateImpl(rawTraceState);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.d.ts b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.d.ts new file mode 100644 index 00000000..e32dab9d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.d.ts @@ -0,0 +1,5 @@ +import { SpanContext } from './span_context'; +export declare const INVALID_SPANID = "0000000000000000"; +export declare const INVALID_TRACEID = "00000000000000000000000000000000"; +export declare const INVALID_SPAN_CONTEXT: SpanContext; +//# sourceMappingURL=invalid-span-constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js new file mode 100644 index 00000000..ad0ab17e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js @@ -0,0 +1,27 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; +var trace_flags_1 = require("./trace_flags"); +exports.INVALID_SPANID = '0000000000000000'; +exports.INVALID_TRACEID = '00000000000000000000000000000000'; +exports.INVALID_SPAN_CONTEXT = { + traceId: exports.INVALID_TRACEID, + spanId: exports.INVALID_SPANID, + traceFlags: trace_flags_1.TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js.map b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js.map new file mode 100644 index 00000000..ecc67410 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"invalid-span-constants.js","sourceRoot":"","sources":["../../../src/trace/invalid-span-constants.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,6CAA2C;AAE9B,QAAA,cAAc,GAAG,kBAAkB,CAAC;AACpC,QAAA,eAAe,GAAG,kCAAkC,CAAC;AACrD,QAAA,oBAAoB,GAAgB;IAC/C,OAAO,EAAE,uBAAe;IACxB,MAAM,EAAE,sBAAc;IACtB,UAAU,EAAE,wBAAU,CAAC,IAAI;CAC5B,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanContext } from './span_context';\nimport { TraceFlags } from './trace_flags';\n\nexport const INVALID_SPANID = '0000000000000000';\nexport const INVALID_TRACEID = '00000000000000000000000000000000';\nexport const INVALID_SPAN_CONTEXT: SpanContext = {\n traceId: INVALID_TRACEID,\n spanId: INVALID_SPANID,\n traceFlags: TraceFlags.NONE,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.d.ts b/node_modules/@opentelemetry/api/build/src/trace/link.d.ts new file mode 100644 index 00000000..d1326f51 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/link.d.ts @@ -0,0 +1,24 @@ +import { SpanAttributes } from './attributes'; +import { SpanContext } from './span_context'; +/** + * A pointer from the current {@link Span} to another span in the same trace or + * in a different trace. + * Few examples of Link usage. + * 1. Batch Processing: A batch of elements may contain elements associated + * with one or more traces/spans. Since there can only be one parent + * SpanContext, Link is used to keep reference to SpanContext of all + * elements in the batch. + * 2. Public Endpoint: A SpanContext in incoming client request on a public + * endpoint is untrusted from service provider perspective. In such case it + * is advisable to start a new trace with appropriate sampling decision. + * However, it is desirable to associate incoming SpanContext to new trace + * initiated on service provider side so two traces (from Client and from + * Service Provider) can be correlated. + */ +export interface Link { + /** The {@link SpanContext} of a linked span. 
*/ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. */ + attributes?: SpanAttributes; +} +//# sourceMappingURL=link.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.js b/node_modules/@opentelemetry/api/build/src/trace/link.js new file mode 100644 index 00000000..8036a634 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/link.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.js.map b/node_modules/@opentelemetry/api/build/src/trace/link.js.map new file mode 100644 index 00000000..18c0a813 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/link.js.map @@ -0,0 +1 @@ +{"version":3,"file":"link.js","sourceRoot":"","sources":["../../../src/trace/link.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { SpanContext } from './span_context';\n\n/**\n * A pointer from the current {@link Span} to another span in the same trace or\n * in a different trace.\n * Few examples of Link usage.\n * 1. Batch Processing: A batch of elements may contain elements associated\n * with one or more traces/spans. Since there can only be one parent\n * SpanContext, Link is used to keep reference to SpanContext of all\n * elements in the batch.\n * 2. Public Endpoint: A SpanContext in incoming client request on a public\n * endpoint is untrusted from service provider perspective. In such case it\n * is advisable to start a new trace with appropriate sampling decision.\n * However, it is desirable to associate incoming SpanContext to new trace\n * initiated on service provider side so two traces (from Client and from\n * Service Provider) can be correlated.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n /** A set of {@link SpanAttributes} on the link. 
*/\n attributes?: SpanAttributes;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.d.ts b/node_modules/@opentelemetry/api/build/src/trace/span.d.ts new file mode 100644 index 00000000..c5f28141 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span.d.ts @@ -0,0 +1,101 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes, SpanAttributeValue } from './attributes'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. + * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key the key for this attribute. + * @param value the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name the name of the event. + * @param [attributesOrStartTime] the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is {@type TimeInput} and 3rd param is undefined + * @param [startTime] start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status the SpanStatus to set. + */ + setStatus(status: SpanStatus): this; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name the Span name. + */ + updateName(name: string): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param [endTime] the time to set as Span's end time. If not provided, + * use the current time as the span's end time. 
+ */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception the exception the only accepted values are string or Error + * @param [time] the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; +} +//# sourceMappingURL=span.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.js b/node_modules/@opentelemetry/api/build/src/trace/span.js new file mode 100644 index 00000000..b50af462 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.js.map b/node_modules/@opentelemetry/api/build/src/trace/span.js.map new file mode 100644 index 00000000..f24165b7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span.js","sourceRoot":"","sources":["../../../src/trace/span.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes, SpanAttributeValue } from './attributes';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. 
A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key the key for this attribute.\n * @param value the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n\n /**\n * Sets attributes to the span.\n *\n * @param attributes the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n\n /**\n * Adds an event to the Span.\n *\n * @param name the name of the event.\n * @param [attributesOrStartTime] the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is {@type TimeInput} and 3rd param is undefined\n * @param [startTime] start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name the Span name.\n */\n updateName(name: string): this;\n\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param [endTime] the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception the exception the only accepted values are string or Error\n * @param [time] the time to set as Span's event time. 
If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.d.ts b/node_modules/@opentelemetry/api/build/src/trace/span_context.d.ts new file mode 100644 index 00000000..f30933a1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_context.d.ts @@ -0,0 +1,53 @@ +import { TraceState } from './trace_state'; +/** + * A SpanContext represents the portion of a {@link Span} which must be + * serialized and propagated along side of a {@link Baggage}. + */ +export interface SpanContext { + /** + * The ID of the trace that this span belongs to. It is worldwide unique + * with practically sufficient probability by being made as 16 randomly + * generated bytes, encoded as a 32 lowercase hex characters corresponding to + * 128 bits. + */ + traceId: string; + /** + * The ID of the Span. It is globally unique with practically sufficient + * probability by being made as 8 randomly generated bytes, encoded as a 16 + * lowercase hex characters corresponding to 64 bits. + */ + spanId: string; + /** + * Only true if the SpanContext was propagated from a remote parent. + */ + isRemote?: boolean; + /** + * Trace flags to propagate. + * + * It is represented as 1 byte (bitmap). Bit to represent whether trace is + * sampled or not. When set, the least significant bit documents that the + * caller may have recorded trace data. A caller who does not record trace + * data out-of-band leaves this flag unset. + * + * see {@link TraceFlags} for valid flag values. + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} +//# sourceMappingURL=span_context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.js b/node_modules/@opentelemetry/api/build/src/trace/span_context.js new file mode 100644 index 00000000..4b7976ce --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_context.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.js.map b/node_modules/@opentelemetry/api/build/src/trace/span_context.js.map new file mode 100644 index 00000000..005386d6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_context.js","sourceRoot":"","sources":["../../../src/trace/span_context.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from './trace_state';\n\n/**\n * A SpanContext represents the portion of a {@link Span} which must be\n * serialized and propagated along side of a {@link Baggage}.\n */\nexport interface SpanContext {\n /**\n * The ID of the trace that this span belongs to. It is worldwide unique\n * with practically sufficient probability by being made as 16 randomly\n * generated bytes, encoded as a 32 lowercase hex characters corresponding to\n * 128 bits.\n */\n traceId: string;\n /**\n * The ID of the Span. It is globally unique with practically sufficient\n * probability by being made as 8 randomly generated bytes, encoded as a 16\n * lowercase hex characters corresponding to 64 bits.\n */\n spanId: string;\n /**\n * Only true if the SpanContext was propagated from a remote parent.\n */\n isRemote?: boolean;\n /**\n * Trace flags to propagate.\n *\n * It is represented as 1 byte (bitmap). Bit to represent whether trace is\n * sampled or not. When set, the least significant bit documents that the\n * caller may have recorded trace data. A caller who does not record trace\n * data out-of-band leaves this flag unset.\n *\n * see {@link TraceFlags} for valid flag values.\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.d.ts b/node_modules/@opentelemetry/api/build/src/trace/span_kind.d.ts new file mode 100644 index 00000000..a89846f6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_kind.d.ts @@ -0,0 +1,27 @@ +export declare enum SpanKind { + /** Default value. 
Indicates that the span is used internally. */ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} +//# sourceMappingURL=span_kind.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.js b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js new file mode 100644 index 00000000..9c06e2c2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SpanKind = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind = exports.SpanKind || (exports.SpanKind = {})); +//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.js.map b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js.map new file mode 100644 index 00000000..c0ba360f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_kind.js","sourceRoot":"","sources":["../../../src/trace/span_kind.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,IAAY,QA6BX;AA7BD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IAEZ;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;;OAIG;IACH,+CAAY,CAAA;IAEZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EA7BW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QA6BnB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.d.ts b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.d.ts new file mode 100644 index 00000000..f1911114 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.d.ts @@ -0,0 +1,17 @@ +import { Span } from './span'; +import { SpanContext } from './span_context'; +export declare function isValidTraceId(traceId: string): boolean; +export declare function isValidSpanId(spanId: string): boolean; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export declare function isSpanContextValid(spanContext: SpanContext): boolean; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export declare function wrapSpanContext(spanContext: SpanContext): Span; +//# sourceMappingURL=spancontext-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js new file mode 100644 index 00000000..2c94969b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var invalid_span_constants_1 = require("./invalid-span-constants"); +var NonRecordingSpan_1 = require("./NonRecordingSpan"); +var VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +var VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== invalid_span_constants_1.INVALID_TRACEID; +} +exports.isValidTraceId = isValidTraceId; +function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== invalid_span_constants_1.INVALID_SPANID; +} +exports.isValidSpanId = isValidSpanId; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +exports.isSpanContextValid = isSpanContextValid; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +function wrapSpanContext(spanContext) { + return new NonRecordingSpan_1.NonRecordingSpan(spanContext); +} +exports.wrapSpanContext = wrapSpanContext; +//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js.map b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js.map new file mode 100644 index 00000000..45374a10 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"spancontext-utils.js","sourceRoot":"","sources":["../../../src/trace/spancontext-utils.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,mEAA2E;AAC3E,uDAAsD;AAItD,IAAM,mBAAmB,GAAG,mBAAmB,CAAC;AAChD,IAAM,kBAAkB,GAAG,iBAAiB,CAAC;AAE7C,SAAgB,cAAc,CAAC,OAAe;IAC5C,OAAO,mBAAmB,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,OAAO,KAAK,wCAAe,CAAC;AAC1E,CAAC;AAFD,wCAEC;AAED,SAAgB,aAAa,CAAC,MAAc;IAC1C,OAAO,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,MAAM,KAAK,uCAAc,CAAC;AACtE,CAAC;AAFD,sCAEC;AAED;;;GAGG;AACH,SAAgB,kBAAkB,CAAC,WAAwB;IACzD,OAAO,CACL,cAAc,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CACzE,CAAC;AACJ,CAAC;AAJD,gDAIC;AAED;;;;;GAKG;AACH,SAAgB,eAAe,CAAC,WAAwB;IACtD,OAAO,IAAI,mCAAgB,CAAC,WAAW,CAAC,CAAC;AAC3C,CAAC;AAFD,0CAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\n\nconst VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i;\nconst VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i;\n\nexport function isValidTraceId(traceId: string): boolean {\n return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID;\n}\n\nexport function isValidSpanId(spanId: string): boolean {\n return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID;\n}\n\n/**\n * Returns true if this {@link SpanContext} is valid.\n * @return true if this {@link SpanContext} is valid.\n */\nexport function isSpanContextValid(spanContext: SpanContext): boolean {\n return (\n isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)\n );\n}\n\n/**\n * Wrap the given {@link SpanContext} in a new non-recording {@link Span}\n *\n * @param spanContext span context to be wrapped\n * @returns a new non-recording {@link Span} with the provided context\n */\nexport function wrapSpanContext(spanContext: SpanContext): Span {\n return new NonRecordingSpan(spanContext);\n}\n"]} \ No newline at 
end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.d.ts b/node_modules/@opentelemetry/api/build/src/trace/status.d.ts new file mode 100644 index 00000000..ab19a68f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/status.d.ts @@ -0,0 +1,25 @@ +export interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} +/** + * An enumeration of status codes. + */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} +//# sourceMappingURL=status.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.js b/node_modules/@opentelemetry/api/build/src/trace/status.js new file mode 100644 index 00000000..50cbdef8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/status.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SpanStatusCode = void 0; +/** + * An enumeration of status codes. + */ +var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode = exports.SpanStatusCode || (exports.SpanStatusCode = {})); +//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.js.map b/node_modules/@opentelemetry/api/build/src/trace/status.js.map new file mode 100644 index 00000000..b921cae5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/status.js.map @@ -0,0 +1 @@ +{"version":3,"file":"status.js","sourceRoot":"","sources":["../../../src/trace/status.ts"],"names":[],"mappings":";;;AAsBA;;GAEG;AACH,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,GAAd,sBAAc,KAAd,sBAAc,QAczB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. 
*/\n message?: string;\n}\n\n/**\n * An enumeration of status codes.\n */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.d.ts b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.d.ts new file mode 100644 index 00000000..11288ba9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.d.ts @@ -0,0 +1,7 @@ +export declare enum TraceFlags { + /** Represents no flag set. */ + NONE = 0, + /** Bit to represent whether trace is sampled in trace flags. */ + SAMPLED = 1 +} +//# sourceMappingURL=trace_flags.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js new file mode 100644 index 00000000..f8d4dd8a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TraceFlags = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var TraceFlags; +(function (TraceFlags) { + /** Represents no flag set. */ + TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; + /** Bit to represent whether trace is sampled in trace flags. 
*/ + TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; +})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {})); +//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js.map b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js.map new file mode 100644 index 00000000..965ce794 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_flags.js","sourceRoot":"","sources":["../../../src/trace/trace_flags.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,IAAY,UAKX;AALD,WAAY,UAAU;IACpB,8BAA8B;IAC9B,2CAAU,CAAA;IACV,gEAAgE;IAChE,iDAAkB,CAAA;AACpB,CAAC,EALW,UAAU,GAAV,kBAAU,KAAV,kBAAU,QAKrB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum TraceFlags {\n /** Represents no flag set. */\n NONE = 0x0,\n /** Bit to represent whether trace is sampled in trace flags. */\n SAMPLED = 0x1 << 0,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.d.ts b/node_modules/@opentelemetry/api/build/src/trace/trace_state.d.ts new file mode 100644 index 00000000..f275b8be --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_state.d.ts @@ -0,0 +1,38 @@ +export interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key key of the TraceState entry. + * @param value value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. 
+ */ + serialize(): string; +} +//# sourceMappingURL=trace_state.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.js b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js new file mode 100644 index 00000000..13970381 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.js.map b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js.map new file mode 100644 index 00000000..267f2e82 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_state.js","sourceRoot":"","sources":["../../../src/trace/trace_state.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key key of the TraceState entry.\n * @param value value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n\n // TODO: Consider to add support for merging an object as well by also\n // accepting a single internalTraceState argument similar to the constructor.\n\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. 
Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.d.ts b/node_modules/@opentelemetry/api/build/src/trace/tracer.d.ts new file mode 100644 index 00000000..25090899 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer.d.ts @@ -0,0 +1,71 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +/** + * Tracer provides an interface for creating {@link Span}s. + */ +export interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method do NOT modify the current Context. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @returns Span The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally the new span gets set in context and this context is activated + * for the duration of the function call. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.startActiveSpan('op', span => { + * try { + * do some work + * span.setStatus({code: SpanStatusCode.OK}); + * return something; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } finally { + * span.end(); + * } + * }); + * + * @example + * const span = tracer.startActiveSpan('op', span => { + * try { + * do some work + * return span; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } + * }); + * do some more work + * span.end(); + */ + startActiveSpan<F extends (span: Span) => unknown>(name: string, fn: F): ReturnType<F>; + startActiveSpan<F extends (span: Span) => unknown>(name: string, options: SpanOptions, fn: F): ReturnType<F>; + startActiveSpan<F extends (span: Span) => unknown>(name: string, options: SpanOptions, context: Context, fn: F): ReturnType<F>; +} +//# sourceMappingURL=tracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.js b/node_modules/@opentelemetry/api/build/src/trace/tracer.js new file mode 100644 index 00000000..d710ef9a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.js.map b/node_modules/@opentelemetry/api/build/src/trace/tracer.js.map new file mode 100644 index 00000000..3c3c591a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer.js","sourceRoot":"","sources":["../../../src/trace/tracer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\n\n/**\n * Tracer provides an interface for creating {@link Span}s.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. Start the span without setting it on context.\n *\n * This method do NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @returns Span The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally the new span gets set in context and this context is activated\n * for the duration of the function call.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * span.setStatus({code: SpanStatusCode.OK});\n * return something;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * } finally {\n * span.end();\n * }\n * });\n *\n * @example\n * const span = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * return span;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * }\n * });\n * do some more work\n * span.end();\n */\n startActiveSpan unknown>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.d.ts b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.d.ts new file mode 100644 index 00000000..f3bbccfc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.d.ts @@ -0,0 +1,10 @@ +/** + * An interface describes additional metadata of a tracer. + */ +export interface TracerOptions { + /** + * The schemaUrl of the tracer or instrumentation library + */ + schemaUrl?: string; +} +//# sourceMappingURL=tracer_options.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js new file mode 100644 index 00000000..3547251a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=tracer_options.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js.map b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js.map new file mode 100644 index 00000000..a7432527 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_options.js","sourceRoot":"","sources":["../../../src/trace/tracer_options.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * An interface describes additional metadata of a tracer.\n */\nexport interface TracerOptions {\n /**\n * The schemaUrl of the tracer or instrumentation library\n */\n schemaUrl?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.d.ts b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.d.ts new file mode 100644 index 00000000..9b2f7a95 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.d.ts @@ -0,0 +1,21 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * A registry for creating named {@link Tracer}s. 
+ */ +export interface TracerProvider { + /** + * Returns a Tracer, creating one if one with the given name and version is + * not already created. + * + * This function may return different Tracer types (e.g. + * {@link NoopTracerProvider} vs. a functional tracer). + * + * @param name The name of the tracer or instrumentation library. + * @param version The version of the tracer or instrumentation library. + * @param options The options of the tracer or instrumentation library. + * @returns Tracer A Tracer with the given name and version + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; +} +//# sourceMappingURL=tracer_provider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js new file mode 100644 index 00000000..4c511db9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=tracer_provider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js.map b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js.map new file mode 100644 index 00000000..31f334aa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_provider.js","sourceRoot":"","sources":["../../../src/trace/tracer_provider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\n/**\n * A registry for creating named {@link Tracer}s.\n */\nexport interface TracerProvider {\n /**\n * Returns a Tracer, creating one if one with the given name and version is\n * not already created.\n *\n * This function may return different Tracer types (e.g.\n * {@link NoopTracerProvider} vs. 
a functional tracer).\n *\n * @param name The name of the tracer or instrumentation library.\n * @param version The version of the tracer or instrumentation library.\n * @param options The options of the tracer or instrumentation library.\n * @returns Tracer A Tracer with the given name and version\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/version.d.ts b/node_modules/@opentelemetry/api/build/src/version.d.ts new file mode 100644 index 00000000..28a7146c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/version.d.ts @@ -0,0 +1,2 @@ +export declare const VERSION = "1.1.0"; +//# sourceMappingURL=version.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/version.js b/node_modules/@opentelemetry/api/build/src/version.js new file mode 100644 index 00000000..ab2ae8e7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/version.js @@ -0,0 +1,21 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VERSION = void 0; +// this is autogenerated file, see scripts/version-update.js +exports.VERSION = '1.1.0'; +//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/version.js.map b/node_modules/@opentelemetry/api/build/src/version.js.map new file mode 100644 index 00000000..6079687f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/version.js.map @@ -0,0 +1 @@ +{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,4DAA4D;AAC/C,QAAA,OAAO,GAAG,OAAO,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// this is autogenerated file, see scripts/version-update.js\nexport const VERSION = '1.1.0';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/package.json b/node_modules/@opentelemetry/api/package.json new file mode 100644 index 00000000..0bfdbc9a --- /dev/null +++ b/node_modules/@opentelemetry/api/package.json @@ -0,0 +1,90 @@ +{ + "name": "@opentelemetry/api", + "version": "1.1.0", + "description": "Public API for OpenTelemetry", + "main": "build/src/index.js", + "module": "build/esm/index.js", + "types": 
"build/src/index.d.ts", + "browser": { + "./src/platform/index.ts": "./src/platform/browser/index.ts", + "./build/esm/platform/index.js": "./build/esm/platform/browser/index.js", + "./build/src/platform/index.js": "./build/src/platform/browser/index.js" + }, + "repository": "https://github.com/open-telemetry/opentelemetry-js-api.git", + "scripts": { + "clean": "tsc --build --clean tsconfig.json tsconfig.esm.json", + "codecov:browser": "nyc report --reporter=json && codecov -f coverage/*.json -p .", + "codecov": "nyc report --reporter=json && codecov -f coverage/*.json -p .", + "precompile": "npm run version", + "compile": "tsc --build tsconfig.json tsconfig.esm.json", + "docs": "typedoc", + "docs:deploy": "gh-pages --dist docs/out", + "docs:test": "linkinator docs/out --silent && linkinator docs/*.md *.md --markdown --silent", + "lint:fix": "eslint src test --ext .ts --fix", + "lint": "eslint src test --ext .ts", + "test:browser": "nyc karma start --single-run", + "test": "nyc ts-mocha -p tsconfig.json test/**/*.test.ts", + "cycle-check": "dpdm --exit-code circular:1 src/index.ts", + "version": "node scripts/version-update.js", + "prewatch": "npm run version", + "watch": "tsc --build --watch" + }, + "keywords": [ + "opentelemetry", + "nodejs", + "browser", + "tracing", + "profiling", + "stats", + "monitoring" + ], + "author": "OpenTelemetry Authors", + "license": "Apache-2.0", + "engines": { + "node": ">=8.0.0" + }, + "files": [ + "build/esm/**/*.js", + "build/esm/**/*.js.map", + "build/esm/**/*.d.ts", + "build/src/**/*.js", + "build/src/**/*.js.map", + "build/src/**/*.d.ts", + "LICENSE", + "README.md" + ], + "publishConfig": { + "access": "public" + }, + "devDependencies": { + "@types/mocha": "8.2.2", + "@types/node": "14.17.4", + "@types/sinon": "10.0.2", + "@types/webpack-env": "1.16.0", + "@typescript-eslint/eslint-plugin": "5.0.0", + "@typescript-eslint/parser": "5.0.0", + "codecov": "3.8.2", + "dpdm": "3.7.1", + "eslint": "7.32.0", + "eslint-plugin-header": "3.1.1", + "eslint-plugin-node": "11.1.0", + "gh-pages": "3.2.0", + "istanbul-instrumenter-loader": "3.0.1", + "karma": "5.2.3", + "karma-chrome-launcher": "3.1.0", + "karma-coverage-istanbul-reporter": "3.0.3", + "karma-mocha": "2.0.1", + "karma-spec-reporter": "0.0.32", + "karma-webpack": "4.0.2", + "lerna-changelog": "1.0.1", + "linkinator": "2.13.6", + "mocha": "7.2.0", + "nyc": "15.1.0", + "sinon": "11.1.1", + "ts-loader": "8.2.0", + "ts-mocha": "8.0.0", + "typedoc": "0.21.2", + "typescript": "4.3.5", + "webpack": "4.46.0" + } +} diff --git a/node_modules/@types/node-fetch/LICENSE b/node_modules/@types/node-fetch/LICENSE new file mode 100755 index 00000000..9e841e7a --- /dev/null +++ b/node_modules/@types/node-fetch/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/node-fetch/README.md b/node_modules/@types/node-fetch/README.md new file mode 100755 index 00000000..b57a745a --- /dev/null +++ b/node_modules/@types/node-fetch/README.md @@ -0,0 +1,16 @@ +# Installation +> `npm install --save @types/node-fetch` + +# Summary +This package contains type definitions for node-fetch (https://github.com/bitinn/node-fetch). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch. + +### Additional Details + * Last updated: Tue, 15 Feb 2022 20:01:33 GMT + * Dependencies: [@types/form-data](https://npmjs.com/package/@types/form-data), [@types/node](https://npmjs.com/package/@types/node) + * Global values: none + +# Credits +These definitions were written by [Torsten Werner](https://github.com/torstenwerner), [Niklas Lindgren](https://github.com/nikcorg), [Vinay Bedre](https://github.com/vinaybedre), [Antonio Román](https://github.com/kyranet), [Andrew Leedham](https://github.com/AndrewLeedham), [Jason Li](https://github.com/JasonLi914), [Steve Faulkner](https://github.com/southpolesteve), [ExE Boss](https://github.com/ExE-Boss), [Alex Savin](https://github.com/alexandrusavin), [Alexis Tyler](https://github.com/OmgImAlexis), [Jakub Kisielewski](https://github.com/kbkk), and [David Glasser](https://github.com/glasser). diff --git a/node_modules/@types/node-fetch/externals.d.ts b/node_modules/@types/node-fetch/externals.d.ts new file mode 100755 index 00000000..8107a183 --- /dev/null +++ b/node_modules/@types/node-fetch/externals.d.ts @@ -0,0 +1,21 @@ +// `AbortSignal` is defined here to prevent a dependency on a particular +// implementation like the `abort-controller` package, and to avoid requiring +// the `dom` library in `tsconfig.json`. 
+ +export interface AbortSignal { + aborted: boolean; + + addEventListener: (type: "abort", listener: ((this: AbortSignal, event: any) => any), options?: boolean | { + capture?: boolean | undefined, + once?: boolean | undefined, + passive?: boolean | undefined + }) => void; + + removeEventListener: (type: "abort", listener: ((this: AbortSignal, event: any) => any), options?: boolean | { + capture?: boolean | undefined + }) => void; + + dispatchEvent: (event: any) => boolean; + + onabort: null | ((this: AbortSignal, event: any) => void); +} diff --git a/node_modules/@types/node-fetch/index.d.ts b/node_modules/@types/node-fetch/index.d.ts new file mode 100755 index 00000000..346d0b2d --- /dev/null +++ b/node_modules/@types/node-fetch/index.d.ts @@ -0,0 +1,224 @@ +// Type definitions for node-fetch 2.6 +// Project: https://github.com/bitinn/node-fetch +// Definitions by: Torsten Werner +// Niklas Lindgren +// Vinay Bedre +// Antonio Román +// Andrew Leedham +// Jason Li +// Steve Faulkner +// ExE Boss +// Alex Savin +// Alexis Tyler +// Jakub Kisielewski +// David Glasser +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/// + +import FormData = require('form-data'); +import { RequestOptions } from "http"; +import { URLSearchParams, URL } from "url"; +import { AbortSignal } from "./externals"; + +export class Request extends Body { + constructor(input: RequestInfo, init?: RequestInit); + clone(): Request; + context: RequestContext; + headers: Headers; + method: string; + redirect: RequestRedirect; + referrer: string; + url: string; + + // node-fetch extensions to the whatwg/fetch spec + agent?: RequestOptions['agent'] | ((parsedUrl: URL) => RequestOptions['agent']); + compress: boolean; + counter: number; + follow: number; + hostname: string; + port?: number | undefined; + protocol: string; + size: number; + timeout: number; +} + +export interface RequestInit { + // whatwg/fetch standard options + body?: BodyInit | undefined; + headers?: HeadersInit | undefined; + method?: string | undefined; + redirect?: RequestRedirect | undefined; + signal?: AbortSignal | null | undefined; + + // node-fetch extensions + agent?: RequestOptions['agent'] | ((parsedUrl: URL) => RequestOptions['agent']); // =null http.Agent instance, allows custom proxy, certificate etc. + compress?: boolean | undefined; // =true support gzip/deflate content encoding. false to disable + follow?: number | undefined; // =20 maximum redirect count. 0 to not follow redirect + size?: number | undefined; // =0 maximum response body size in bytes. 0 to disable + timeout?: number | undefined; // =0 req/res timeout in ms, it resets on redirect. 
0 to disable (OS limit applies) + + // node-fetch does not support mode, cache or credentials options +} + +export type RequestContext = + "audio" + | "beacon" + | "cspreport" + | "download" + | "embed" + | "eventsource" + | "favicon" + | "fetch" + | "font" + | "form" + | "frame" + | "hyperlink" + | "iframe" + | "image" + | "imageset" + | "import" + | "internal" + | "location" + | "manifest" + | "object" + | "ping" + | "plugin" + | "prefetch" + | "script" + | "serviceworker" + | "sharedworker" + | "style" + | "subresource" + | "track" + | "video" + | "worker" + | "xmlhttprequest" + | "xslt"; +export type RequestMode = "cors" | "no-cors" | "same-origin"; +export type RequestRedirect = "error" | "follow" | "manual"; +export type RequestCredentials = "omit" | "include" | "same-origin"; + +export type RequestCache = + "default" + | "force-cache" + | "no-cache" + | "no-store" + | "only-if-cached" + | "reload"; + +export class Headers implements Iterable<[string, string]> { + constructor(init?: HeadersInit); + forEach(callback: (value: string, name: string) => void): void; + append(name: string, value: string): void; + delete(name: string): void; + get(name: string): string | null; + has(name: string): boolean; + raw(): { [k: string]: string[] }; + set(name: string, value: string): void; + + // Iterable methods + entries(): IterableIterator<[string, string]>; + keys(): IterableIterator; + values(): IterableIterator; + [Symbol.iterator](): Iterator<[string, string]>; +} + +type BlobPart = ArrayBuffer | ArrayBufferView | Blob | string; + +interface BlobOptions { + type?: string | undefined; + endings?: "transparent" | "native" | undefined; +} + +export class Blob { + constructor(blobParts?: BlobPart[], options?: BlobOptions); + readonly type: string; + readonly size: number; + slice(start?: number, end?: number): Blob; + text(): Promise; +} + +export class Body { + constructor(body?: any, opts?: { size?: number | undefined; timeout?: number | undefined }); + arrayBuffer(): Promise; + blob(): Promise; + body: NodeJS.ReadableStream; + bodyUsed: boolean; + buffer(): Promise; + json(): Promise; + size: number; + text(): Promise; + textConverted(): Promise; + timeout: number; +} + +interface SystemError extends Error { + code?: string | undefined; +} + +export class FetchError extends Error { + name: "FetchError"; + constructor(message: string, type: string, systemError?: SystemError); + type: string; + code?: string | undefined; + errno?: string | undefined; +} + +export class Response extends Body { + constructor(body?: BodyInit, init?: ResponseInit); + static error(): Response; + static redirect(url: string, status: number): Response; + clone(): Response; + headers: Headers; + ok: boolean; + redirected: boolean; + status: number; + statusText: string; + type: ResponseType; + url: string; +} + +export type ResponseType = + "basic" + | "cors" + | "default" + | "error" + | "opaque" + | "opaqueredirect"; + +export interface ResponseInit { + headers?: HeadersInit | undefined; + size?: number | undefined; + status?: number | undefined; + statusText?: string | undefined; + timeout?: number | undefined; + url?: string | undefined; +} + +interface URLLike { + href: string; +} + +export type HeadersInit = Headers | string[][] | { [key: string]: string }; +// HeaderInit is exported to support backwards compatibility. 
See PR #34382 +export type HeaderInit = HeadersInit; +export type BodyInit = + ArrayBuffer + | ArrayBufferView + | NodeJS.ReadableStream + | string + | URLSearchParams + | FormData; +export type RequestInfo = string | URLLike | Request; + +declare function fetch( + url: RequestInfo, + init?: RequestInit +): Promise; + +declare namespace fetch { + function isRedirect(code: number): boolean; +} + +export default fetch; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/License b/node_modules/@types/node-fetch/node_modules/form-data/License new file mode 100644 index 00000000..c7ff12a2 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/@types/node-fetch/node_modules/form-data/README.md.bak b/node_modules/@types/node-fetch/node_modules/form-data/README.md.bak new file mode 100644 index 00000000..f06d86cb --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/README.md.bak @@ -0,0 +1,356 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v3.0.1.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@types/node-fetch/node_modules/form-data/Readme.md b/node_modules/@types/node-fetch/node_modules/form-data/Readme.md new file mode 100644 index 00000000..f06d86cb --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/Readme.md @@ -0,0 +1,356 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v3.0.1.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@types/node-fetch/node_modules/form-data/index.d.ts b/node_modules/@types/node-fetch/node_modules/form-data/index.d.ts new file mode 100644 index 00000000..295e9e9b --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/index.d.ts @@ -0,0 +1,62 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +// Extracted because @types/node doesn't export interfaces. 
+interface ReadableOptions { + highWaterMark?: number; + encoding?: string; + objectMode?: boolean; + read?(this: stream.Readable, size: number): void; + destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean; +} + +interface Options extends ReadableOptions { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(userHeaders?: FormData.Headers): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + setBoundary(boundary: string): void; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/@types/node-fetch/node_modules/form-data/lib/browser.js b/node_modules/@types/node-fetch/node_modules/form-data/lib/browser.js new file mode 100644 index 00000000..09e7c70e --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/lib/form_data.js b/node_modules/@types/node-fetch/node_modules/form-data/lib/form_data.js new file mode 100644 index 00000000..cf836b0b --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/lib/form_data.js @@ -0,0 +1,498 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/lib/populate.js b/node_modules/@types/node-fetch/node_modules/form-data/lib/populate.js new file mode 100644 index 00000000..4d35738d --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/package.json b/node_modules/@types/node-fetch/node_modules/form-data/package.json new file mode 100644 index 00000000..a2fcb88d --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/package.json @@ 
-0,0 +1,68 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.", + "version": "3.0.1", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run test && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 6" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "puppeteer": "^1.19.0", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/@types/node-fetch/package.json b/node_modules/@types/node-fetch/package.json new file mode 100755 index 00000000..12e32bc4 --- /dev/null +++ b/node_modules/@types/node-fetch/package.json @@ -0,0 +1,83 @@ +{ + "name": "@types/node-fetch", + "version": "2.6.1", + "description": "TypeScript definitions for node-fetch", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch", + "license": "MIT", + "contributors": [ + { + "name": "Torsten Werner", + "url": "https://github.com/torstenwerner", + "githubUsername": "torstenwerner" + }, + { + "name": "Niklas Lindgren", + "url": "https://github.com/nikcorg", + "githubUsername": "nikcorg" + }, + { + "name": "Vinay Bedre", + "url": "https://github.com/vinaybedre", + "githubUsername": "vinaybedre" + }, + { + "name": "Antonio Román", + "url": "https://github.com/kyranet", + "githubUsername": "kyranet" + }, + { + "name": "Andrew Leedham", + "url": "https://github.com/AndrewLeedham", + "githubUsername": "AndrewLeedham" + }, + { + "name": "Jason Li", + "url": "https://github.com/JasonLi914", + "githubUsername": "JasonLi914" + }, + { + "name": "Steve Faulkner", + "url": "https://github.com/southpolesteve", + "githubUsername": "southpolesteve" + }, + { + "name": "ExE Boss", + "url": "https://github.com/ExE-Boss", + "githubUsername": 
"ExE-Boss" + }, + { + "name": "Alex Savin", + "url": "https://github.com/alexandrusavin", + "githubUsername": "alexandrusavin" + }, + { + "name": "Alexis Tyler", + "url": "https://github.com/OmgImAlexis", + "githubUsername": "OmgImAlexis" + }, + { + "name": "Jakub Kisielewski", + "url": "https://github.com/kbkk", + "githubUsername": "kbkk" + }, + { + "name": "David Glasser", + "url": "https://github.com/glasser", + "githubUsername": "glasser" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/node-fetch" + }, + "scripts": {}, + "dependencies": { + "@types/node": "*", + "form-data": "^3.0.0" + }, + "typesPublisherContentHash": "c675d715eadc649103d0ecd377b5669dde03aa70390f1febf28750f5505e7b92", + "typeScriptVersion": "3.8" +} \ No newline at end of file diff --git a/node_modules/@types/tunnel/LICENSE b/node_modules/@types/tunnel/LICENSE new file mode 100755 index 00000000..9e841e7a --- /dev/null +++ b/node_modules/@types/tunnel/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/tunnel/README.md b/node_modules/@types/tunnel/README.md new file mode 100755 index 00000000..a2d8b618 --- /dev/null +++ b/node_modules/@types/tunnel/README.md @@ -0,0 +1,67 @@ +# Installation +> `npm install --save @types/tunnel` + +# Summary +This package contains type definitions for tunnel (https://github.com/koichik/node-tunnel/). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel. 
+## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel/index.d.ts) +````ts +// Type definitions for tunnel 0.0 +// Project: https://github.com/koichik/node-tunnel/ +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/// +import { Agent } from 'http'; +import { Agent as HttpsAgent } from 'https'; + +export function httpOverHttp(options?: HttpOptions): Agent; +export function httpsOverHttp(options?: HttpsOverHttpOptions): Agent; +export function httpOverHttps(options?: HttpOverHttpsOptions): HttpsAgent; +export function httpsOverHttps(options?: HttpsOverHttpsOptions): HttpsAgent; + +export interface HttpOptions { + maxSockets?: number | undefined; + proxy?: ProxyOptions | undefined; +} + +export interface HttpsOverHttpOptions extends HttpOptions { + ca?: Buffer[] | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} + +export interface HttpOverHttpsOptions extends HttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface HttpsOverHttpsOptions extends HttpsOverHttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface ProxyOptions { + host: string; + port: number; + localAddress?: string | undefined; + proxyAuth?: string | undefined; + headers?: { [key: string]: any } | undefined; +} + +export interface HttpsProxyOptions extends ProxyOptions { + ca?: Buffer[] | undefined; + servername?: string | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} + +```` + +### Additional Details + * Last updated: Fri, 02 Jul 2021 19:37:21 GMT + * Dependencies: [@types/node](https://npmjs.com/package/@types/node) + * Global values: none + +# Credits +These definitions were written by [BendingBender](https://github.com/BendingBender). 
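The @types/tunnel definitions added above only declare types; the runtime API they describe comes from the `tunnel` package itself. As a rough orientation for how the declared functions and option interfaces (`httpsOverHttp`, `ProxyOptions`, and so on) are typically consumed, here is a minimal, hypothetical TypeScript sketch; the proxy host, port, and credentials are placeholders and are not part of this patch:

```ts
// Minimal usage sketch for the tunnel API described by @types/tunnel.
// Assumes the runtime "tunnel" package is installed; proxy values are placeholders.
import * as https from "https";
import * as tunnel from "tunnel";

// httpsOverHttp tunnels HTTPS requests through a plain-HTTP proxy and returns a
// standard Node Agent (see HttpsOverHttpOptions / ProxyOptions declared above).
const agent = tunnel.httpsOverHttp({
    maxSockets: 4,
    proxy: {
        host: "proxy.example.invalid", // placeholder proxy host
        port: 3128,                    // placeholder proxy port
        proxyAuth: "user:password",    // optional proxy credentials
    },
});

// The agent plugs into the regular https client options.
https.get("https://api.github.com/", { agent }, (res) => {
    console.log(`status: ${res.statusCode}`);
});
```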
diff --git a/node_modules/@types/tunnel/index.d.ts b/node_modules/@types/tunnel/index.d.ts new file mode 100755 index 00000000..591eb216 --- /dev/null +++ b/node_modules/@types/tunnel/index.d.ts @@ -0,0 +1,47 @@ +// Type definitions for tunnel 0.0 +// Project: https://github.com/koichik/node-tunnel/ +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/// +import { Agent } from 'http'; +import { Agent as HttpsAgent } from 'https'; + +export function httpOverHttp(options?: HttpOptions): Agent; +export function httpsOverHttp(options?: HttpsOverHttpOptions): Agent; +export function httpOverHttps(options?: HttpOverHttpsOptions): HttpsAgent; +export function httpsOverHttps(options?: HttpsOverHttpsOptions): HttpsAgent; + +export interface HttpOptions { + maxSockets?: number | undefined; + proxy?: ProxyOptions | undefined; +} + +export interface HttpsOverHttpOptions extends HttpOptions { + ca?: Buffer[] | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} + +export interface HttpOverHttpsOptions extends HttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface HttpsOverHttpsOptions extends HttpsOverHttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface ProxyOptions { + host: string; + port: number; + localAddress?: string | undefined; + proxyAuth?: string | undefined; + headers?: { [key: string]: any } | undefined; +} + +export interface HttpsProxyOptions extends ProxyOptions { + ca?: Buffer[] | undefined; + servername?: string | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} diff --git a/node_modules/@types/tunnel/package.json b/node_modules/@types/tunnel/package.json new file mode 100755 index 00000000..5d3c8bbd --- /dev/null +++ b/node_modules/@types/tunnel/package.json @@ -0,0 +1,27 @@ +{ + "name": "@types/tunnel", + "version": "0.0.3", + "description": "TypeScript definitions for tunnel", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel", + "license": "MIT", + "contributors": [ + { + "name": "BendingBender", + "url": "https://github.com/BendingBender", + "githubUsername": "BendingBender" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/tunnel" + }, + "scripts": {}, + "dependencies": { + "@types/node": "*" + }, + "typesPublisherContentHash": "049a52929c6badcca87fc0ebc590bc49744a75f883bf18df77393ba05ddb4e7b", + "typeScriptVersion": "3.6" +} \ No newline at end of file diff --git a/node_modules/abort-controller/LICENSE b/node_modules/abort-controller/LICENSE new file mode 100644 index 00000000..c914149a --- /dev/null +++ b/node_modules/abort-controller/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Toru Nagashima + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/abort-controller/README.md b/node_modules/abort-controller/README.md new file mode 100644 index 00000000..9de3e45f --- /dev/null +++ b/node_modules/abort-controller/README.md @@ -0,0 +1,98 @@ +# abort-controller + +[![npm version](https://img.shields.io/npm/v/abort-controller.svg)](https://www.npmjs.com/package/abort-controller) +[![Downloads/month](https://img.shields.io/npm/dm/abort-controller.svg)](http://www.npmtrends.com/abort-controller) +[![Build Status](https://travis-ci.org/mysticatea/abort-controller.svg?branch=master)](https://travis-ci.org/mysticatea/abort-controller) +[![Coverage Status](https://codecov.io/gh/mysticatea/abort-controller/branch/master/graph/badge.svg)](https://codecov.io/gh/mysticatea/abort-controller) +[![Dependency Status](https://david-dm.org/mysticatea/abort-controller.svg)](https://david-dm.org/mysticatea/abort-controller) + +An implementation of [WHATWG AbortController interface](https://dom.spec.whatwg.org/#interface-abortcontroller). + +```js +import AbortController from "abort-controller" + +const controller = new AbortController() +const signal = controller.signal + +signal.addEventListener("abort", () => { + console.log("aborted!") +}) + +controller.abort() +``` + +> https://jsfiddle.net/1r2994qp/1/ + +## 💿 Installation + +Use [npm](https://www.npmjs.com/) to install then use a bundler. + +``` +npm install abort-controller +``` + +Or download from [`dist` directory](./dist). + +- [dist/abort-controller.mjs](dist/abort-controller.mjs) ... ES modules version. +- [dist/abort-controller.js](dist/abort-controller.js) ... Common JS version. +- [dist/abort-controller.umd.js](dist/abort-controller.umd.js) ... UMD (Universal Module Definition) version. This is transpiled by [Babel](https://babeljs.io/) for IE 11. + +## 📖 Usage + +### Basic + +```js +import AbortController from "abort-controller" +// or +const AbortController = require("abort-controller") + +// or UMD version defines a global variable: +const AbortController = window.AbortControllerShim +``` + +If your bundler recognizes `browser` field of `package.json`, the imported `AbortController` is the native one and it doesn't contain shim (even if the native implementation was nothing). +If you wanted to polyfill `AbortController` for IE, use `abort-controller/polyfill`. + +### Polyfilling + +Importing `abort-controller/polyfill` assigns the `AbortController` shim to the `AbortController` global variable if the native implementation was nothing. + +```js +import "abort-controller/polyfill" +// or +require("abort-controller/polyfill") +``` + +### API + +#### AbortController + +> https://dom.spec.whatwg.org/#interface-abortcontroller + +##### controller.signal + +The [AbortSignal](https://dom.spec.whatwg.org/#interface-AbortSignal) object which is associated to this controller. + +##### controller.abort() + +Notify `abort` event to listeners that the `signal` has. + +## 📰 Changelog + +- See [GitHub releases](https://github.com/mysticatea/abort-controller/releases). 
+ +## 🍻 Contributing + +Contributing is welcome ❤️ + +Please use GitHub issues/PRs. + +### Development tools + +- `npm install` installs dependencies for development. +- `npm test` runs tests and measures code coverage. +- `npm run clean` removes temporary files of tests. +- `npm run coverage` opens code coverage of the previous test with your default browser. +- `npm run lint` runs ESLint. +- `npm run build` generates `dist` codes. +- `npm run watch` runs tests on each file change. diff --git a/node_modules/abort-controller/browser.js b/node_modules/abort-controller/browser.js new file mode 100644 index 00000000..b0c5ec37 --- /dev/null +++ b/node_modules/abort-controller/browser.js @@ -0,0 +1,13 @@ +/*globals self, window */ +"use strict" + +/*eslint-disable @mysticatea/prettier */ +const { AbortController, AbortSignal } = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +module.exports = AbortController +module.exports.AbortSignal = AbortSignal +module.exports.default = AbortController diff --git a/node_modules/abort-controller/browser.mjs b/node_modules/abort-controller/browser.mjs new file mode 100644 index 00000000..a8f321af --- /dev/null +++ b/node_modules/abort-controller/browser.mjs @@ -0,0 +1,11 @@ +/*globals self, window */ + +/*eslint-disable @mysticatea/prettier */ +const { AbortController, AbortSignal } = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +export default AbortController +export { AbortController, AbortSignal } diff --git a/node_modules/abort-controller/dist/abort-controller.d.ts b/node_modules/abort-controller/dist/abort-controller.d.ts new file mode 100644 index 00000000..75852fb5 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.d.ts @@ -0,0 +1,43 @@ +import { EventTarget } from "event-target-shim" + +type Events = { + abort: any +} +type EventAttributes = { + onabort: any +} +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +declare class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() + /** + * Returns `true` if this `AbortSignal`"s `AbortController` has signaled to abort, and `false` otherwise. + */ + readonly aborted: boolean +} +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +declare class AbortController { + /** + * Initialize this controller. + */ + constructor() + /** + * Returns the `AbortSignal` object associated with this object. + */ + readonly signal: AbortSignal + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort(): void +} + +export default AbortController +export { AbortController, AbortSignal } diff --git a/node_modules/abort-controller/dist/abort-controller.js b/node_modules/abort-controller/dist/abort-controller.js new file mode 100644 index 00000000..49af7395 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.js @@ -0,0 +1,127 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var eventTargetShim = require('event-target-shim'); + +/** + * The signal class. 
+ * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports.default = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map diff --git a/node_modules/abort-controller/dist/abort-controller.js.map b/node_modules/abort-controller/dist/abort-controller.js.map new file mode 100644 index 00000000..cfdcafdc --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"abort-controller.js","sources":["../src/abort-signal.ts","../src/abort-controller.ts"],"sourcesContent":["import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? 
\"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":["EventTarget","defineEventAttribute"],"mappings":";;;;;;;;;;AAkBA;;;;AAIA,MAAqB,WAAY,SAAQA,2BAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACDC,oCAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnDD,2BAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpFD;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/abort-controller/dist/abort-controller.mjs b/node_modules/abort-controller/dist/abort-controller.mjs new file mode 100644 index 00000000..88ba22d5 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.mjs @@ -0,0 +1,118 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ +import { EventTarget, defineEventAttribute } from 'event-target-shim'; + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. 
+ */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +export default AbortController; +export { AbortController, AbortSignal }; +//# sourceMappingURL=abort-controller.mjs.map diff --git a/node_modules/abort-controller/dist/abort-controller.mjs.map b/node_modules/abort-controller/dist/abort-controller.mjs.map new file mode 100644 index 00000000..1e8fa6b0 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"abort-controller.mjs","sources":["../src/abort-signal.ts","../src/abort-controller.ts"],"sourcesContent":["import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? 
\"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":[],"mappings":";;;;;;AAkBA;;;;AAIA,MAAqB,WAAY,SAAQ,WAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACD,oBAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpFD;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;;;;"} \ No newline at end of file diff --git a/node_modules/abort-controller/dist/abort-controller.umd.js b/node_modules/abort-controller/dist/abort-controller.umd.js new file mode 100644 index 00000000..f643cfd6 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.umd.js @@ -0,0 +1,5 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.AbortControllerShim={}))})(this,function(a){'use strict';function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function d(a,b){for(var c,d=0;d\n * @copyright 2015 Toru Nagashima. 
All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n 
return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexport default EventTarget;\nexport { defineEventAttribute, EventTarget };\n//# sourceMappingURL=event-target-shim.mjs.map\n","import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? 
\"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":["pd","event","retv","privateData","get","console","assert","setCancelFlag","data","passiveListener","cancelable","canceled","preventDefault","error","Event","eventTarget","set","eventPhase","currentTarget","stopped","immediateStopped","timeStamp","Date","now","Object","defineProperty","value","enumerable","key","keys","i","length","defineRedirectDescriptor","configurable","defineCallDescriptor","apply","arguments","defineWrapper","BaseEvent","proto","CustomEvent","call","prototype","create","constructor","writable","descriptor","getOwnPropertyDescriptor","isFunc","getWrapper","wrapper","wrappers","getPrototypeOf","wrapEvent","Wrapper","isStopped","setEventPhase","setCurrentTarget","setPassiveListener","createAbortSignal","signal","AbortSignal","EventTarget","abortedFlags","abortSignal","dispatchEvent","type","getSignal","controller","signals","TypeError","WeakMap","target","composedPath","NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","stopPropagation","stopImmediatePropagation","bubbles","defaultPrevented","composed","srcElement","cancelBubble","returnValue","initEvent","window","setPrototypeOf","aborted","defineEventAttribute","defineProperties","Symbol","_typeof","toStringTag","AbortController","abort"],"mappings":";;;+3CAkCA,QAASA,CAAAA,CAAT,CAAYC,CAAZ,CAAmB,IACTC,CAAAA,CAAI,CAAGC,CAAW,CAACC,GAAZ,CAAgBH,CAAhB,QACbI,CAAAA,OAAO,CAACC,MAAR,CACY,IAAR,EAAAJ,CADJ,CAEI,6CAFJ,CAGID,CAHJ,EAKOC,EAOX,QAASK,CAAAA,CAAT,CAAuBC,CAAvB,CAA6B,OACG,KAAxB,EAAAA,CAAI,CAACC,eADgB,MAarB,CAACD,CAAI,CAACP,KAAL,CAAWS,UAbS,GAiBzBF,CAAI,CAACG,QAAL,GAjByB,CAkBgB,UAArC,QAAOH,CAAAA,CAAI,CAACP,KAAL,CAAWW,cAlBG,EAmBrBJ,CAAI,CAACP,KAAL,CAAWW,cAAX,EAnBqB,QAGE,WAAnB,QAAOP,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAJE,EAMjBR,OAAO,CAACQ,KAAR,CACI,oEADJ,CAEIL,CAAI,CAACC,eAFT,CANiB,EAiC7B,QAASK,CAAAA,CAAT,CAAeC,CAAf,CAA4Bd,CAA5B,CAAmC,CAC/BE,CAAW,CAACa,GAAZ,CAAgB,IAAhB,CAAsB,CAClBD,WAAW,CAAXA,CADkB,CAElBd,KAAK,CAALA,CAFkB,CAGlBgB,UAAU,CAAE,CAHM,CAIlBC,aAAa,CAAEH,CAJG,CAKlBJ,QAAQ,GALU,CAMlBQ,OAAO,GANW,CAOlBC,gBAAgB,GAPE,CAQlBX,eAAe,CAAE,IARC,CASlBY,SAAS,CAAEpB,CAAK,CAACoB,SAAN,EAAmBC,IAAI,CAACC,GAAL,EATZ,CAAtB,CAD+B,CAc/BC,MAAM,CAACC,cAAP,CAAsB,IAAtB,CAA4B,WAA5B,CAAyC,CAAEC,KAAK,GAAP,CAAgBC,UAAU,GAA1B,CAAzC,CAd+B,QAmBrBC,CAAAA,EAFJC,CAAI,CAAGL,MAAM,CAACK,IAAP,CAAY5B,CAAZ,EACJ6B,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,EACzBF,EAAMC,CAAI,CAACC,CAAD,EACVF,CAAG,GAAI,OACTJ,MAAM,CAACC,cAAP,CAAsB,IAAtB,CAA4BG,CAA5B,CAAiCI,CAAwB,CAACJ,CAAD,CAAzD,EAyOZ,QAASI,CAAAA,CAAT,CAAkCJ,CAAlC,CAAuC,OAC5B,CACHxB,GADG,WACG,OACKJ,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe2B,CAAf,CAFR,CAAA,CAIHZ,GAJG,UAICU,EAAO,CACP1B,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe2B,CAAf,EAAsBF,CALvB,CAAA,CAOHO,YAAY,GAPT,CAQHN,UAAU,GARP,EAkBX,QAASO,CAAAA,CAAT,CAA8BN,CAA9B,CAAmC,OACxB,CACHF,KADG,WACK,IACEzB,CAAAA,CAAK,CAAGD,CAAE,CAAC,IAAD,CAAF,CAASC,YAChBA,CAAAA,CAAK,CAAC2B,CAAD,CAAL,CAAWO,KAAX,CAAiBlC,CAAjB,CAAwBmC,SAAxB,CAHR,CAAA,CAKHH,YAAY,GALT,CAMHN,UAAU,GANP,EAiBX,QAASU,CAAAA,CAAT,CAAuBC,CAAvB,CAAkCC,CAAlC,CAAyC,SAO5BC,CAAAA,EAAYzB,EAAad,EAAO,CACrCqC,CAAS,CAACG,IAAV,CAA
e,IAAf,CAAqB1B,CAArB,CAAkCd,CAAlC,KAPE4B,CAAAA,CAAI,CAAGL,MAAM,CAACK,IAAP,CAAYU,CAAZ,KACO,CAAhB,GAAAV,CAAI,CAACE,aACEO,CAAAA,EAQXE,CAAW,CAACE,SAAZ,CAAwBlB,MAAM,CAACmB,MAAP,CAAcL,CAAS,CAACI,SAAxB,CAAmC,CACvDE,WAAW,CAAE,CAAElB,KAAK,CAAEc,CAAT,CAAsBP,YAAY,GAAlC,CAA0CY,QAAQ,GAAlD,CAD0C,CAAnC,CAXa,KAgBhC,GACKjB,CAAAA,CADL,CAAIE,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,KACzBF,EAAMC,CAAI,CAACC,CAAD,EACZ,EAAEF,CAAG,GAAIU,CAAAA,CAAS,CAACI,SAAnB,EAA+B,IACzBI,CAAAA,CAAU,CAAGtB,MAAM,CAACuB,wBAAP,CAAgCR,CAAhC,CAAuCX,CAAvC,CADY,CAEzBoB,CAAM,CAA+B,UAA5B,QAAOF,CAAAA,CAAU,CAACpB,KAFF,CAG/BF,MAAM,CAACC,cAAP,CACIe,CAAW,CAACE,SADhB,CAEId,CAFJ,CAGIoB,CAAM,CACAd,CAAoB,CAACN,CAAD,CADpB,CAEAI,CAAwB,CAACJ,CAAD,CALlC,QAUDY,CAAAA,EASX,QAASS,CAAAA,CAAT,CAAoBV,CAApB,CAA2B,IACV,IAAT,EAAAA,CAAK,EAAYA,CAAK,GAAKf,MAAM,CAACkB,gBAC3B5B,CAAAA,KAGPoC,CAAAA,CAAO,CAAGC,CAAQ,CAAC/C,GAAT,CAAamC,CAAb,QACC,KAAX,EAAAW,IACAA,CAAO,CAAGb,CAAa,CAACY,CAAU,CAACzB,MAAM,CAAC4B,cAAP,CAAsBb,CAAtB,CAAD,CAAX,CAA2CA,CAA3C,EACvBY,CAAQ,CAACnC,GAAT,CAAauB,CAAb,CAAoBW,CAApB,GAEGA,EAUJ,QAASG,CAAAA,CAAT,CAAmBtC,CAAnB,CAAgCd,CAAhC,CAAuC,IACpCqD,CAAAA,CAAO,CAAGL,CAAU,CAACzB,MAAM,CAAC4B,cAAP,CAAsBnD,CAAtB,CAAD,QACnB,IAAIqD,CAAAA,CAAJ,CAAYvC,CAAZ,CAAyBd,CAAzB,EASJ,QAASsD,CAAAA,CAAT,CAAmBtD,CAAnB,CAA0B,OACtBD,CAAAA,CAAE,CAACC,CAAD,CAAF,CAAUmB,iBAUd,QAASoC,CAAAA,CAAT,CAAuBvD,CAAvB,CAA8BgB,CAA9B,CAA0C,CAC7CjB,CAAE,CAACC,CAAD,CAAF,CAAUgB,UAAV,CAAuBA,EAUpB,QAASwC,CAAAA,CAAT,CAA0BxD,CAA1B,CAAiCiB,CAAjC,CAAgD,CACnDlB,CAAE,CAACC,CAAD,CAAF,CAAUiB,aAAV,CAA0BA,EAUvB,QAASwC,CAAAA,CAAT,CAA4BzD,CAA5B,CAAmCQ,CAAnC,CAAoD,CACvDT,CAAE,CAACC,CAAD,CAAF,CAAUQ,eAAV,CAA4BA,ysCC1ahBkD,CAAAA,OACNC,CAAAA,CAAM,CAAGpC,MAAM,CAACmB,MAAPnB,CAAcqC,CAAW,CAACnB,SAA1BlB,QACfsC,CAAAA,CAAW,CAACrB,IAAZqB,CAAiBF,CAAjBE,EACAC,CAAY,CAAC/C,GAAb+C,CAAiBH,CAAjBG,KACOH,UAMKI,CAAAA,EAAYJ,GACpBG,KAAAA,CAAY,CAAC3D,GAAb2D,CAAiBH,CAAjBG,IAIJA,CAAY,CAAC/C,GAAb+C,CAAiBH,CAAjBG,KACAH,CAAM,CAACK,aAAPL,CAA8B,CAAEM,IAAI,CAAE,OAAR,CAA9BN,GC9BJ,QAASO,CAAAA,CAAT,CAAmBC,CAAnB,KACUR,CAAAA,CAAM,CAAGS,CAAO,CAACjE,GAARiE,CAAYD,CAAZC,KACD,IAAVT,EAAAA,OACM,IAAIU,CAAAA,SAAJ,sEAEiB,IAAfF,GAAAA,CAAU,CAAY,MAAZ,GAA4BA,GAFxC,QAMHR,CAAAA,KF3BLzD,CAAAA,CAAW,CAAG,GAAIoE,CAAAA,QAOlBpB,CAAQ,CAAG,GAAIoB,CAAAA,QAkFrBzD,CAAK,CAAC4B,SAAN,CAAkB,IAKVwB,CAAAA,MAAO,OACAlE,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAeiE,IANZ,CAAA,IAaVM,CAAAA,QAAS,OACFxE,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASe,WAdN,CAAA,IAqBVG,CAAAA,eAAgB,OACTlB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASkB,aAtBN,CAAA,CA4BduD,YA5Bc,WA4BC,IACLvD,CAAAA,CAAa,CAAGlB,CAAE,CAAC,IAAD,CAAF,CAASkB,cADpB,MAEU,KAAjB,EAAAA,CAFO,CAGA,EAHA,CAKJ,CAACA,CAAD,CAjCG,CAAA,IAwCVwD,CAAAA,MAAO,OACA,EAzCG,CAAA,IAgDVC,CAAAA,iBAAkB,OACX,EAjDG,CAAA,IAwDVC,CAAAA,WAAY,OACL,EAzDG,CAAA,IAgEVC,CAAAA,gBAAiB,OACV,EAjEG,CAAA,IAwEV5D,CAAAA,YAAa,OACNjB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASiB,UAzEN,CAAA,CAgFd6D,eAhFc,WAgFI,IACRtE,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,GAHc,CAI4B,UAAtC,QAAOX,CAAAA,CAAI,CAACP,KAAL,CAAW6E,eAJR,EAKVtE,CAAI,CAACP,KAAL,CAAW6E,eAAX,EArFM,CAAA,CA6FdC,wBA7Fc,WA6Fa,IACjBvE,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,GAHuB,CAIvBX,CAAI,CAACY,gBAAL,GAJuB,CAK4B,UAA/C,QAAOZ,CAAAA,CAAI,CAACP,KAAL,CAAW8E,wBALC,EAMnBvE,CAAI,CAACP,KAAL,CAAW8E,wBAAX,EAnGM,CAAA,IA2GVC,CAAAA,SAAU,SACKhF,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe+E,OA5GpB,CAAA,IAmHVtE,CAAAA,YAAa,SACEV,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAeS,UApHpB,CAAA,CA2HdE,cA3Hc,WA2HG,CACbL,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA5HH,CAAA,IAmIViF,CAAAA,kBAAmB,OACZjF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASW,QApIN,CAAA,IA2IVuE,CAAAA,UAAW,SACIlF,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT
,CAAeiF,QA5IpB,CAAA,IAmJV7D,CAAAA,WAAY,OACLrB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASqB,SApJN,CAAA,IA4JV8D,CAAAA,YAAa,OACNnF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASe,WA7JN,CAAA,IAqKVqE,CAAAA,cAAe,OACRpF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASmB,OAtKN,CAAA,IAwKViE,CAAAA,aAAa1D,EAAO,IACfA,MAGClB,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,IACuC,SAAnC,QAAOX,CAAAA,CAAI,CAACP,KAAL,CAAWmF,eAClB5E,CAAI,CAACP,KAAL,CAAWmF,YAAX,KAhLM,CAAA,IAyLVC,CAAAA,aAAc,OACP,CAACrF,CAAE,CAAC,IAAD,CAAF,CAASW,QA1LP,CAAA,IA4LV0E,CAAAA,YAAY3D,EAAO,CACdA,CADc,EAEfnB,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA9LP,CAAA,CAyMdsF,SAzMc,WAyMF,EAzME,EA+MlB9D,MAAM,CAACC,cAAP,CAAsBX,CAAK,CAAC4B,SAA5B,CAAuC,aAAvC,CAAsD,CAClDhB,KAAK,CAAEZ,CAD2C,CAElDmB,YAAY,GAFsC,CAGlDY,QAAQ,GAH0C,CAAtD,EAOsB,WAAlB,QAAO0C,CAAAA,MAAP,EAAyD,WAAxB,QAAOA,CAAAA,MAAM,CAACzE,QAC/CU,MAAM,CAACgE,cAAP,CAAsB1E,CAAK,CAAC4B,SAA5B,CAAuC6C,MAAM,CAACzE,KAAP,CAAa4B,SAApD,EAGAS,CAAQ,CAACnC,GAAT,CAAauE,MAAM,CAACzE,KAAP,CAAa4B,SAA1B,CAAqC5B,CAArC,wiDChTiB+C,CAAAA,2EAMP,GAAIS,CAAAA,SAAJ,CAAc,4CAAd,sDAOAmB,CAAAA,CAAO,CAAG1B,CAAY,CAAC3D,GAAb2D,CAAiB,IAAjBA,KACO,SAAnB,QAAO0B,CAAAA,OACD,IAAInB,CAAAA,SAAJ,kEAEW,IAAT,QAAgB,MAAhB,GAAgC,MAFlC,QAMHmB,CAAAA,SArB0B3B,GAwBzC4B,CAAoB,CAAC7B,CAAW,CAACnB,SAAb,CAAwB,OAAxB,EA2BpB,GAAMqB,CAAAA,CAAY,CAAG,GAAIQ,CAAAA,OAAzB,CAGA/C,MAAM,CAACmE,gBAAPnE,CAAwBqC,CAAW,CAACnB,SAApClB,CAA+C,CAC3CiE,OAAO,CAAE,CAAE9D,UAAU,GAAZ,CADkC,CAA/CH,EAKsB,UAAlB,QAAOoE,CAAAA,MAAP,EAA8D,QAA9B,GAAAC,EAAOD,MAAM,CAACE,cAC9CtE,MAAM,CAACC,cAAPD,CAAsBqC,CAAW,CAACnB,SAAlClB,CAA6CoE,MAAM,CAACE,WAApDtE,CAAiE,CAC7DS,YAAY,GADiD,CAE7DP,KAAK,CAAE,aAFsD,CAAjEF,KC5EiBuE,CAAAA,oCAKb1B,CAAO,CAACrD,GAARqD,CAAY,IAAZA,CAAkBV,CAAiB,EAAnCU,4CAcAL,CAAW,CAACG,CAAS,CAAC,IAAD,CAAV,uCAPJA,CAAAA,CAAS,CAAC,IAAD,WAclBE,CAAO,CAAG,GAAIE,CAAAA,WAkBpB/C,MAAM,CAACmE,gBAAPnE,CAAwBuE,CAAe,CAACrD,SAAxClB,CAAmD,CAC/CoC,MAAM,CAAE,CAAEjC,UAAU,GAAZ,CADuC,CAE/CqE,KAAK,CAAE,CAAErE,UAAU,GAAZ,CAFwC,CAAnDH,EAKsB,UAAlB,QAAOoE,CAAAA,MAAP,EAA8D,QAA9B,GAAAC,EAAOD,MAAM,CAACE,cAC9CtE,MAAM,CAACC,cAAPD,CAAsBuE,CAAe,CAACrD,SAAtClB,CAAiDoE,MAAM,CAACE,WAAxDtE,CAAqE,CACjES,YAAY,GADqD,CAEjEP,KAAK,CAAE,iBAF0D,CAArEF"} \ No newline at end of file diff --git a/node_modules/abort-controller/package.json b/node_modules/abort-controller/package.json new file mode 100644 index 00000000..fc705e03 --- /dev/null +++ b/node_modules/abort-controller/package.json @@ -0,0 +1,97 @@ +{ + "name": "abort-controller", + "version": "3.0.0", + "description": "An implementation of WHATWG AbortController interface.", + "main": "dist/abort-controller", + "files": [ + "dist", + "polyfill.*", + "browser.*" + ], + "engines": { + "node": ">=6.5" + }, + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "browser": "./browser.js", + "devDependencies": { + "@babel/core": "^7.2.2", + "@babel/plugin-transform-modules-commonjs": "^7.2.0", + "@babel/preset-env": "^7.3.0", + "@babel/register": "^7.0.0", + "@mysticatea/eslint-plugin": "^8.0.1", + "@mysticatea/spy": "^0.1.2", + "@types/mocha": "^5.2.5", + "@types/node": "^10.12.18", + "assert": "^1.4.1", + "codecov": "^3.1.0", + "dts-bundle-generator": "^2.0.0", + "eslint": "^5.12.1", + "karma": "^3.1.4", + "karma-chrome-launcher": "^2.2.0", + "karma-coverage": "^1.1.2", + "karma-firefox-launcher": "^1.1.0", + "karma-growl-reporter": "^1.0.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0-rc.2", + "mocha": "^5.2.0", + "npm-run-all": "^4.1.5", + "nyc": "^13.1.0", + "opener": "^1.5.1", + "rimraf": "^2.6.3", + "rollup": "^1.1.2", + "rollup-plugin-babel": "^4.3.2", + 
"rollup-plugin-babel-minify": "^7.0.0", + "rollup-plugin-commonjs": "^9.2.0", + "rollup-plugin-node-resolve": "^4.0.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "rollup-plugin-typescript": "^1.0.0", + "rollup-watch": "^4.3.1", + "ts-node": "^8.0.1", + "type-tester": "^1.0.0", + "typescript": "^3.2.4" + }, + "scripts": { + "preversion": "npm test", + "version": "npm run -s build && git add dist/*", + "postversion": "git push && git push --tags", + "clean": "rimraf .nyc_output coverage", + "coverage": "opener coverage/lcov-report/index.html", + "lint": "eslint . --ext .ts", + "build": "run-s -s build:*", + "build:rollup": "rollup -c", + "build:dts": "dts-bundle-generator -o dist/abort-controller.d.ts src/abort-controller.ts && ts-node scripts/fix-dts", + "test": "run-s -s lint test:*", + "test:mocha": "nyc mocha test/*.ts", + "test:karma": "karma start --single-run", + "watch": "run-p -s watch:*", + "watch:mocha": "mocha test/*.ts --require ts-node/register --watch-extensions ts --watch --growl", + "watch:karma": "karma start --watch", + "codecov": "codecov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/mysticatea/abort-controller.git" + }, + "keywords": [ + "w3c", + "whatwg", + "event", + "events", + "abort", + "cancel", + "abortcontroller", + "abortsignal", + "controller", + "signal", + "shim" + ], + "author": "Toru Nagashima (https://github.com/mysticatea)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mysticatea/abort-controller/issues" + }, + "homepage": "https://github.com/mysticatea/abort-controller#readme" +} diff --git a/node_modules/abort-controller/polyfill.js b/node_modules/abort-controller/polyfill.js new file mode 100644 index 00000000..3ca89233 --- /dev/null +++ b/node_modules/abort-controller/polyfill.js @@ -0,0 +1,21 @@ +/*globals require, self, window */ +"use strict" + +const ac = require("./dist/abort-controller") + +/*eslint-disable @mysticatea/prettier */ +const g = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + typeof global !== "undefined" ? global : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +if (g) { + if (typeof g.AbortController === "undefined") { + g.AbortController = ac.AbortController + } + if (typeof g.AbortSignal === "undefined") { + g.AbortSignal = ac.AbortSignal + } +} diff --git a/node_modules/abort-controller/polyfill.mjs b/node_modules/abort-controller/polyfill.mjs new file mode 100644 index 00000000..0602a64d --- /dev/null +++ b/node_modules/abort-controller/polyfill.mjs @@ -0,0 +1,19 @@ +/*globals self, window */ +import * as ac from "./dist/abort-controller" + +/*eslint-disable @mysticatea/prettier */ +const g = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + typeof global !== "undefined" ? 
global : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +if (g) { + if (typeof g.AbortController === "undefined") { + g.AbortController = ac.AbortController + } + if (typeof g.AbortSignal === "undefined") { + g.AbortSignal = ac.AbortSignal + } +} diff --git a/node_modules/asynckit/LICENSE b/node_modules/asynckit/LICENSE new file mode 100644 index 00000000..c9eca5dd --- /dev/null +++ b/node_modules/asynckit/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Alex Indigo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/asynckit/README.md b/node_modules/asynckit/README.md new file mode 100644 index 00000000..ddcc7e6b --- /dev/null +++ b/node_modules/asynckit/README.md @@ -0,0 +1,233 @@ +# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit) + +Minimal async jobs utility library, with streams support. + +[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit) +[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit) +[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit) + +[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master) +[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit) +[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit) + + + +AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects. +Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method. + +It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators. 
+ +| compression | size | +| :----------------- | -------: | +| asynckit.js | 12.34 kB | +| asynckit.min.js | 4.11 kB | +| asynckit.min.js.gz | 1.47 kB | + + +## Install + +```sh +$ npm install --save asynckit +``` + +## Examples + +### Parallel Jobs + +Runs iterator over provided array in parallel. Stores output in the `result` array, +on the matching positions. In unlikely event of an error from one of the jobs, +will terminate rest of the active jobs (if abort function is provided) +and return error along with salvaged data to the main callback function. + +#### Input Array + +```javascript +var parallel = require('asynckit').parallel + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] + , target = [] + ; + +parallel(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// async job accepts one element from the array +// and a callback function +function asyncJob(item, cb) +{ + // different delays (in ms) per item + var delay = item * 25; + + // pretend different jobs take different time to finish + // and not in consequential order + var timeoutId = setTimeout(function() { + target.push(item); + cb(null, item * 2); + }, delay); + + // allow to cancel "leftover" jobs upon error + // return function, invoking of which will abort this job + return clearTimeout.bind(null, timeoutId); +} +``` + +More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js). + +#### Input Object + +Also it supports named jobs, listed via object. + +```javascript +var parallel = require('asynckit/parallel') + , assert = require('assert') + ; + +var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } + , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } + , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] + , expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ] + , target = [] + , keys = [] + ; + +parallel(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); + assert.deepEqual(keys, expectedKeys); +}); + +// supports full value, key, callback (shortcut) interface +function asyncJob(item, key, cb) +{ + // different delays (in ms) per item + var delay = item * 25; + + // pretend different jobs take different time to finish + // and not in consequential order + var timeoutId = setTimeout(function() { + keys.push(key); + target.push(item); + cb(null, item * 2); + }, delay); + + // allow to cancel "leftover" jobs upon error + // return function, invoking of which will abort this job + return clearTimeout.bind(null, timeoutId); +} +``` + +More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js). + +### Serial Jobs + +Runs iterator over provided array sequentially. Stores output in the `result` array, +on the matching positions. In unlikely event of an error from one of the jobs, +will not proceed to the rest of the items in the list +and return error along with salvaged data to the main callback function. 
+ +#### Input Array + +```javascript +var serial = require('asynckit/serial') + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] + , target = [] + ; + +serial(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// extended interface (item, key, callback) +// also supported for arrays +function asyncJob(item, key, cb) +{ + target.push(key); + + // it will be automatically made async + // even it iterator "returns" in the same event loop + cb(null, item * 2); +} +``` + +More examples could be found in [test/test-serial-array.js](test/test-serial-array.js). + +#### Input Object + +Also it supports named jobs, listed via object. + +```javascript +var serial = require('asynckit').serial + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] + , target = [] + ; + +var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } + , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } + , expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , target = [] + ; + + +serial(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// shortcut interface (item, callback) +// works for object as well as for the arrays +function asyncJob(item, cb) +{ + target.push(item); + + // it will be automatically made async + // even it iterator "returns" in the same event loop + cb(null, item * 2); +} +``` + +More examples could be found in [test/test-serial-object.js](test/test-serial-object.js). + +_Note: Since _object_ is an _unordered_ collection of properties, +it may produce unexpected results with sequential iterations. +Whenever order of the jobs' execution is important please use `serialOrdered` method._ + +### Ordered Serial Iterations + +TBD + +For example [compare-property](compare-property) package. + +### Streaming interface + +TBD + +## Want to Know More? + +More examples can be found in [test folder](test/). + +Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions. + +## License + +AsyncKit is licensed under the MIT license. 
diff --git a/node_modules/asynckit/bench.js b/node_modules/asynckit/bench.js new file mode 100644 index 00000000..c612f1a5 --- /dev/null +++ b/node_modules/asynckit/bench.js @@ -0,0 +1,76 @@ +/* eslint no-console: "off" */ + +var asynckit = require('./') + , async = require('async') + , assert = require('assert') + , expected = 0 + ; + +var Benchmark = require('benchmark'); +var suite = new Benchmark.Suite; + +var source = []; +for (var z = 1; z < 100; z++) +{ + source.push(z); + expected += z; +} + +suite +// add tests + +.add('async.map', function(deferred) +{ + var total = 0; + + async.map(source, + function(i, cb) + { + setImmediate(function() + { + total += i; + cb(null, total); + }); + }, + function(err, result) + { + assert.ifError(err); + assert.equal(result[result.length - 1], expected); + deferred.resolve(); + }); +}, {'defer': true}) + + +.add('asynckit.parallel', function(deferred) +{ + var total = 0; + + asynckit.parallel(source, + function(i, cb) + { + setImmediate(function() + { + total += i; + cb(null, total); + }); + }, + function(err, result) + { + assert.ifError(err); + assert.equal(result[result.length - 1], expected); + deferred.resolve(); + }); +}, {'defer': true}) + + +// add listeners +.on('cycle', function(ev) +{ + console.log(String(ev.target)); +}) +.on('complete', function() +{ + console.log('Fastest is ' + this.filter('fastest').map('name')); +}) +// run async +.run({ 'async': true }); diff --git a/node_modules/asynckit/index.js b/node_modules/asynckit/index.js new file mode 100644 index 00000000..455f9454 --- /dev/null +++ b/node_modules/asynckit/index.js @@ -0,0 +1,6 @@ +module.exports = +{ + parallel : require('./parallel.js'), + serial : require('./serial.js'), + serialOrdered : require('./serialOrdered.js') +}; diff --git a/node_modules/asynckit/lib/abort.js b/node_modules/asynckit/lib/abort.js new file mode 100644 index 00000000..114367e5 --- /dev/null +++ b/node_modules/asynckit/lib/abort.js @@ -0,0 +1,29 @@ +// API +module.exports = abort; + +/** + * Aborts leftover active jobs + * + * @param {object} state - current state object + */ +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); + + // reset leftover jobs + state.jobs = {}; +} + +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } +} diff --git a/node_modules/asynckit/lib/async.js b/node_modules/asynckit/lib/async.js new file mode 100644 index 00000000..7f1288a4 --- /dev/null +++ b/node_modules/asynckit/lib/async.js @@ -0,0 +1,34 @@ +var defer = require('./defer.js'); + +// API +module.exports = async; + +/** + * Runs provided callback asynchronously + * even if callback itself is not + * + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback + */ +function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); + } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; +} diff --git a/node_modules/asynckit/lib/defer.js b/node_modules/asynckit/lib/defer.js new file mode 100644 index 00000000..b67110c7 --- /dev/null +++ b/node_modules/asynckit/lib/defer.js @@ -0,0 +1,26 @@ +module.exports = defer; + +/** + * Runs provided function on next 
iteration of the event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} diff --git a/node_modules/asynckit/lib/iterate.js b/node_modules/asynckit/lib/iterate.js new file mode 100644 index 00000000..5d2839a5 --- /dev/null +++ b/node_modules/asynckit/lib/iterate.js @@ -0,0 +1,75 @@ +var async = require('./async.js') + , abort = require('./abort.js') + ; + +// API +module.exports = iterate; + +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } + + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); +} + +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } + + return aborter; +} diff --git a/node_modules/asynckit/lib/readable_asynckit.js b/node_modules/asynckit/lib/readable_asynckit.js new file mode 100644 index 00000000..78ad240f --- /dev/null +++ b/node_modules/asynckit/lib/readable_asynckit.js @@ -0,0 +1,91 @@ +var streamify = require('./streamify.js') + , defer = require('./defer.js') + ; + +// API +module.exports = ReadableAsyncKit; + +/** + * Base constructor for all streams + * used to hold properties/methods + */ +function ReadableAsyncKit() +{ + ReadableAsyncKit.super_.apply(this, arguments); + + // list of active jobs + this.jobs = {}; + + // add stream methods + this.destroy = destroy; + this._start = _start; + this._read = _read; +} + +/** + * Destroys readable stream, + * by aborting outstanding jobs + * + * @returns {void} + */ +function destroy() +{ + if (this.destroyed) + { + return; + } + + this.destroyed = true; + + if (typeof this.terminator == 'function') + { + this.terminator(); + } +} + +/** + * Starts provided jobs in async manner + * + * @private + */ +function _start() +{ + // first argument – runner function + var runner = arguments[0] + // take away first argument + , args = Array.prototype.slice.call(arguments, 1) + // second argument - input data 
+ , input = args[0] + // last argument - result callback + , endCb = streamify.callback.call(this, args[args.length - 1]) + ; + + args[args.length - 1] = endCb; + // third argument - iterator + args[1] = streamify.iterator.call(this, args[1]); + + // allow time for proper setup + defer(function() + { + if (!this.destroyed) + { + this.terminator = runner.apply(null, args); + } + else + { + endCb(null, Array.isArray(input) ? [] : {}); + } + }.bind(this)); +} + + +/** + * Implement _read to comply with Readable streams + * Doesn't really make sense for flowing object mode + * + * @private + */ +function _read() +{ + +} diff --git a/node_modules/asynckit/lib/readable_parallel.js b/node_modules/asynckit/lib/readable_parallel.js new file mode 100644 index 00000000..5d2929f7 --- /dev/null +++ b/node_modules/asynckit/lib/readable_parallel.js @@ -0,0 +1,25 @@ +var parallel = require('../parallel.js'); + +// API +module.exports = ReadableParallel; + +/** + * Streaming wrapper to `asynckit.parallel` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableParallel(list, iterator, callback) +{ + if (!(this instanceof ReadableParallel)) + { + return new ReadableParallel(list, iterator, callback); + } + + // turn on object mode + ReadableParallel.super_.call(this, {objectMode: true}); + + this._start(parallel, list, iterator, callback); +} diff --git a/node_modules/asynckit/lib/readable_serial.js b/node_modules/asynckit/lib/readable_serial.js new file mode 100644 index 00000000..78226982 --- /dev/null +++ b/node_modules/asynckit/lib/readable_serial.js @@ -0,0 +1,25 @@ +var serial = require('../serial.js'); + +// API +module.exports = ReadableSerial; + +/** + * Streaming wrapper to `asynckit.serial` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableSerial(list, iterator, callback) +{ + if (!(this instanceof ReadableSerial)) + { + return new ReadableSerial(list, iterator, callback); + } + + // turn on object mode + ReadableSerial.super_.call(this, {objectMode: true}); + + this._start(serial, list, iterator, callback); +} diff --git a/node_modules/asynckit/lib/readable_serial_ordered.js b/node_modules/asynckit/lib/readable_serial_ordered.js new file mode 100644 index 00000000..3de89c47 --- /dev/null +++ b/node_modules/asynckit/lib/readable_serial_ordered.js @@ -0,0 +1,29 @@ +var serialOrdered = require('../serialOrdered.js'); + +// API +module.exports = ReadableSerialOrdered; +// expose sort helpers +module.exports.ascending = serialOrdered.ascending; +module.exports.descending = serialOrdered.descending; + +/** + * Streaming wrapper to `asynckit.serialOrdered` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableSerialOrdered(list, iterator, sortMethod, callback) +{ + if (!(this instanceof ReadableSerialOrdered)) + { + return new ReadableSerialOrdered(list, iterator, sortMethod, callback); + } + + // turn on object mode + 
ReadableSerialOrdered.super_.call(this, {objectMode: true}); + + this._start(serialOrdered, list, iterator, sortMethod, callback); +} diff --git a/node_modules/asynckit/lib/state.js b/node_modules/asynckit/lib/state.js new file mode 100644 index 00000000..cbea7ad8 --- /dev/null +++ b/node_modules/asynckit/lib/state.js @@ -0,0 +1,37 @@ +// API +module.exports = state; + +/** + * Creates initial state object + * for iteration over list + * + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object + */ +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } + + return initState; +} diff --git a/node_modules/asynckit/lib/streamify.js b/node_modules/asynckit/lib/streamify.js new file mode 100644 index 00000000..f56a1c92 --- /dev/null +++ b/node_modules/asynckit/lib/streamify.js @@ -0,0 +1,141 @@ +var async = require('./async.js'); + +// API +module.exports = { + iterator: wrapIterator, + callback: wrapCallback +}; + +/** + * Wraps iterators with long signature + * + * @this ReadableAsyncKit# + * @param {function} iterator - function to wrap + * @returns {function} - wrapped function + */ +function wrapIterator(iterator) +{ + var stream = this; + + return function(item, key, cb) + { + var aborter + , wrappedCb = async(wrapIteratorCallback.call(stream, cb, key)) + ; + + stream.jobs[key] = wrappedCb; + + // it's either shortcut (item, cb) + if (iterator.length == 2) + { + aborter = iterator(item, wrappedCb); + } + // or long format (item, key, cb) + else + { + aborter = iterator(item, key, wrappedCb); + } + + return aborter; + }; +} + +/** + * Wraps provided callback function + * allowing to execute snitch function before + * real callback + * + * @this ReadableAsyncKit# + * @param {function} callback - function to wrap + * @returns {function} - wrapped function + */ +function wrapCallback(callback) +{ + var stream = this; + + var wrapped = function(error, result) + { + return finisher.call(stream, error, result, callback); + }; + + return wrapped; +} + +/** + * Wraps provided iterator callback function + * makes sure snitch only called once, + * but passes secondary calls to the original callback + * + * @this ReadableAsyncKit# + * @param {function} callback - callback to wrap + * @param {number|string} key - iteration key + * @returns {function} wrapped callback + */ +function wrapIteratorCallback(callback, key) +{ + var stream = this; + + return function(error, output) + { + // don't repeat yourself + if (!(key in stream.jobs)) + { + callback(error, output); + return; + } + + // clean up jobs + delete stream.jobs[key]; + + return streamer.call(stream, error, {key: key, value: output}, callback); + }; +} + +/** + * Stream wrapper for iterator callback + * + * @this ReadableAsyncKit# + * @param {mixed} error - error response + * @param {mixed} output - iterator output + * @param {function} callback - callback that expects iterator results + */ +function streamer(error, output, callback) +{ + 
if (error && !this.error) + { + this.error = error; + this.pause(); + this.emit('error', error); + // send back value only, as expected + callback(error, output && output.value); + return; + } + + // stream stuff + this.push(output); + + // back to original track + // send back value only, as expected + callback(error, output && output.value); +} + +/** + * Stream wrapper for finishing callback + * + * @this ReadableAsyncKit# + * @param {mixed} error - error response + * @param {mixed} output - iterator output + * @param {function} callback - callback that expects final results + */ +function finisher(error, output, callback) +{ + // signal end of the stream + // only for successfully finished streams + if (!error) + { + this.push(null); + } + + // back to original track + callback(error, output); +} diff --git a/node_modules/asynckit/lib/terminator.js b/node_modules/asynckit/lib/terminator.js new file mode 100644 index 00000000..d6eb9921 --- /dev/null +++ b/node_modules/asynckit/lib/terminator.js @@ -0,0 +1,29 @@ +var abort = require('./abort.js') + , async = require('./async.js') + ; + +// API +module.exports = terminator; + +/** + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination + */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } + + // fast forward iteration index + this.index = this.size; + + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); +} diff --git a/node_modules/asynckit/package.json b/node_modules/asynckit/package.json new file mode 100644 index 00000000..51147d65 --- /dev/null +++ b/node_modules/asynckit/package.json @@ -0,0 +1,63 @@ +{ + "name": "asynckit", + "version": "0.4.0", + "description": "Minimal async jobs utility library, with streams support", + "main": "index.js", + "scripts": { + "clean": "rimraf coverage", + "lint": "eslint *.js lib/*.js test/*.js", + "test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec", + "win-test": "tape test/test-*.js", + "browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec", + "report": "istanbul report", + "size": "browserify index.js | size-table asynckit", + "debug": "tape test/test-*.js" + }, + "pre-commit": [ + "clean", + "lint", + "test", + "browser", + "report", + "size" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/alexindigo/asynckit.git" + }, + "keywords": [ + "async", + "jobs", + "parallel", + "serial", + "iterator", + "array", + "object", + "stream", + "destroy", + "terminate", + "abort" + ], + "author": "Alex Indigo ", + "license": "MIT", + "bugs": { + "url": "https://github.com/alexindigo/asynckit/issues" + }, + "homepage": "https://github.com/alexindigo/asynckit#readme", + "devDependencies": { + "browserify": "^13.0.0", + "browserify-istanbul": "^2.0.0", + "coveralls": "^2.11.9", + "eslint": "^2.9.0", + "istanbul": "^0.4.3", + "obake": "^0.1.2", + "phantomjs-prebuilt": "^2.1.7", + "pre-commit": "^1.1.3", + "reamde": "^1.1.0", + "rimraf": "^2.5.2", + "size-table": "^0.2.0", + "tap-spec": "^4.1.1", + "tape": "^4.5.1" + }, + "dependencies": {} +} diff --git a/node_modules/asynckit/parallel.js b/node_modules/asynckit/parallel.js new file mode 100644 index 00000000..3c50344d --- /dev/null +++ b/node_modules/asynckit/parallel.js @@ -0,0 +1,43 @@ +var iterate = require('./lib/iterate.js') + , initState = 
require('./lib/state.js') + , terminator = require('./lib/terminator.js') + ; + +// Public API +module.exports = parallel; + +/** + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; + } + + return terminator.bind(state, callback); +} diff --git a/node_modules/asynckit/serial.js b/node_modules/asynckit/serial.js new file mode 100644 index 00000000..6cd949a6 --- /dev/null +++ b/node_modules/asynckit/serial.js @@ -0,0 +1,17 @@ +var serialOrdered = require('./serialOrdered.js'); + +// Public API +module.exports = serial; + +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} diff --git a/node_modules/asynckit/serialOrdered.js b/node_modules/asynckit/serialOrdered.js new file mode 100644 index 00000000..607eafea --- /dev/null +++ b/node_modules/asynckit/serialOrdered.js @@ -0,0 +1,75 @@ +var iterate = require('./lib/iterate.js') + , initState = require('./lib/state.js') + , terminator = require('./lib/terminator.js') + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; + +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } + + state.index++; + + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); +} + +/* + * -- Sort methods + */ + +/** + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 
1 : 0; +} + +/** + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function descending(a, b) +{ + return -1 * ascending(a, b); +} diff --git a/node_modules/asynckit/stream.js b/node_modules/asynckit/stream.js new file mode 100644 index 00000000..d43465f9 --- /dev/null +++ b/node_modules/asynckit/stream.js @@ -0,0 +1,21 @@ +var inherits = require('util').inherits + , Readable = require('stream').Readable + , ReadableAsyncKit = require('./lib/readable_asynckit.js') + , ReadableParallel = require('./lib/readable_parallel.js') + , ReadableSerial = require('./lib/readable_serial.js') + , ReadableSerialOrdered = require('./lib/readable_serial_ordered.js') + ; + +// API +module.exports = +{ + parallel : ReadableParallel, + serial : ReadableSerial, + serialOrdered : ReadableSerialOrdered, +}; + +inherits(ReadableAsyncKit, Readable); + +inherits(ReadableParallel, ReadableAsyncKit); +inherits(ReadableSerial, ReadableAsyncKit); +inherits(ReadableSerialOrdered, ReadableAsyncKit); diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 00000000..cea8b16e --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 00000000..2cdc8e41 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 00000000..d2a48b6b --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `` and ``. Supports regular expressions as well! 
+ +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ , ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 00000000..c67a6460 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 00000000..ce6073e0 --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 00000000..de322667 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 00000000..6b4e0e16 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. + +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? 
Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 00000000..0478be81 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 00000000..a18faa8f --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/cache/.eslintrc.json b/node_modules/cache/.eslintrc.json new file mode 100644 index 00000000..97b1a620 --- /dev/null +++ b/node_modules/cache/.eslintrc.json @@ -0,0 +1,22 @@ +{ + "env": { "node": true, "jest": true }, + "parser": "@typescript-eslint/parser", + "parserOptions": { "ecmaVersion": 2020, "sourceType": "module" }, + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/eslint-recommended", + "plugin:@typescript-eslint/recommended", + "plugin:import/errors", + "plugin:import/warnings", + "plugin:import/typescript", + "plugin:prettier/recommended" + ], + "plugins": ["@typescript-eslint", "simple-import-sort", "jest"], + "rules": { + "import/first": "error", + "import/newline-after-import": "error", + "import/no-duplicates": "error", + "simple-import-sort/imports": "error", + "sort-imports": "off" + } +} diff --git a/node_modules/cache/.gitattributes b/node_modules/cache/.gitattributes new file mode 100644 index 00000000..6e67b9bd --- /dev/null +++ b/node_modules/cache/.gitattributes @@ -0,0 +1,2 @@ +.licenses/** -diff linguist-generated=true +* text=auto eol=lf \ No newline at end of file diff --git a/node_modules/cache/.github/CODEOWNERS b/node_modules/cache/.github/CODEOWNERS new file mode 100644 index 00000000..342d7b02 --- /dev/null +++ b/node_modules/cache/.github/CODEOWNERS @@ -0,0 +1 @@ +* @actions/actions-cache diff --git a/node_modules/cache/.github/auto_assign.yml 
b/node_modules/cache/.github/auto_assign.yml new file mode 100644 index 00000000..0cbc2696 --- /dev/null +++ b/node_modules/cache/.github/auto_assign.yml @@ -0,0 +1,17 @@ +# Set to true to add reviewers to pull requests +addReviewers: true + +# Set to true to add assignees to pull requests +addAssignees: false + +# A list of reviewers to be added to pull requests (GitHub user name) +reviewers: + - phantsure + - aparna-ravindra + - tiwarishub + - vsvipul + - bishal-pdmsft + +# A number of reviewers added to the pull request +# Set 0 to add all the reviewers (default: 0) +numberOfReviewers: 1 diff --git a/node_modules/cache/.github/workflows/auto-assign-issues.yml b/node_modules/cache/.github/workflows/auto-assign-issues.yml new file mode 100644 index 00000000..23ddc288 --- /dev/null +++ b/node_modules/cache/.github/workflows/auto-assign-issues.yml @@ -0,0 +1,15 @@ +name: Issue assignment + +on: + issues: + types: [opened] + +jobs: + auto-assign: + runs-on: ubuntu-latest + steps: + - name: 'Auto-assign issue' + uses: pozil/auto-assign-issue@v1.4.0 + with: + assignees: phantsure,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft + numOfAssignee: 1 diff --git a/node_modules/cache/.github/workflows/auto-assign.yml b/node_modules/cache/.github/workflows/auto-assign.yml new file mode 100644 index 00000000..4dcc6128 --- /dev/null +++ b/node_modules/cache/.github/workflows/auto-assign.yml @@ -0,0 +1,10 @@ +name: 'Auto Assign' +on: + pull_request: + types: [opened, ready_for_review] + +jobs: + add-reviews: + runs-on: ubuntu-latest + steps: + - uses: kentaro-m/auto-assign-action@v1.2.1 diff --git a/node_modules/cache/.github/workflows/check-dist.yml b/node_modules/cache/.github/workflows/check-dist.yml new file mode 100644 index 00000000..00f50362 --- /dev/null +++ b/node_modules/cache/.github/workflows/check-dist.yml @@ -0,0 +1,49 @@ +# `dist/index.js` is a special file in Actions. +# When you reference an action with `uses:` in a workflow, +# `index.js` is the code that will run. +# For our project, we generate this file through a build process +# from other source files. +# We need to make sure the checked-in `index.js` actually matches what we expect it to be. +name: Check dist/ + +on: + push: + branches: + - main + paths-ignore: + - '**.md' + pull_request: + paths-ignore: + - '**.md' + workflow_dispatch: + +jobs: + check-dist: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Setup Node.js 16.x + uses: actions/setup-node@v3 + with: + node-version: 16.x + - name: Install dependencies + run: npm ci + - name: Rebuild the dist/ directory + run: npm run build + + - name: Compare the expected and actual dist/ directories + run: | + if [ "$(git diff --ignore-space-at-eol dist/ | wc -l)" -gt "0" ]; then + echo "Detected uncommitted changes after build. 
See status below:" + git diff + exit 1 + fi + id: diff + + # If index.js was different than expected, upload the expected version as an artifact + - uses: actions/upload-artifact@v3 + if: ${{ failure() && steps.diff.conclusion == 'failure' }} + with: + name: dist + path: dist/ diff --git a/node_modules/cache/.github/workflows/close-inactive-issues.yml b/node_modules/cache/.github/workflows/close-inactive-issues.yml new file mode 100644 index 00000000..aea77e98 --- /dev/null +++ b/node_modules/cache/.github/workflows/close-inactive-issues.yml @@ -0,0 +1,22 @@ +name: Close inactive issues +on: + schedule: + - cron: "30 8 * * *" + +jobs: + close-issues: + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + steps: + - uses: actions/stale@v3 + with: + days-before-issue-stale: 200 + days-before-issue-close: 5 + stale-issue-label: "stale" + stale-issue-message: "This issue is stale because it has been open for 200 days with no activity. Leave a comment to avoid closing this issue in 5 days." + close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale." + days-before-pr-stale: -1 + days-before-pr-close: -1 + repo-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/cache/.github/workflows/codeql.yml b/node_modules/cache/.github/workflows/codeql.yml new file mode 100644 index 00000000..8a4f83cc --- /dev/null +++ b/node_modules/cache/.github/workflows/codeql.yml @@ -0,0 +1,52 @@ +name: "Code scanning - action" + +on: + push: + pull_request: + schedule: + - cron: '0 19 * * 0' + +jobs: + CodeQL-Build: + + # CodeQL runs on ubuntu-latest and windows-latest + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + with: + # We must fetch at least the immediate parents so that if this is + # a pull request then we can checkout the head. + fetch-depth: 2 + + # If this run was triggered by a pull request event, then checkout + # the head of the pull request instead of the merge commit. + - run: git checkout HEAD^2 + if: ${{ github.event_name == 'pull_request' }} + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + # Override language selection by uncommenting this and choosing your languages + # with: + # languages: go, javascript, csharp, python, cpp, java + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + # ℹ️ Command-line programs to run using the OS shell. 
+ # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 diff --git a/node_modules/cache/.github/workflows/licensed.yml b/node_modules/cache/.github/workflows/licensed.yml new file mode 100644 index 00000000..04af62ab --- /dev/null +++ b/node_modules/cache/.github/workflows/licensed.yml @@ -0,0 +1,24 @@ +name: Licensed + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + test: + runs-on: ubuntu-latest + name: Check licenses + steps: + - uses: actions/checkout@v3 + - run: npm ci + - name: Install licensed + run: | + cd $RUNNER_TEMP + curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/2.12.2/licensed-2.12.2-linux-x64.tar.gz + sudo tar -xzf licensed.tar.gz + sudo mv licensed /usr/local/bin/licensed + - run: licensed status diff --git a/node_modules/cache/.github/workflows/workflow.yml b/node_modules/cache/.github/workflows/workflow.yml new file mode 100644 index 00000000..48f1ccb7 --- /dev/null +++ b/node_modules/cache/.github/workflows/workflow.yml @@ -0,0 +1,143 @@ +name: Tests + +on: + pull_request: + branches: + - main + - releases/** + paths-ignore: + - '**.md' + push: + branches: + - main + - releases/** + paths-ignore: + - '**.md' + +jobs: + # Build and unit test + build: + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + fail-fast: false + runs-on: ${{ matrix.os }} + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Setup Node.js 16.x + uses: actions/setup-node@v3 + with: + node-version: 16.x + - name: Determine npm cache directory + id: npm-cache + run: | + echo "::set-output name=dir::$(npm config get cache)" + - name: Restore npm cache + uses: actions/cache@v3 + with: + path: ${{ steps.npm-cache.outputs.dir }} + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + - run: npm ci + - name: Prettier Format Check + run: npm run format-check + - name: ESLint Check + run: npm run lint + - name: Build & Test + run: npm run test + + # End to end save and restore + test-save: + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + fail-fast: false + runs-on: ${{ matrix.os }} + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Generate files in working directory + shell: bash + run: __tests__/create-cache-files.sh ${{ runner.os }} test-cache + - name: Generate files outside working directory + shell: bash + run: __tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache + - name: Save cache + uses: ./ + with: + key: test-${{ runner.os }}-${{ github.run_id }} + path: | + test-cache + ~/test-cache + test-restore: + needs: test-save + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + fail-fast: false + runs-on: ${{ matrix.os }} + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Restore cache + uses: ./ + with: + key: test-${{ runner.os }}-${{ github.run_id }} + path: | + test-cache + ~/test-cache + - name: Verify cache files in working directory + shell: bash + run: __tests__/verify-cache-files.sh ${{ runner.os }} test-cache + - name: Verify cache files outside working directory + shell: bash + run: __tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache + + # End to end with 
proxy + test-proxy-save: + runs-on: ubuntu-latest + container: + image: ubuntu:latest + options: --dns 127.0.0.1 + services: + squid-proxy: + image: datadog/squid:latest + ports: + - 3128:3128 + env: + https_proxy: http://squid-proxy:3128 + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Generate files + run: __tests__/create-cache-files.sh proxy test-cache + - name: Save cache + uses: ./ + with: + key: test-proxy-${{ github.run_id }} + path: test-cache + test-proxy-restore: + needs: test-proxy-save + runs-on: ubuntu-latest + container: + image: ubuntu:latest + options: --dns 127.0.0.1 + services: + squid-proxy: + image: datadog/squid:latest + ports: + - 3128:3128 + env: + https_proxy: http://squid-proxy:3128 + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Restore cache + uses: ./ + with: + key: test-proxy-${{ github.run_id }} + path: test-cache + - name: Verify cache + run: __tests__/verify-cache-files.sh proxy test-cache diff --git a/node_modules/cache/.licensed.yml b/node_modules/cache/.licensed.yml new file mode 100644 index 00000000..180cc589 --- /dev/null +++ b/node_modules/cache/.licensed.yml @@ -0,0 +1,16 @@ +sources: + npm: true + +allowed: + - apache-2.0 + - bsd-2-clause + - bsd-3-clause + - isc + - mit + - cc0-1.0 + - unlicense + - 0bsd + +reviewed: + npm: + - sax \ No newline at end of file diff --git a/node_modules/cache/.licenses/npm/@actions/cache.dep.yml b/node_modules/cache/.licenses/npm/@actions/cache.dep.yml new file mode 100644 index 00000000..318da5de --- /dev/null +++ b/node_modules/cache/.licenses/npm/@actions/cache.dep.yml @@ -0,0 +1,20 @@ +--- +name: "@actions/cache" +version: 2.0.2 +type: npm +summary: +homepage: +license: mit +licenses: +- sources: LICENSE.md + text: |- + The MIT License (MIT) + + Copyright 2019 GitHub + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/@actions/core.dep.yml b/node_modules/cache/.licenses/npm/@actions/core.dep.yml new file mode 100644 index 00000000..6ef1a3d4 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@actions/core.dep.yml @@ -0,0 +1,20 @@ +--- +name: "@actions/core" +version: 1.7.0 +type: npm +summary: Actions core lib +homepage: https://github.com/actions/toolkit/tree/main/packages/core +license: mit +licenses: +- sources: LICENSE.md + text: |- + The MIT License (MIT) + + Copyright 2019 GitHub + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/@actions/exec.dep.yml b/node_modules/cache/.licenses/npm/@actions/exec.dep.yml new file mode 100644 index 00000000..9338ea7e --- /dev/null +++ b/node_modules/cache/.licenses/npm/@actions/exec.dep.yml @@ -0,0 +1,20 @@ +--- +name: "@actions/exec" +version: 1.1.1 +type: npm +summary: +homepage: +license: mit +licenses: +- sources: LICENSE.md + text: |- + The MIT License (MIT) + + Copyright 2019 GitHub + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/@actions/glob.dep.yml b/node_modules/cache/.licenses/npm/@actions/glob.dep.yml new file mode 100644 index 00000000..becb37de --- /dev/null +++ b/node_modules/cache/.licenses/npm/@actions/glob.dep.yml @@ -0,0 +1,20 @@ +--- +name: "@actions/glob" +version: 0.1.2 +type: npm +summary: Actions glob lib +homepage: https://github.com/actions/toolkit/tree/main/packages/glob +license: mit +licenses: +- sources: LICENSE.md + text: |- + The MIT License (MIT) + + Copyright 2019 GitHub + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/@actions/http-client.dep.yml b/node_modules/cache/.licenses/npm/@actions/http-client.dep.yml new file mode 100644 index 00000000..43316cbc --- /dev/null +++ b/node_modules/cache/.licenses/npm/@actions/http-client.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@actions/http-client" +version: 1.0.11 +type: npm +summary: Actions Http Client +homepage: https://github.com/actions/http-client#readme +license: mit +licenses: +- sources: LICENSE + text: | + Actions Http Client for Node.js + + Copyright (c) GitHub, Inc. + + All rights reserved. + + MIT License + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and + associated documentation files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT + LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN + NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/@actions/io.dep.yml b/node_modules/cache/.licenses/npm/@actions/io.dep.yml new file mode 100644 index 00000000..3be0c5c4 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@actions/io.dep.yml @@ -0,0 +1,20 @@ +--- +name: "@actions/io" +version: 1.1.2 +type: npm +summary: +homepage: +license: mit +licenses: +- sources: LICENSE.md + text: |- + The MIT License (MIT) + + Copyright 2019 GitHub + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/abort-controller.dep.yml b/node_modules/cache/.licenses/npm/@azure/abort-controller.dep.yml new file mode 100644 index 00000000..f303d5c3 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/abort-controller.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@azure/abort-controller" +version: 1.0.4 +type: npm +summary: Microsoft Azure SDK for JavaScript - Aborter +homepage: https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/core/abort-controller/README.md +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2020 Microsoft + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/core-asynciterator-polyfill.dep.yml b/node_modules/cache/.licenses/npm/@azure/core-asynciterator-polyfill.dep.yml new file mode 100644 index 00000000..b1c8a94f --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/core-asynciterator-polyfill.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@azure/core-asynciterator-polyfill" +version: 1.0.2 +type: npm +summary: +homepage: +license: mit +licenses: +- sources: LICENSE + text: |2 + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE +notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/core-auth.dep.yml b/node_modules/cache/.licenses/npm/@azure/core-auth.dep.yml new file mode 100644 index 00000000..830504b6 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/core-auth.dep.yml @@ -0,0 +1,33 @@ +--- +name: "@azure/core-auth" +version: 1.3.2 +type: npm +summary: Provides low-level interfaces and helper methods for authentication in Azure + SDK +homepage: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-auth/README.md +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2020 Microsoft + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/core-http.dep.yml b/node_modules/cache/.licenses/npm/@azure/core-http.dep.yml new file mode 100644 index 00000000..411b834d --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/core-http.dep.yml @@ -0,0 +1,33 @@ +--- +name: "@azure/core-http" +version: 2.2.4 +type: npm +summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client + libraries generated using AutoRest +homepage: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-http/README.md +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2020 Microsoft + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/core-lro.dep.yml b/node_modules/cache/.licenses/npm/@azure/core-lro.dep.yml new file mode 100644 index 00000000..66017699 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/core-lro.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@azure/core-lro" +version: 2.2.4 +type: npm +summary: +homepage: +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2020 Microsoft + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/core-paging.dep.yml b/node_modules/cache/.licenses/npm/@azure/core-paging.dep.yml new file mode 100644 index 00000000..6c805290 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/core-paging.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@azure/core-paging" +version: 1.2.1 +type: npm +summary: Core types for paging async iterable iterators +homepage: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-paging/README.md +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2020 Microsoft + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/core-tracing.dep.yml b/node_modules/cache/.licenses/npm/@azure/core-tracing.dep.yml new file mode 100644 index 00000000..a4649e88 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/core-tracing.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@azure/core-tracing" +version: 1.0.0-preview.13 +type: npm +summary: Provides low-level interfaces and helper methods for tracing in Azure SDK +homepage: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/README.md +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2020 Microsoft + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/logger.dep.yml b/node_modules/cache/.licenses/npm/@azure/logger.dep.yml new file mode 100644 index 00000000..978e29a3 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/logger.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@azure/logger" +version: 1.0.3 +type: npm +summary: Microsoft Azure SDK for JavaScript - Logger +homepage: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger/README.md +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2020 Microsoft + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/ms-rest-js.dep.yml b/node_modules/cache/.licenses/npm/@azure/ms-rest-js.dep.yml new file mode 100644 index 00000000..6bc05b4e --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/ms-rest-js.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@azure/ms-rest-js" +version: 2.6.1 +type: npm +summary: +homepage: +license: mit +licenses: +- sources: LICENSE + text: |2 + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE +notices: [] diff --git a/node_modules/cache/.licenses/npm/@azure/storage-blob.dep.yml b/node_modules/cache/.licenses/npm/@azure/storage-blob.dep.yml new file mode 100644 index 00000000..d6b0f00f --- /dev/null +++ b/node_modules/cache/.licenses/npm/@azure/storage-blob.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@azure/storage-blob" +version: 12.9.0 +type: npm +summary: +homepage: +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2020 Microsoft + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/@opentelemetry/api.dep.yml b/node_modules/cache/.licenses/npm/@opentelemetry/api.dep.yml new file mode 100644 index 00000000..98dcaf46 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@opentelemetry/api.dep.yml @@ -0,0 +1,225 @@ +--- +name: "@opentelemetry/api" +version: 1.0.4 +type: npm +summary: Public API for OpenTelemetry +homepage: https://github.com/open-telemetry/opentelemetry-js-api#readme +license: apache-2.0 +licenses: +- sources: LICENSE + text: |2 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +- sources: README.md + text: |- + Apache 2.0 - See [LICENSE][license-url] for more information. 
+ + [opentelemetry-js]: https://github.com/open-telemetry/opentelemetry-js + + [discussions-url]: https://github.com/open-telemetry/opentelemetry-js/discussions + [license-url]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/LICENSE + [license-image]: https://img.shields.io/badge/license-Apache_2.0-green.svg?style=flat + [npm-url]: https://www.npmjs.com/package/@opentelemetry/api + [npm-img]: https://badge.fury.io/js/%40opentelemetry%2Fapi.svg + [docs-tracing]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/docs/tracing.md + [docs-sdk-registration]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/docs/sdk-registration.md +notices: [] diff --git a/node_modules/cache/.licenses/npm/@types/node-fetch.dep.yml b/node_modules/cache/.licenses/npm/@types/node-fetch.dep.yml new file mode 100644 index 00000000..36c50f15 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@types/node-fetch.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@types/node-fetch" +version: 2.6.1 +type: npm +summary: +homepage: +license: mit +licenses: +- sources: LICENSE + text: |2 + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE +notices: [] diff --git a/node_modules/cache/.licenses/npm/@types/node.dep.yml b/node_modules/cache/.licenses/npm/@types/node.dep.yml new file mode 100644 index 00000000..18549f37 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@types/node.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@types/node" +version: 16.11.33 +type: npm +summary: TypeScript definitions for Node.js +homepage: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node +license: mit +licenses: +- sources: LICENSE + text: |2 + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE +notices: [] diff --git a/node_modules/cache/.licenses/npm/@types/tunnel.dep.yml b/node_modules/cache/.licenses/npm/@types/tunnel.dep.yml new file mode 100644 index 00000000..b3636b02 --- /dev/null +++ b/node_modules/cache/.licenses/npm/@types/tunnel.dep.yml @@ -0,0 +1,32 @@ +--- +name: "@types/tunnel" +version: 0.0.3 +type: npm +summary: TypeScript definitions for tunnel +homepage: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel +license: mit +licenses: +- sources: LICENSE + text: |2 + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE +notices: [] diff --git a/node_modules/cache/.licenses/npm/abort-controller.dep.yml b/node_modules/cache/.licenses/npm/abort-controller.dep.yml new file mode 100644 index 00000000..492a609b --- /dev/null +++ b/node_modules/cache/.licenses/npm/abort-controller.dep.yml @@ -0,0 +1,32 @@ +--- +name: abort-controller +version: 3.0.0 +type: npm +summary: An implementation of WHATWG AbortController interface. +homepage: https://github.com/mysticatea/abort-controller#readme +license: mit +licenses: +- sources: LICENSE + text: | + MIT License + + Copyright (c) 2017 Toru Nagashima + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/asynckit.dep.yml b/node_modules/cache/.licenses/npm/asynckit.dep.yml new file mode 100644 index 00000000..905e0aaa --- /dev/null +++ b/node_modules/cache/.licenses/npm/asynckit.dep.yml @@ -0,0 +1,34 @@ +--- +name: asynckit +version: 0.4.0 +type: npm +summary: Minimal async jobs utility library, with streams support +homepage: https://github.com/alexindigo/asynckit#readme +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2016 Alex Indigo + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +- sources: README.md + text: AsyncKit is licensed under the MIT license. +notices: [] diff --git a/node_modules/cache/.licenses/npm/balanced-match.dep.yml b/node_modules/cache/.licenses/npm/balanced-match.dep.yml new file mode 100644 index 00000000..36095592 --- /dev/null +++ b/node_modules/cache/.licenses/npm/balanced-match.dep.yml @@ -0,0 +1,55 @@ +--- +name: balanced-match +version: 1.0.2 +type: npm +summary: Match balanced character pairs, like "{" and "}" +homepage: https://github.com/juliangruber/balanced-match +license: mit +licenses: +- sources: LICENSE.md + text: | + (MIT) + + Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + + Permission is hereby granted, free of charge, to any person obtaining a copy of + this software and associated documentation files (the "Software"), to deal in + the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies + of the Software, and to permit persons to whom the Software is furnished to do + so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +- sources: README.md + text: |- + (MIT) + + Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + + Permission is hereby granted, free of charge, to any person obtaining a copy of + this software and associated documentation files (the "Software"), to deal in + the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies + of the Software, and to permit persons to whom the Software is furnished to do + so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/brace-expansion.dep.yml b/node_modules/cache/.licenses/npm/brace-expansion.dep.yml new file mode 100644 index 00000000..8fa6cfb3 --- /dev/null +++ b/node_modules/cache/.licenses/npm/brace-expansion.dep.yml @@ -0,0 +1,55 @@ +--- +name: brace-expansion +version: 1.1.11 +type: npm +summary: Brace expansion as known from sh/bash +homepage: https://github.com/juliangruber/brace-expansion +license: mit +licenses: +- sources: LICENSE + text: | + MIT License + + Copyright (c) 2013 Julian Gruber + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+- sources: README.md + text: |- + (MIT) + + Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + + Permission is hereby granted, free of charge, to any person obtaining a copy of + this software and associated documentation files (the "Software"), to deal in + the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies + of the Software, and to permit persons to whom the Software is furnished to do + so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/combined-stream.dep.yml b/node_modules/cache/.licenses/npm/combined-stream.dep.yml new file mode 100644 index 00000000..2b392155 --- /dev/null +++ b/node_modules/cache/.licenses/npm/combined-stream.dep.yml @@ -0,0 +1,32 @@ +--- +name: combined-stream +version: 1.0.8 +type: npm +summary: A stream that emits multiple other streams one after another. +homepage: https://github.com/felixge/node-combined-stream +license: mit +licenses: +- sources: License + text: | + Copyright (c) 2011 Debuggable Limited + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +- sources: Readme.md + text: combined-stream is licensed under the MIT license. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/concat-map.dep.yml b/node_modules/cache/.licenses/npm/concat-map.dep.yml new file mode 100644 index 00000000..20216b95 --- /dev/null +++ b/node_modules/cache/.licenses/npm/concat-map.dep.yml @@ -0,0 +1,31 @@ +--- +name: concat-map +version: 0.0.1 +type: npm +summary: concatenative mapdashery +homepage: https://github.com/substack/node-concat-map#readme +license: other +licenses: +- sources: LICENSE + text: | + This software is released under the MIT license: + + Permission is hereby granted, free of charge, to any person obtaining a copy of + this software and associated documentation files (the "Software"), to deal in + the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS + FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR + COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +- sources: README.markdown + text: MIT +notices: [] diff --git a/node_modules/cache/.licenses/npm/delayed-stream.dep.yml b/node_modules/cache/.licenses/npm/delayed-stream.dep.yml new file mode 100644 index 00000000..12401217 --- /dev/null +++ b/node_modules/cache/.licenses/npm/delayed-stream.dep.yml @@ -0,0 +1,32 @@ +--- +name: delayed-stream +version: 1.0.0 +type: npm +summary: Buffers events from a stream until you are ready to handle them. +homepage: https://github.com/felixge/node-delayed-stream +license: mit +licenses: +- sources: License + text: | + Copyright (c) 2011 Debuggable Limited + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +- sources: Readme.md + text: delayed-stream is licensed under the MIT license. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/event-target-shim.dep.yml b/node_modules/cache/.licenses/npm/event-target-shim.dep.yml new file mode 100644 index 00000000..7a3fc011 --- /dev/null +++ b/node_modules/cache/.licenses/npm/event-target-shim.dep.yml @@ -0,0 +1,33 @@ +--- +name: event-target-shim +version: 5.0.1 +type: npm +summary: An implementation of WHATWG EventTarget interface. +homepage: https://github.com/mysticatea/event-target-shim +license: mit +licenses: +- sources: LICENSE + text: |+ + The MIT License (MIT) + + Copyright (c) 2015 Toru Nagashima + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +notices: [] diff --git a/node_modules/cache/.licenses/npm/events.dep.yml b/node_modules/cache/.licenses/npm/events.dep.yml new file mode 100644 index 00000000..baae3b6b --- /dev/null +++ b/node_modules/cache/.licenses/npm/events.dep.yml @@ -0,0 +1,38 @@ +--- +name: events +version: 3.3.0 +type: npm +summary: Node's event emitter for all engines. +homepage: https://github.com/Gozala/events#readme +license: mit +licenses: +- sources: LICENSE + text: | + MIT + + Copyright Joyent, Inc. and other Node contributors. + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to permit + persons to whom the Software is furnished to do so, subject to the + following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN + NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE + USE OR OTHER DEALINGS IN THE SOFTWARE. 
+- sources: Readme.md + text: |- + [MIT](./LICENSE) + + [node.js docs]: https://nodejs.org/dist/v11.13.0/docs/api/events.html +notices: [] diff --git a/node_modules/cache/.licenses/npm/form-data-2.5.1.dep.yml b/node_modules/cache/.licenses/npm/form-data-2.5.1.dep.yml new file mode 100644 index 00000000..000f2223 --- /dev/null +++ b/node_modules/cache/.licenses/npm/form-data-2.5.1.dep.yml @@ -0,0 +1,33 @@ +--- +name: form-data +version: 2.5.1 +type: npm +summary: A library to create readable "multipart/form-data" streams. Can be used to + submit forms and file uploads to other web applications. +homepage: https://github.com/form-data/form-data#readme +license: mit +licenses: +- sources: License + text: | + Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +- sources: README.md + text: Form-Data is released under the [MIT](License) license. +notices: [] diff --git a/node_modules/cache/.licenses/npm/form-data-3.0.1.dep.yml b/node_modules/cache/.licenses/npm/form-data-3.0.1.dep.yml new file mode 100644 index 00000000..11733679 --- /dev/null +++ b/node_modules/cache/.licenses/npm/form-data-3.0.1.dep.yml @@ -0,0 +1,33 @@ +--- +name: form-data +version: 3.0.1 +type: npm +summary: A library to create readable "multipart/form-data" streams. Can be used to + submit forms and file uploads to other web applications. +homepage: https://github.com/form-data/form-data#readme +license: mit +licenses: +- sources: License + text: | + Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +- sources: Readme.md + text: Form-Data is released under the [MIT](License) license. +notices: [] diff --git a/node_modules/cache/.licenses/npm/form-data-4.0.0.dep.yml b/node_modules/cache/.licenses/npm/form-data-4.0.0.dep.yml new file mode 100644 index 00000000..ced7212c --- /dev/null +++ b/node_modules/cache/.licenses/npm/form-data-4.0.0.dep.yml @@ -0,0 +1,33 @@ +--- +name: form-data +version: 4.0.0 +type: npm +summary: A library to create readable "multipart/form-data" streams. Can be used to + submit forms and file uploads to other web applications. +homepage: https://github.com/form-data/form-data#readme +license: mit +licenses: +- sources: License + text: | + Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +- sources: Readme.md + text: Form-Data is released under the [MIT](License) license. +notices: [] diff --git a/node_modules/cache/.licenses/npm/ip-regex.dep.yml b/node_modules/cache/.licenses/npm/ip-regex.dep.yml new file mode 100644 index 00000000..95d4b6b5 --- /dev/null +++ b/node_modules/cache/.licenses/npm/ip-regex.dep.yml @@ -0,0 +1,34 @@ +--- +name: ip-regex +version: 2.1.0 +type: npm +summary: Regular expression for matching IP addresses (IPv4 & IPv6) +homepage: https://github.com/sindresorhus/ip-regex#readme +license: mit +licenses: +- sources: license + text: | + The MIT License (MIT) + + Copyright (c) Sindre Sorhus (sindresorhus.com) + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +- sources: readme.md + text: MIT © [Sindre Sorhus](https://sindresorhus.com) +notices: [] diff --git a/node_modules/cache/.licenses/npm/mime-db.dep.yml b/node_modules/cache/.licenses/npm/mime-db.dep.yml new file mode 100644 index 00000000..28fe4bb0 --- /dev/null +++ b/node_modules/cache/.licenses/npm/mime-db.dep.yml @@ -0,0 +1,33 @@ +--- +name: mime-db +version: 1.51.0 +type: npm +summary: Media Type Database +homepage: https://github.com/jshttp/mime-db#readme +license: mit +licenses: +- sources: LICENSE + text: |2 + + The MIT License (MIT) + + Copyright (c) 2014 Jonathan Ong me@jongleberry.com + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/mime-types.dep.yml b/node_modules/cache/.licenses/npm/mime-types.dep.yml new file mode 100644 index 00000000..f7d509ca --- /dev/null +++ b/node_modules/cache/.licenses/npm/mime-types.dep.yml @@ -0,0 +1,47 @@ +--- +name: mime-types +version: 2.1.34 +type: npm +summary: The ultimate javascript content-type utility. +homepage: https://github.com/jshttp/mime-types#readme +license: mit +licenses: +- sources: LICENSE + text: | + (The MIT License) + + Copyright (c) 2014 Jonathan Ong + Copyright (c) 2015 Douglas Christopher Wilson + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + 'Software'), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +- sources: README.md + text: |- + [MIT](LICENSE) + + [ci-image]: https://badgen.net/github/checks/jshttp/mime-types/master?label=ci + [ci-url]: https://github.com/jshttp/mime-types/actions?query=workflow%3Aci + [coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-types/master + [coveralls-url]: https://coveralls.io/r/jshttp/mime-types?branch=master + [node-version-image]: https://badgen.net/npm/node/mime-types + [node-version-url]: https://nodejs.org/en/download + [npm-downloads-image]: https://badgen.net/npm/dm/mime-types + [npm-url]: https://npmjs.org/package/mime-types + [npm-version-image]: https://badgen.net/npm/v/mime-types +notices: [] diff --git a/node_modules/cache/.licenses/npm/minimatch.dep.yml b/node_modules/cache/.licenses/npm/minimatch.dep.yml new file mode 100644 index 00000000..317e4bc8 --- /dev/null +++ b/node_modules/cache/.licenses/npm/minimatch.dep.yml @@ -0,0 +1,26 @@ +--- +name: minimatch +version: 3.0.4 +type: npm +summary: a glob matcher in javascript +homepage: https://github.com/isaacs/minimatch#readme +license: isc +licenses: +- sources: LICENSE + text: | + The ISC License + + Copyright (c) Isaac Z. Schlueter and Contributors + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR + IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/node-fetch.dep.yml b/node_modules/cache/.licenses/npm/node-fetch.dep.yml new file mode 100644 index 00000000..b49a78a1 --- /dev/null +++ b/node_modules/cache/.licenses/npm/node-fetch.dep.yml @@ -0,0 +1,56 @@ +--- +name: node-fetch +version: 2.6.7 +type: npm +summary: A light-weight module that brings window.fetch to node.js +homepage: https://github.com/bitinn/node-fetch +license: mit +licenses: +- sources: LICENSE.md + text: |+ + The MIT License (MIT) + + Copyright (c) 2016 David Frank + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +- sources: README.md + text: |- + MIT + + [npm-image]: https://flat.badgen.net/npm/v/node-fetch + [npm-url]: https://www.npmjs.com/package/node-fetch + [travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch + [travis-url]: https://travis-ci.org/bitinn/node-fetch + [codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master + [codecov-url]: https://codecov.io/gh/bitinn/node-fetch + [install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch + [install-size-url]: https://packagephobia.now.sh/result?p=node-fetch + [discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square + [discord-url]: https://discord.gg/Zxbndcm + [opencollective-image]: https://opencollective.com/node-fetch/backers.svg + [opencollective-url]: https://opencollective.com/node-fetch + [whatwg-fetch]: https://fetch.spec.whatwg.org/ + [response-init]: https://fetch.spec.whatwg.org/#responseinit + [node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams + [mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers + [LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md + [ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md + [UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md +notices: [] diff --git a/node_modules/cache/.licenses/npm/process.dep.yml b/node_modules/cache/.licenses/npm/process.dep.yml new file mode 100644 index 00000000..2bf50598 --- /dev/null +++ b/node_modules/cache/.licenses/npm/process.dep.yml @@ -0,0 +1,33 @@ +--- +name: process +version: 0.11.10 +type: npm +summary: process information for node.js and browsers +homepage: https://github.com/shtylman/node-process#readme +license: mit +licenses: +- sources: LICENSE + text: | + (The MIT License) + + Copyright (c) 2013 Roman Shtylman + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + 'Software'), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/psl.dep.yml b/node_modules/cache/.licenses/npm/psl.dep.yml new file mode 100644 index 00000000..385e9aac --- /dev/null +++ b/node_modules/cache/.licenses/npm/psl.dep.yml @@ -0,0 +1,43 @@ +--- +name: psl +version: 1.8.0 +type: npm +summary: Domain name parser based on the Public Suffix List +homepage: https://github.com/lupomontero/psl#readme +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2017 Lupo Montero lupomontero@gmail.com + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +- sources: README.md + text: |- + The MIT License (MIT) + + Copyright (c) 2017 Lupo Montero + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/punycode.dep.yml b/node_modules/cache/.licenses/npm/punycode.dep.yml new file mode 100644 index 00000000..4a9547e6 --- /dev/null +++ b/node_modules/cache/.licenses/npm/punycode.dep.yml @@ -0,0 +1,34 @@ +--- +name: punycode +version: 2.1.1 +type: npm +summary: A robust Punycode converter that fully complies to RFC 3492 and RFC 5891, + and works on nearly all JavaScript platforms. 
+homepage: https://mths.be/punycode +license: mit +licenses: +- sources: LICENSE-MIT.txt + text: | + Copyright Mathias Bynens + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +- sources: README.md + text: Punycode.js is available under the [MIT](https://mths.be/mit) license. +notices: [] diff --git a/node_modules/cache/.licenses/npm/sax.dep.yml b/node_modules/cache/.licenses/npm/sax.dep.yml new file mode 100644 index 00000000..e964a174 --- /dev/null +++ b/node_modules/cache/.licenses/npm/sax.dep.yml @@ -0,0 +1,52 @@ +--- +name: sax +version: 1.2.4 +type: npm +summary: An evented streaming XML parser in JavaScript +homepage: https://github.com/isaacs/sax-js#readme +license: other +licenses: +- sources: LICENSE + text: | + The ISC License + + Copyright (c) Isaac Z. Schlueter and Contributors + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR + IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + ==== + + `String.fromCodePoint` by Mathias Bynens used according to terms of MIT + License, as follows: + + Copyright Mathias Bynens + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/semver.dep.yml b/node_modules/cache/.licenses/npm/semver.dep.yml new file mode 100644 index 00000000..8c62b4ff --- /dev/null +++ b/node_modules/cache/.licenses/npm/semver.dep.yml @@ -0,0 +1,26 @@ +--- +name: semver +version: 6.3.0 +type: npm +summary: The semantic version parser used by npm. +homepage: https://github.com/npm/node-semver#readme +license: isc +licenses: +- sources: LICENSE + text: | + The ISC License + + Copyright (c) Isaac Z. Schlueter and Contributors + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR + IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/tough-cookie-3.0.1.dep.yml b/node_modules/cache/.licenses/npm/tough-cookie-3.0.1.dep.yml new file mode 100644 index 00000000..1496c109 --- /dev/null +++ b/node_modules/cache/.licenses/npm/tough-cookie-3.0.1.dep.yml @@ -0,0 +1,23 @@ +--- +name: tough-cookie +version: 3.0.1 +type: npm +summary: RFC6265 Cookies and Cookie Jar for node.js +homepage: https://github.com/salesforce/tough-cookie +license: bsd-3-clause +licenses: +- sources: LICENSE + text: | + Copyright (c) 2015, Salesforce.com, Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + + 3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+notices: [] diff --git a/node_modules/cache/.licenses/npm/tough-cookie-4.0.0.dep.yml b/node_modules/cache/.licenses/npm/tough-cookie-4.0.0.dep.yml new file mode 100644 index 00000000..71ee4750 --- /dev/null +++ b/node_modules/cache/.licenses/npm/tough-cookie-4.0.0.dep.yml @@ -0,0 +1,23 @@ +--- +name: tough-cookie +version: 4.0.0 +type: npm +summary: RFC6265 Cookies and Cookie Jar for node.js +homepage: https://github.com/salesforce/tough-cookie +license: bsd-3-clause +licenses: +- sources: LICENSE + text: | + Copyright (c) 2015, Salesforce.com, Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + + 3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/tr46.dep.yml b/node_modules/cache/.licenses/npm/tr46.dep.yml new file mode 100644 index 00000000..3bacc6ec --- /dev/null +++ b/node_modules/cache/.licenses/npm/tr46.dep.yml @@ -0,0 +1,30 @@ +--- +name: tr46 +version: 0.0.3 +type: npm +summary: An implementation of the Unicode TR46 spec +homepage: https://github.com/Sebmaster/tr46.js#readme +license: mit +licenses: +- sources: Auto-generated MIT license text + text: | + MIT License + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/tslib-1.14.1.dep.yml b/node_modules/cache/.licenses/npm/tslib-1.14.1.dep.yml new file mode 100644 index 00000000..10b2c2ac --- /dev/null +++ b/node_modules/cache/.licenses/npm/tslib-1.14.1.dep.yml @@ -0,0 +1,35 @@ +--- +name: tslib +version: 1.14.1 +type: npm +summary: Runtime library for TypeScript helper functions +homepage: https://www.typescriptlang.org/ +license: 0bsd +licenses: +- sources: LICENSE.txt + text: |- + Copyright (c) Microsoft Corporation. + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. +notices: +- sources: CopyrightNotice.txt + text: "/*! *****************************************************************************\r\nCopyright + (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute + this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE + SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD + TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. + IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR + CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, + DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS + ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS + SOFTWARE.\r\n***************************************************************************** + */" diff --git a/node_modules/cache/.licenses/npm/tslib-2.3.1.dep.yml b/node_modules/cache/.licenses/npm/tslib-2.3.1.dep.yml new file mode 100644 index 00000000..a6d5be44 --- /dev/null +++ b/node_modules/cache/.licenses/npm/tslib-2.3.1.dep.yml @@ -0,0 +1,35 @@ +--- +name: tslib +version: 2.3.1 +type: npm +summary: Runtime library for TypeScript helper functions +homepage: https://www.typescriptlang.org/ +license: 0bsd +licenses: +- sources: LICENSE.txt + text: |- + Copyright (c) Microsoft Corporation. + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. +notices: +- sources: CopyrightNotice.txt + text: "/*! 
*****************************************************************************\r\nCopyright + (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute + this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE + SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD + TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. + IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR + CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, + DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS + ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS + SOFTWARE.\r\n***************************************************************************** + */" diff --git a/node_modules/cache/.licenses/npm/tunnel.dep.yml b/node_modules/cache/.licenses/npm/tunnel.dep.yml new file mode 100644 index 00000000..9a7111da --- /dev/null +++ b/node_modules/cache/.licenses/npm/tunnel.dep.yml @@ -0,0 +1,35 @@ +--- +name: tunnel +version: 0.0.6 +type: npm +summary: Node HTTP/HTTPS Agents for tunneling proxies +homepage: https://github.com/koichik/node-tunnel/ +license: mit +licenses: +- sources: LICENSE + text: | + The MIT License (MIT) + + Copyright (c) 2012 Koichi Kobayashi + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +- sources: README.md + text: Licensed under the [MIT](https://github.com/koichik/node-tunnel/blob/master/LICENSE) + license. +notices: [] diff --git a/node_modules/cache/.licenses/npm/universalify.dep.yml b/node_modules/cache/.licenses/npm/universalify.dep.yml new file mode 100644 index 00000000..0a1c4cde --- /dev/null +++ b/node_modules/cache/.licenses/npm/universalify.dep.yml @@ -0,0 +1,33 @@ +--- +name: universalify +version: 0.1.2 +type: npm +summary: Make a callback- or promise-based function support both promises and callbacks. 
+homepage: https://github.com/RyanZim/universalify#readme +license: mit +licenses: +- sources: LICENSE + text: | + (The MIT License) + + Copyright (c) 2017, Ryan Zimmerman + + Permission is hereby granted, free of charge, to any person obtaining a copy of + this software and associated documentation files (the 'Software'), to deal in + the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS + FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR + COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +- sources: README.md + text: MIT +notices: [] diff --git a/node_modules/cache/.licenses/npm/uuid-3.4.0.dep.yml b/node_modules/cache/.licenses/npm/uuid-3.4.0.dep.yml new file mode 100644 index 00000000..45970fef --- /dev/null +++ b/node_modules/cache/.licenses/npm/uuid-3.4.0.dep.yml @@ -0,0 +1,39 @@ +--- +name: uuid +version: 3.4.0 +type: npm +summary: RFC4122 (v1, v4, and v5) UUIDs +homepage: https://github.com/uuidjs/uuid#readme +license: mit +licenses: +- sources: LICENSE.md + text: | + The MIT License (MIT) + + Copyright (c) 2010-2016 Robert Kieffer and other contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+notices: +- sources: AUTHORS + text: |- + Robert Kieffer + Christoph Tavan + AJ ONeal + Vincent Voyer + Roman Shtylman diff --git a/node_modules/cache/.licenses/npm/uuid-8.3.2.dep.yml b/node_modules/cache/.licenses/npm/uuid-8.3.2.dep.yml new file mode 100644 index 00000000..bf84da08 --- /dev/null +++ b/node_modules/cache/.licenses/npm/uuid-8.3.2.dep.yml @@ -0,0 +1,20 @@ +--- +name: uuid +version: 8.3.2 +type: npm +summary: RFC4122 (v1, v4, and v5) UUIDs +homepage: https://github.com/uuidjs/uuid#readme +license: mit +licenses: +- sources: LICENSE.md + text: | + The MIT License (MIT) + + Copyright (c) 2010-2020 Robert Kieffer and other contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/webidl-conversions.dep.yml b/node_modules/cache/.licenses/npm/webidl-conversions.dep.yml new file mode 100644 index 00000000..48c1f227 --- /dev/null +++ b/node_modules/cache/.licenses/npm/webidl-conversions.dep.yml @@ -0,0 +1,23 @@ +--- +name: webidl-conversions +version: 3.0.1 +type: npm +summary: Implements the WebIDL algorithms for converting to and from JavaScript values +homepage: https://github.com/jsdom/webidl-conversions#readme +license: bsd-2-clause +licenses: +- sources: LICENSE.md + text: | + # The BSD 2-Clause License + + Copyright (c) 2014, Domenic Denicola + All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/whatwg-url.dep.yml b/node_modules/cache/.licenses/npm/whatwg-url.dep.yml new file mode 100644 index 00000000..bca799cc --- /dev/null +++ b/node_modules/cache/.licenses/npm/whatwg-url.dep.yml @@ -0,0 +1,32 @@ +--- +name: whatwg-url +version: 5.0.0 +type: npm +summary: An implementation of the WHATWG URL Standard's URL API and parsing machinery +homepage: https://github.com/jsdom/whatwg-url#readme +license: mit +licenses: +- sources: LICENSE.txt + text: | + The MIT License (MIT) + + Copyright (c) 2015–2016 Sebastian Mayr + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/xml2js.dep.yml b/node_modules/cache/.licenses/npm/xml2js.dep.yml new file mode 100644 index 00000000..ea43d8df --- /dev/null +++ b/node_modules/cache/.licenses/npm/xml2js.dep.yml @@ -0,0 +1,30 @@ +--- +name: xml2js +version: 0.4.23 +type: npm +summary: Simple XML to JavaScript object converter. +homepage: https://github.com/Leonidas-from-XIV/node-xml2js +license: mit +licenses: +- sources: LICENSE + text: | + Copyright 2010, 2011, 2012, 2013. All rights reserved. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal in the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + IN THE SOFTWARE. +notices: [] diff --git a/node_modules/cache/.licenses/npm/xmlbuilder.dep.yml b/node_modules/cache/.licenses/npm/xmlbuilder.dep.yml new file mode 100644 index 00000000..2198584c --- /dev/null +++ b/node_modules/cache/.licenses/npm/xmlbuilder.dep.yml @@ -0,0 +1,24 @@ +--- +name: xmlbuilder +version: 11.0.1 +type: npm +summary: An XML builder for node.js +homepage: http://github.com/oozcitak/xmlbuilder-js +license: mit +licenses: +- sources: LICENSE + text: "The MIT License (MIT)\r\n\r\nCopyright (c) 2013 Ozgur Ozcitak\r\n\r\nPermission + is hereby granted, free of charge, to any person obtaining a copy\r\nof this software + and associated documentation files (the \"Software\"), to deal\r\nin the Software + without restriction, including without limitation the rights\r\nto use, copy, + modify, merge, publish, distribute, sublicense, and/or sell\r\ncopies of the Software, + and to permit persons to whom the Software is\r\nfurnished to do so, subject to + the following conditions:\r\n\r\nThe above copyright notice and this permission + notice shall be included in\r\nall copies or substantial portions of the Software.\r\n\r\nTHE + SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\r\nIMPLIED, + INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\r\nFITNESS FOR + A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\r\nAUTHORS OR + COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\r\nLIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\r\nOUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\r\nTHE SOFTWARE.\r\n" +notices: [] diff --git a/node_modules/cache/.prettierrc.json b/node_modules/cache/.prettierrc.json new file mode 100644 index 00000000..eec6f1c1 --- /dev/null +++ b/node_modules/cache/.prettierrc.json @@ -0,0 +1,11 @@ +{ + "printWidth": 80, + "tabWidth": 4, + "useTabs": false, + "semi": true, + "singleQuote": false, + "trailingComma": "none", + "bracketSpacing": true, + "arrowParens": "avoid", + "parser": "typescript" +} \ No newline at end of file diff --git a/node_modules/cache/CODE_OF_CONDUCT.md b/node_modules/cache/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..a0047d87 --- /dev/null +++ b/node_modules/cache/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to make participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all project spaces, and it also applies when +an individual is representing the project or its community in public spaces. +Examples of representing a project or community include using an official +project e-mail address, posting via an official social media account, or acting +as an appointed representative at an online or offline event. Representation of +a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at opensource+actions/cache@github.com. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/node_modules/cache/CONTRIBUTING.md b/node_modules/cache/CONTRIBUTING.md new file mode 100644 index 00000000..c9cfb0a0 --- /dev/null +++ b/node_modules/cache/CONTRIBUTING.md @@ -0,0 +1,38 @@ +## Contributing + +[fork]: https://github.com/actions/cache/fork +[pr]: https://github.com/actions/cache/compare +[style]: https://github.com/styleguide/js +[code-of-conduct]: CODE_OF_CONDUCT.md + +Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. 
+ +Contributions to this project are [released](https://help.github.com/articles/github-terms-of-service/#6-contributions-under-repository-license) to the public under the [project's open source license](LICENSE). + +Please note that this project is released with a [Contributor Code of Conduct][code-of-conduct]. By participating in this project you agree to abide by its terms. + +## Submitting a pull request + +1. [Fork][fork] and clone the repository +2. Configure and install the dependencies: `npm install` +3. Make sure the tests pass on your machine: `npm run test` +4. Create a new branch: `git checkout -b my-branch-name` +5. Make your change, add tests, and make sure the tests still pass +6. Push to your fork and [submit a pull request][pr] +7. Pat yourself on the back and wait for your pull request to be reviewed and merged. + +Here are a few things you can do that will increase the likelihood of your pull request being accepted: + +- Write tests. +- Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests. +- Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html). + +## Licensed + +This repository uses a tool called [Licensed](https://github.com/github/licensed) to verify third party dependencies. You may need to locally install licensed and run `licensed cache` to update the dependency cache if you install or update a production dependency. If licensed cache is unable to determine the dependency, you may need to modify the cache file yourself to put the correct license. You should still verify the dependency; licensed is a tool to help, but it is not a substitute for human review of dependencies. + +## Resources + +- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/) +- [Using Pull Requests](https://help.github.com/articles/about-pull-requests/) +- [GitHub Help](https://help.github.com) \ No newline at end of file diff --git a/node_modules/cache/LICENSE b/node_modules/cache/LICENSE new file mode 100644 index 00000000..a426ef25 --- /dev/null +++ b/node_modules/cache/LICENSE @@ -0,0 +1,22 @@ + +The MIT License (MIT) + +Copyright (c) 2018 GitHub, Inc. and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
\ No newline at end of file diff --git a/node_modules/cache/README.md b/node_modules/cache/README.md new file mode 100644 index 00000000..a4c8f1b0 --- /dev/null +++ b/node_modules/cache/README.md @@ -0,0 +1,184 @@ +# cache + +This action allows caching dependencies and build outputs to improve workflow execution time. + +[![Tests](https://github.com/actions/cache/actions/workflows/workflow.yml/badge.svg)](https://github.com/actions/cache/actions/workflows/workflow.yml) + +## Documentation + +See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows). + +## What's New +### v3 +* Added support for caching from GHES 3.5. +* Fixed download issue for files > 2GB during restore. +* Updated the minimum runner version support from node 12 -> node 16. + +### v2 +* Increased the cache size limit to 10 GB. +* Added support for multiple paths, [glob patterns](https://github.com/actions/toolkit/tree/main/packages/glob), and single file caches. + +```yaml +- name: Cache multiple paths + uses: actions/cache@v3 + with: + path: | + ~/cache + !~/cache/exclude + key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} +``` + +* Increased performance and improved cache sizes using `zstd` compression for Linux and macOS runners +* Allowed caching for all events with a ref. See [events that trigger workflow](https://help.github.com/en/actions/reference/events-that-trigger-workflows) for info on which events do not have a `GITHUB_REF` +* Released the [`@actions/cache`](https://github.com/actions/toolkit/tree/main/packages/cache) npm package to allow other actions to utilize caching +* Added a best-effort cleanup step to delete the archive after extraction to reduce storage space + +Refer [here](https://github.com/actions/cache/blob/v1/README.md) for previous versions + +## Usage + +### Pre-requisites +Create a workflow `.yml` file in your repositories `.github/workflows` directory. An [example workflow](#example-workflow) is available below. For more information, reference the GitHub Help Documentation for [Creating a workflow file](https://help.github.com/en/articles/configuring-a-workflow#creating-a-workflow-file). + +If you are using this inside a container, a POSIX-compliant `tar` needs to be included and accessible in the execution path. + +### Inputs + +* `path` - A list of files, directories, and wildcard patterns to cache and restore. See [`@actions/glob`](https://github.com/actions/toolkit/tree/main/packages/glob) for supported patterns. +* `key` - An explicit key for restoring and saving the cache +* `restore-keys` - An ordered list of keys to use for restoring the cache if no cache hit occurred for key + +### Outputs + +* `cache-hit` - A boolean value to indicate an exact match was found for the key + +> See [Skipping steps based on cache-hit](#Skipping-steps-based-on-cache-hit) for info on using this output + +### Cache scopes +The cache is scoped to the key and branch. The default branch cache is available to other branches. + +See [Matching a cache key](https://help.github.com/en/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key) for more info. 
+ +### Example workflow + +```yaml +name: Caching Primes + +on: push + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Cache Primes + id: cache-primes + uses: actions/cache@v3 + with: + path: prime-numbers + key: ${{ runner.os }}-primes + + - name: Generate Prime Numbers + if: steps.cache-primes.outputs.cache-hit != 'true' + run: /generate-primes.sh -d prime-numbers + + - name: Use Prime Numbers + run: /primes.sh -d prime-numbers +``` + +## Implementation Examples + +Every programming language and framework has its own way of caching. + +See [Examples](examples.md) for a list of `actions/cache` implementations for use with: + +- [C# - NuGet](./examples.md#c---nuget) +- [D - DUB](./examples.md#d---dub) +- [Deno](./examples.md#deno) +- [Elixir - Mix](./examples.md#elixir---mix) +- [Go - Modules](./examples.md#go---modules) +- [Haskell - Cabal](./examples.md#haskell---cabal) +- [Haskell - Stack](./examples.md#haskell---stack) +- [Java - Gradle](./examples.md#java---gradle) +- [Java - Maven](./examples.md#java---maven) +- [Node - npm](./examples.md#node---npm) +- [Node - Lerna](./examples.md#node---lerna) +- [Node - Yarn](./examples.md#node---yarn) +- [OCaml/Reason - esy](./examples.md#ocamlreason---esy) +- [PHP - Composer](./examples.md#php---composer) +- [Python - pip](./examples.md#python---pip) +- [Python - pipenv](./examples.md#python---pipenv) +- [R - renv](./examples.md#r---renv) +- [Ruby - Bundler](./examples.md#ruby---bundler) +- [Rust - Cargo](./examples.md#rust---cargo) +- [Scala - SBT](./examples.md#scala---sbt) +- [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage) +- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods) +- [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager) + +## Creating a cache key + +A cache key can include any of the contexts, functions, literals, and operators supported by GitHub Actions. + +For example, using the [`hashFiles`](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#hashfiles) function allows you to create a new cache when dependencies change. + +```yaml + - uses: actions/cache@v3 + with: + path: | + path/to/dependencies + some/other/dependencies + key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} +``` + +Additionally, you can use arbitrary command output in a cache key, such as a date or software version: + +```yaml + # http://man7.org/linux/man-pages/man1/date.1.html + - name: Get Date + id: get-date + run: | + echo "::set-output name=date::$(/bin/date -u "+%Y%m%d")" + shell: bash + + - uses: actions/cache@v3 + with: + path: path/to/dependencies + key: ${{ runner.os }}-${{ steps.get-date.outputs.date }}-${{ hashFiles('**/lockfiles') }} +``` + +See [Using contexts to create cache keys](https://help.github.com/en/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows#using-contexts-to-create-cache-keys) + +## Cache Limits + +A repository can have up to 10GB of caches. Once the 10GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted. + +## Skipping steps based on cache-hit + +Using the `cache-hit` output, subsequent steps (such as install or build) can be skipped when a cache hit occurs on the key. 
+ +Example: +```yaml +steps: + - uses: actions/checkout@v3 + + - uses: actions/cache@v3 + id: cache + with: + path: path/to/dependencies + key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} + + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: /install.sh +``` + +> Note: The `id` defined in `actions/cache` must match the `id` in the `if` statement (i.e. `steps.[ID].outputs.cache-hit`) + +## Contributing +We would love for you to contribute to `actions/cache`, pull requests are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) for more information. + +## License +The scripts and documentation in this project are released under the [MIT License](LICENSE) diff --git a/node_modules/cache/RELEASES.md b/node_modules/cache/RELEASES.md new file mode 100644 index 00000000..f1285e73 --- /dev/null +++ b/node_modules/cache/RELEASES.md @@ -0,0 +1,8 @@ +# Releases + +### 3.0.0 +- Updated minimum runner version support from node 12 -> node 16 + +### 3.0.1 +- Added support for caching from GHES 3.5. +- Fixed download issue for files > 2GB during restore. diff --git a/node_modules/cache/__tests__/__fixtures__/helloWorld.txt b/node_modules/cache/__tests__/__fixtures__/helloWorld.txt new file mode 100644 index 00000000..95d09f2b --- /dev/null +++ b/node_modules/cache/__tests__/__fixtures__/helloWorld.txt @@ -0,0 +1 @@ +hello world \ No newline at end of file diff --git a/node_modules/cache/__tests__/actionUtils.test.ts b/node_modules/cache/__tests__/actionUtils.test.ts new file mode 100644 index 00000000..e36a6a52 --- /dev/null +++ b/node_modules/cache/__tests__/actionUtils.test.ts @@ -0,0 +1,274 @@ +import * as cache from "@actions/cache"; +import * as core from "@actions/core"; + +import { Events, Outputs, RefKey, State } from "../src/constants"; +import * as actionUtils from "../src/utils/actionUtils"; +import * as testUtils from "../src/utils/testUtils"; + +jest.mock("@actions/core"); +jest.mock("@actions/cache"); + +beforeAll(() => { + jest.spyOn(core, "getInput").mockImplementation((name, options) => { + return jest.requireActual("@actions/core").getInput(name, options); + }); +}); + +afterEach(() => { + delete process.env[Events.Key]; + delete process.env[RefKey]; +}); + +test("isGhes returns true if server url is not github.com", () => { + try { + process.env["GITHUB_SERVER_URL"] = "http://example.com"; + expect(actionUtils.isGhes()).toBe(true); + } finally { + process.env["GITHUB_SERVER_URL"] = undefined; + } +}); + +test("isGhes returns false when server url is github.com", () => { + try { + process.env["GITHUB_SERVER_URL"] = "http://github.com"; + expect(actionUtils.isGhes()).toBe(false); + } finally { + process.env["GITHUB_SERVER_URL"] = undefined; + } +}); + +test("isExactKeyMatch with undefined cache key returns false", () => { + const key = "linux-rust"; + const cacheKey = undefined; + + expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false); +}); + +test("isExactKeyMatch with empty cache key returns false", () => { + const key = "linux-rust"; + const cacheKey = ""; + + expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false); +}); + +test("isExactKeyMatch with different keys returns false", () => { + const key = "linux-rust"; + const cacheKey = "linux-"; + + expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false); +}); + +test("isExactKeyMatch with different key accents returns false", () => { + const key = "linux-áccent"; + const cacheKey = "linux-accent"; + + expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false); 
+}); + +test("isExactKeyMatch with same key returns true", () => { + const key = "linux-rust"; + const cacheKey = "linux-rust"; + + expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true); +}); + +test("isExactKeyMatch with same key and different casing returns true", () => { + const key = "linux-rust"; + const cacheKey = "LINUX-RUST"; + + expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true); +}); + +test("setOutputAndState with undefined entry to set cache-hit output", () => { + const key = "linux-rust"; + const cacheKey = undefined; + + const setOutputMock = jest.spyOn(core, "setOutput"); + const saveStateMock = jest.spyOn(core, "saveState"); + + actionUtils.setOutputAndState(key, cacheKey); + + expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false"); + expect(setOutputMock).toHaveBeenCalledTimes(1); + + expect(saveStateMock).toHaveBeenCalledTimes(0); +}); + +test("setOutputAndState with exact match to set cache-hit output and state", () => { + const key = "linux-rust"; + const cacheKey = "linux-rust"; + + const setOutputMock = jest.spyOn(core, "setOutput"); + const saveStateMock = jest.spyOn(core, "saveState"); + + actionUtils.setOutputAndState(key, cacheKey); + + expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true"); + expect(setOutputMock).toHaveBeenCalledTimes(1); + + expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey); + expect(saveStateMock).toHaveBeenCalledTimes(1); +}); + +test("setOutputAndState with no exact match to set cache-hit output and state", () => { + const key = "linux-rust"; + const cacheKey = "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43"; + + const setOutputMock = jest.spyOn(core, "setOutput"); + const saveStateMock = jest.spyOn(core, "saveState"); + + actionUtils.setOutputAndState(key, cacheKey); + + expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false"); + expect(setOutputMock).toHaveBeenCalledTimes(1); + + expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey); + expect(saveStateMock).toHaveBeenCalledTimes(1); +}); + +test("getCacheState with no state returns undefined", () => { + const getStateMock = jest.spyOn(core, "getState"); + getStateMock.mockImplementation(() => { + return ""; + }); + + const state = actionUtils.getCacheState(); + + expect(state).toBe(undefined); + + expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey); + expect(getStateMock).toHaveBeenCalledTimes(1); +}); + +test("getCacheState with valid state", () => { + const cacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + + const getStateMock = jest.spyOn(core, "getState"); + getStateMock.mockImplementation(() => { + return cacheKey; + }); + + const state = actionUtils.getCacheState(); + + expect(state).toEqual(cacheKey); + + expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey); + expect(getStateMock).toHaveBeenCalledTimes(1); +}); + +test("logWarning logs a message with a warning prefix", () => { + const message = "A warning occurred."; + + const infoMock = jest.spyOn(core, "info"); + + actionUtils.logWarning(message); + + expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`); +}); + +test("isValidEvent returns false for event that does not have a branch or tag", () => { + const event = "foo"; + process.env[Events.Key] = event; + + const isValidEvent = actionUtils.isValidEvent(); + + expect(isValidEvent).toBe(false); +}); + +test("isValidEvent returns true for event that has a ref", () => { + const event = Events.Push; + 
process.env[Events.Key] = event; + process.env[RefKey] = "ref/heads/feature"; + + const isValidEvent = actionUtils.isValidEvent(); + + expect(isValidEvent).toBe(true); +}); + +test("getInputAsArray returns empty array if not required and missing", () => { + expect(actionUtils.getInputAsArray("foo")).toEqual([]); +}); + +test("getInputAsArray throws error if required and missing", () => { + expect(() => + actionUtils.getInputAsArray("foo", { required: true }) + ).toThrowError(); +}); + +test("getInputAsArray handles single line correctly", () => { + testUtils.setInput("foo", "bar"); + expect(actionUtils.getInputAsArray("foo")).toEqual(["bar"]); +}); + +test("getInputAsArray handles multiple lines correctly", () => { + testUtils.setInput("foo", "bar\nbaz"); + expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]); +}); + +test("getInputAsArray handles different new lines correctly", () => { + testUtils.setInput("foo", "bar\r\nbaz"); + expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]); +}); + +test("getInputAsArray handles empty lines correctly", () => { + testUtils.setInput("foo", "\n\nbar\n\nbaz\n\n"); + expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]); +}); + +test("getInputAsInt returns undefined if input not set", () => { + expect(actionUtils.getInputAsInt("undefined")).toBeUndefined(); +}); + +test("getInputAsInt returns value if input is valid", () => { + testUtils.setInput("foo", "8"); + expect(actionUtils.getInputAsInt("foo")).toBe(8); +}); + +test("getInputAsInt returns undefined if input is invalid or NaN", () => { + testUtils.setInput("foo", "bar"); + expect(actionUtils.getInputAsInt("foo")).toBeUndefined(); +}); + +test("getInputAsInt throws if required and value missing", () => { + expect(() => + actionUtils.getInputAsInt("undefined", { required: true }) + ).toThrowError(); +}); + +test("isCacheFeatureAvailable for ac enabled", () => { + jest.spyOn(cache, "isFeatureAvailable").mockImplementation(() => true); + + expect(actionUtils.isCacheFeatureAvailable()).toBe(true); +}); + +test("isCacheFeatureAvailable for ac disabled on GHES", () => { + jest.spyOn(cache, "isFeatureAvailable").mockImplementation(() => false); + + const message = + "Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not."; + const infoMock = jest.spyOn(core, "info"); + + try { + process.env["GITHUB_SERVER_URL"] = "http://example.com"; + expect(actionUtils.isCacheFeatureAvailable()).toBe(false); + expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`); + } finally { + delete process.env["GITHUB_SERVER_URL"]; + } +}); + +test("isCacheFeatureAvailable for ac disabled on dotcom", () => { + jest.spyOn(cache, "isFeatureAvailable").mockImplementation(() => false); + + const message = + "An internal error has occurred in cache backend. 
Please check https://www.githubstatus.com/ for any ongoing issue in actions."; + const infoMock = jest.spyOn(core, "info"); + + try { + process.env["GITHUB_SERVER_URL"] = "http://github.com"; + expect(actionUtils.isCacheFeatureAvailable()).toBe(false); + expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`); + } finally { + delete process.env["GITHUB_SERVER_URL"]; + } +}); diff --git a/node_modules/cache/__tests__/create-cache-files.sh b/node_modules/cache/__tests__/create-cache-files.sh new file mode 100755 index 00000000..0ce4140f --- /dev/null +++ b/node_modules/cache/__tests__/create-cache-files.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +# Validate args +prefix="$1" +if [ -z "$prefix" ]; then + echo "Must supply prefix argument" + exit 1 +fi + +path="$2" +if [ -z "$path" ]; then + echo "Must supply path argument" + exit 1 +fi + +mkdir -p $path +echo "$prefix $GITHUB_RUN_ID" > $path/test-file.txt diff --git a/node_modules/cache/__tests__/restore.test.ts b/node_modules/cache/__tests__/restore.test.ts new file mode 100644 index 00000000..b4dbba9e --- /dev/null +++ b/node_modules/cache/__tests__/restore.test.ts @@ -0,0 +1,360 @@ +import * as cache from "@actions/cache"; +import * as core from "@actions/core"; + +import { Events, Inputs, RefKey } from "../src/constants"; +import run from "../src/restore"; +import * as actionUtils from "../src/utils/actionUtils"; +import * as testUtils from "../src/utils/testUtils"; + +jest.mock("../src/utils/actionUtils"); + +beforeAll(() => { + jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation( + (key, cacheResult) => { + const actualUtils = jest.requireActual("../src/utils/actionUtils"); + return actualUtils.isExactKeyMatch(key, cacheResult); + } + ); + + jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => { + const actualUtils = jest.requireActual("../src/utils/actionUtils"); + return actualUtils.isValidEvent(); + }); + + jest.spyOn(actionUtils, "getInputAsArray").mockImplementation( + (name, options) => { + const actualUtils = jest.requireActual("../src/utils/actionUtils"); + return actualUtils.getInputAsArray(name, options); + } + ); +}); + +beforeEach(() => { + process.env[Events.Key] = Events.Push; + process.env[RefKey] = "refs/heads/feature-branch"; + + jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false); + jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation( + () => true + ); +}); + +afterEach(() => { + testUtils.clearInputs(); + delete process.env[Events.Key]; + delete process.env[RefKey]; +}); + +test("restore with invalid event outputs warning", async () => { + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + const invalidEvent = "commit_comment"; + process.env[Events.Key] = invalidEvent; + delete process.env[RefKey]; + await run(); + expect(logWarningMock).toHaveBeenCalledWith( + `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.` + ); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("restore without AC available should no-op", async () => { + jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false); + jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation( + () => false + ); + + const restoreCacheMock = jest.spyOn(cache, "restoreCache"); + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + + await run(); + + expect(restoreCacheMock).toHaveBeenCalledTimes(0); + 
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); +}); + +test("restore on GHES without AC available should no-op", async () => { + jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true); + jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation( + () => false + ); + + const restoreCacheMock = jest.spyOn(cache, "restoreCache"); + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + + await run(); + + expect(restoreCacheMock).toHaveBeenCalledTimes(0); + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); +}); + +test("restore on GHES with AC available ", async () => { + jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true); + const path = "node_modules"; + const key = "node-test"; + testUtils.setInputs({ + path: path, + key + }); + + const infoMock = jest.spyOn(core, "info"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + const restoreCacheMock = jest + .spyOn(cache, "restoreCache") + .mockImplementationOnce(() => { + return Promise.resolve(key); + }); + + await run(); + + expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); + + expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("restore with no path should fail", async () => { + const failedMock = jest.spyOn(core, "setFailed"); + const restoreCacheMock = jest.spyOn(cache, "restoreCache"); + await run(); + expect(restoreCacheMock).toHaveBeenCalledTimes(0); + // this input isn't necessary for restore b/c tarball contains entries relative to workspace + expect(failedMock).not.toHaveBeenCalledWith( + "Input required and not supplied: path" + ); +}); + +test("restore with no key", async () => { + testUtils.setInput(Inputs.Path, "node_modules"); + const failedMock = jest.spyOn(core, "setFailed"); + const restoreCacheMock = jest.spyOn(cache, "restoreCache"); + await run(); + expect(restoreCacheMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledWith( + "Input required and not supplied: key" + ); +}); + +test("restore with too many keys should fail", async () => { + const path = "node_modules"; + const key = "node-test"; + const restoreKeys = [...Array(20).keys()].map(x => x.toString()); + testUtils.setInputs({ + path: path, + key, + restoreKeys + }); + const failedMock = jest.spyOn(core, "setFailed"); + const restoreCacheMock = jest.spyOn(cache, "restoreCache"); + await run(); + expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith([path], key, restoreKeys); + expect(failedMock).toHaveBeenCalledWith( + `Key Validation Error: Keys are limited to a maximum of 10.` + ); +}); + +test("restore with large key should fail", async () => { + const path = "node_modules"; + const key = "foo".repeat(512); // Over the 512 character limit + testUtils.setInputs({ + path: path, + key + }); + const failedMock = jest.spyOn(core, "setFailed"); + const restoreCacheMock = jest.spyOn(cache, "restoreCache"); + await run(); + 
expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); + expect(failedMock).toHaveBeenCalledWith( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ); +}); + +test("restore with invalid key should fail", async () => { + const path = "node_modules"; + const key = "comma,comma"; + testUtils.setInputs({ + path: path, + key + }); + const failedMock = jest.spyOn(core, "setFailed"); + const restoreCacheMock = jest.spyOn(cache, "restoreCache"); + await run(); + expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); + expect(failedMock).toHaveBeenCalledWith( + `Key Validation Error: ${key} cannot contain commas.` + ); +}); + +test("restore with no cache found", async () => { + const path = "node_modules"; + const key = "node-test"; + testUtils.setInputs({ + path: path, + key + }); + + const infoMock = jest.spyOn(core, "info"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + const restoreCacheMock = jest + .spyOn(cache, "restoreCache") + .mockImplementationOnce(() => { + return Promise.resolve(undefined); + }); + + await run(); + + expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(failedMock).toHaveBeenCalledTimes(0); + + expect(infoMock).toHaveBeenCalledWith( + `Cache not found for input keys: ${key}` + ); +}); + +test("restore with server error should fail", async () => { + const path = "node_modules"; + const key = "node-test"; + testUtils.setInputs({ + path: path, + key + }); + + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + const restoreCacheMock = jest + .spyOn(cache, "restoreCache") + .mockImplementationOnce(() => { + throw new Error("HTTP Error Occurred"); + }); + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + + await run(); + + expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + + expect(logWarningMock).toHaveBeenCalledTimes(1); + expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); + + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); + + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("restore with restore keys and no cache found", async () => { + const path = "node_modules"; + const key = "node-test"; + const restoreKey = "node-"; + testUtils.setInputs({ + path: path, + key, + restoreKeys: [restoreKey] + }); + + const infoMock = jest.spyOn(core, "info"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + const restoreCacheMock = jest + .spyOn(cache, "restoreCache") + .mockImplementationOnce(() => { + return Promise.resolve(undefined); + }); + + await run(); + + expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(failedMock).toHaveBeenCalledTimes(0); + + expect(infoMock).toHaveBeenCalledWith( + `Cache not found for input keys: ${key}, ${restoreKey}` + ); +}); + +test("restore with cache 
found for key", async () => { + const path = "node_modules"; + const key = "node-test"; + testUtils.setInputs({ + path: path, + key + }); + + const infoMock = jest.spyOn(core, "info"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + const restoreCacheMock = jest + .spyOn(cache, "restoreCache") + .mockImplementationOnce(() => { + return Promise.resolve(key); + }); + + await run(); + + expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); + + expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("restore with cache found for restore key", async () => { + const path = "node_modules"; + const key = "node-test"; + const restoreKey = "node-"; + testUtils.setInputs({ + path: path, + key, + restoreKeys: [restoreKey] + }); + + const infoMock = jest.spyOn(core, "info"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + const restoreCacheMock = jest + .spyOn(cache, "restoreCache") + .mockImplementationOnce(() => { + return Promise.resolve(restoreKey); + }); + + await run(); + + expect(restoreCacheMock).toHaveBeenCalledTimes(1); + expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); + + expect(infoMock).toHaveBeenCalledWith( + `Cache restored from key: ${restoreKey}` + ); + expect(failedMock).toHaveBeenCalledTimes(0); +}); diff --git a/node_modules/cache/__tests__/save.test.ts b/node_modules/cache/__tests__/save.test.ts new file mode 100644 index 00000000..7598e0c5 --- /dev/null +++ b/node_modules/cache/__tests__/save.test.ts @@ -0,0 +1,392 @@ +import * as cache from "@actions/cache"; +import * as core from "@actions/core"; + +import { Events, Inputs, RefKey } from "../src/constants"; +import run from "../src/save"; +import * as actionUtils from "../src/utils/actionUtils"; +import * as testUtils from "../src/utils/testUtils"; + +jest.mock("@actions/core"); +jest.mock("@actions/cache"); +jest.mock("../src/utils/actionUtils"); + +beforeAll(() => { + jest.spyOn(core, "getInput").mockImplementation((name, options) => { + return jest.requireActual("@actions/core").getInput(name, options); + }); + + jest.spyOn(actionUtils, "getCacheState").mockImplementation(() => { + return jest.requireActual("../src/utils/actionUtils").getCacheState(); + }); + + jest.spyOn(actionUtils, "getInputAsArray").mockImplementation( + (name, options) => { + return jest + .requireActual("../src/utils/actionUtils") + .getInputAsArray(name, options); + } + ); + + jest.spyOn(actionUtils, "getInputAsInt").mockImplementation( + (name, options) => { + return jest + .requireActual("../src/utils/actionUtils") + .getInputAsInt(name, options); + } + ); + + jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation( + (key, cacheResult) => { + return jest + .requireActual("../src/utils/actionUtils") + .isExactKeyMatch(key, cacheResult); + } + ); + + 
jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => { + const actualUtils = jest.requireActual("../src/utils/actionUtils"); + return actualUtils.isValidEvent(); + }); +}); + +beforeEach(() => { + process.env[Events.Key] = Events.Push; + process.env[RefKey] = "refs/heads/feature-branch"; + + jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false); + jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation( + () => true + ); +}); + +afterEach(() => { + testUtils.clearInputs(); + delete process.env[Events.Key]; + delete process.env[RefKey]; +}); + +test("save with invalid event outputs warning", async () => { + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + const invalidEvent = "commit_comment"; + process.env[Events.Key] = invalidEvent; + delete process.env[RefKey]; + await run(); + expect(logWarningMock).toHaveBeenCalledWith( + `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.` + ); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with no primary key in state outputs warning", async () => { + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return savedCacheKey; + }) + // Cache Key State + .mockImplementationOnce(() => { + return ""; + }); + const saveCacheMock = jest.spyOn(cache, "saveCache"); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(0); + expect(logWarningMock).toHaveBeenCalledWith( + `Error retrieving key from state.` + ); + expect(logWarningMock).toHaveBeenCalledTimes(1); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save without AC available should no-op", async () => { + jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation( + () => false + ); + + const saveCacheMock = jest.spyOn(cache, "saveCache"); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(0); +}); + +test("save on ghes without AC available should no-op", async () => { + jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true); + jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation( + () => false + ); + + const saveCacheMock = jest.spyOn(cache, "saveCache"); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(0); +}); + +test("save on GHES with AC available", async () => { + jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const savedCacheKey = "Linux-node-"; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return savedCacheKey; + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const inputPath = "node_modules"; + testUtils.setInput(Inputs.Path, inputPath); + testUtils.setInput(Inputs.UploadChunkSize, "4000000"); + + const cacheId = 4; + const saveCacheMock = jest + .spyOn(cache, "saveCache") + .mockImplementationOnce(() => { + return Promise.resolve(cacheId); + }); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(1); + expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, { + uploadChunkSize: 4000000 + }); + + 
expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with exact match returns early", async () => { + const infoMock = jest.spyOn(core, "info"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const savedCacheKey = primaryKey; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return savedCacheKey; + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + const saveCacheMock = jest.spyOn(cache, "saveCache"); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(0); + expect(infoMock).toHaveBeenCalledWith( + `Cache hit occurred on the primary key ${primaryKey}, not saving cache.` + ); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with missing input outputs warning", async () => { + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const savedCacheKey = "Linux-node-"; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return savedCacheKey; + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + const saveCacheMock = jest.spyOn(cache, "saveCache"); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(0); + expect(logWarningMock).toHaveBeenCalledWith( + "Input required and not supplied: path" + ); + expect(logWarningMock).toHaveBeenCalledTimes(1); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with large cache outputs warning", async () => { + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const savedCacheKey = "Linux-node-"; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return savedCacheKey; + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const inputPath = "node_modules"; + testUtils.setInput(Inputs.Path, inputPath); + + const saveCacheMock = jest + .spyOn(cache, "saveCache") + .mockImplementationOnce(() => { + throw new Error( + "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache." + ); + }); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(1); + expect(saveCacheMock).toHaveBeenCalledWith( + [inputPath], + primaryKey, + expect.anything() + ); + + expect(logWarningMock).toHaveBeenCalledTimes(1); + expect(logWarningMock).toHaveBeenCalledWith( + "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache." 
+ ); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with reserve cache failure outputs warning", async () => { + const infoMock = jest.spyOn(core, "info"); + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const savedCacheKey = "Linux-node-"; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return savedCacheKey; + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const inputPath = "node_modules"; + testUtils.setInput(Inputs.Path, inputPath); + + const saveCacheMock = jest + .spyOn(cache, "saveCache") + .mockImplementationOnce(() => { + const actualCache = jest.requireActual("@actions/cache"); + const error = new actualCache.ReserveCacheError( + `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` + ); + throw error; + }); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(1); + expect(saveCacheMock).toHaveBeenCalledWith( + [inputPath], + primaryKey, + expect.anything() + ); + + expect(infoMock).toHaveBeenCalledWith( + `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` + ); + expect(logWarningMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with server error outputs warning", async () => { + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const savedCacheKey = "Linux-node-"; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return savedCacheKey; + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const inputPath = "node_modules"; + testUtils.setInput(Inputs.Path, inputPath); + + const saveCacheMock = jest + .spyOn(cache, "saveCache") + .mockImplementationOnce(() => { + throw new Error("HTTP Error Occurred"); + }); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(1); + expect(saveCacheMock).toHaveBeenCalledWith( + [inputPath], + primaryKey, + expect.anything() + ); + + expect(logWarningMock).toHaveBeenCalledTimes(1); + expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); + + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with valid inputs uploads a cache", async () => { + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const savedCacheKey = "Linux-node-"; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return savedCacheKey; + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const inputPath = "node_modules"; + testUtils.setInput(Inputs.Path, inputPath); + testUtils.setInput(Inputs.UploadChunkSize, "4000000"); + + const cacheId = 4; + const saveCacheMock = jest + .spyOn(cache, "saveCache") + .mockImplementationOnce(() => { + return Promise.resolve(cacheId); + }); + + await run(); + + expect(saveCacheMock).toHaveBeenCalledTimes(1); + expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, { + uploadChunkSize: 4000000 + }); + + expect(failedMock).toHaveBeenCalledTimes(0); +}); diff --git a/node_modules/cache/__tests__/verify-cache-files.sh 
b/node_modules/cache/__tests__/verify-cache-files.sh new file mode 100755 index 00000000..3ee8a842 --- /dev/null +++ b/node_modules/cache/__tests__/verify-cache-files.sh @@ -0,0 +1,36 @@ +#!/bin/sh + +# Validate args +prefix="$1" +if [ -z "$prefix" ]; then + echo "Must supply prefix argument" + exit 1 +fi + +path="$2" +if [ -z "$path" ]; then + echo "Must specify path argument" + exit 1 +fi + +# Sanity check GITHUB_RUN_ID defined +if [ -z "$GITHUB_RUN_ID" ]; then + echo "GITHUB_RUN_ID not defined" + exit 1 +fi + +# Verify file exists +file="$path/test-file.txt" +echo "Checking for $file" +if [ ! -e $file ]; then + echo "File does not exist" + exit 1 +fi + +# Verify file content +content="$(cat $file)" +echo "File content:\n$content" +if [ -z "$(echo $content | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then + echo "Unexpected file content" + exit 1 +fi diff --git a/node_modules/cache/action.yml b/node_modules/cache/action.yml new file mode 100644 index 00000000..b5460b75 --- /dev/null +++ b/node_modules/cache/action.yml @@ -0,0 +1,27 @@ +name: 'Cache' +description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time' +author: 'GitHub' +inputs: + path: + description: 'A list of files, directories, and wildcard patterns to cache and restore' + required: true + key: + description: 'An explicit key for restoring and saving the cache' + required: true + restore-keys: + description: 'An ordered list of keys to use for restoring the cache if no cache hit occurred for key' + required: false + upload-chunk-size: + description: 'The chunk size used to split up large files during upload, in bytes' + required: false +outputs: + cache-hit: + description: 'A boolean value to indicate an exact match was found for the primary key' +runs: + using: 'node16' + main: 'dist/restore/index.js' + post: 'dist/save/index.js' + post-if: 'success()' +branding: + icon: 'archive' + color: 'gray-dark' diff --git a/node_modules/cache/dist/restore/index.js b/node_modules/cache/dist/restore/index.js new file mode 100644 index 00000000..96ec35f1 --- /dev/null +++ b/node_modules/cache/dist/restore/index.js @@ -0,0 +1,60410 @@ +module.exports = +/******/ (function(modules, runtime) { // webpackBootstrap +/******/ "use strict"; +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ __webpack_require__.ab = __dirname + "/"; +/******/ +/******/ // the startup function +/******/ function startup() { +/******/ // Load entry module and return exports +/******/ return __webpack_require__(778); +/******/ }; +/******/ +/******/ // run startup +/******/ return startup(); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */, +/* 1 */ +/***/ 
(function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = __webpack_require__(357); +const childProcess = __importStar(__webpack_require__(129)); +const path = __importStar(__webpack_require__(622)); +const util_1 = __webpack_require__(669); +const ioUtil = __importStar(__webpack_require__(672)); +const exec = util_1.promisify(childProcess.exec); +const execFile = util_1.promisify(childProcess.execFile); +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() && copySourceDirectory + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. 
${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. + */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another + // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); + } + try { + const cmdPath = ioUtil.getCmdPath(); + if (yield ioUtil.isDirectory(inputPath, true)) { + yield exec(`${cmdPath} /s /c "rd /s /q "%inputPath%""`, { + env: { inputPath } + }); + } + else { + yield exec(`${cmdPath} /s /c "del /f /a "%inputPath%""`, { + env: { inputPath } + }); + } + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + // Shelling out fails to remove a symlink folder with missing source, this unlink catches that + try { + yield ioUtil.unlink(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + } + else { + let isDir = false; + try { + isDir = yield ioUtil.isDirectory(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + return; + } + if (isDir) { + yield execFile(`rm`, [`-rf`, `${inputPath}`]); + } + else { + yield ioUtil.unlink(inputPath); + } + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. 
+ * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } + } + return result; + } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ''; + }); +} +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. + * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); +} +exports.findInPath = findInPath; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null + ? 
true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); +} +//# sourceMappingURL=io.js.map + +/***/ }), +/* 2 */, +/* 3 */, +/* 4 */, +/* 5 */, +/* 6 */, +/* 7 */, +/* 8 */, +/* 9 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(__webpack_require__(87)); +const events = __importStar(__webpack_require__(614)); +const child = __importStar(__webpack_require__(129)); +const path = __importStar(__webpack_require__(622)); +const io = __importStar(__webpack_require__(1)); +const ioUtil = __importStar(__webpack_require__(672)); +const timers_1 = __webpack_require__(213); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + return s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + return ''; + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? 
a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. + // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. 
this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + let errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); + } + })); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. + if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map + +/***/ }), +/* 10 */, +/* 11 */, +/* 12 */, +/* 13 */, +/* 14 */, +/* 15 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const exec = __importStar(__webpack_require__(986)); +const glob = __importStar(__webpack_require__(281)); +const io = __importStar(__webpack_require__(1)); +const fs = __importStar(__webpack_require__(747)); +const path = __importStar(__webpack_require__(622)); +const semver = __importStar(__webpack_require__(280)); +const util = __importStar(__webpack_require__(669)); +const uuid_1 = __webpack_require__(898); +const constants_1 = __webpack_require__(931); +// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 +function createTempDirectory() { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === 'win32'; + let tempDirectory = process.env['RUNNER_TEMP'] || ''; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + // On Windows use the USERPROFILE env variable + baseLocation = process.env['USERPROFILE'] || 'C:\\'; + } + else { + if (process.platform === 'darwin') { + baseLocation = '/Users'; + } + else { + baseLocation = '/home'; + } + } + tempDirectory = path.join(baseLocation, 'actions', 'temp'); + } + const dest = path.join(tempDirectory, uuid_1.v4()); + yield io.mkdirP(dest); + return dest; + }); +} +exports.createTempDirectory = createTempDirectory; +function getArchiveFileSizeInBytes(filePath) { + return fs.statSync(filePath).size; +} +exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; +function resolvePaths(patterns) { + var e_1, _a; + var _b; + return __awaiter(this, void 0, void 0, function* () { + const paths = []; + const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const globber = yield glob.create(patterns.join('\n'), { + implicitDescendants: false + }); + try { + for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { + const file = _d.value; + const relativeFile = path + .relative(workspace, file) + .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. 
+ paths.push(`${relativeFile}`); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + } + finally { if (e_1) throw e_1.error; } + } + return paths; + }); +} +exports.resolvePaths = resolvePaths; +function unlinkFile(filePath) { + return __awaiter(this, void 0, void 0, function* () { + return util.promisify(fs.unlink)(filePath); + }); +} +exports.unlinkFile = unlinkFile; +function getVersion(app) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Checking ${app} --version`); + let versionOutput = ''; + try { + yield exec.exec(`${app} --version`, [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + } + catch (err) { + core.debug(err.message); + } + versionOutput = versionOutput.trim(); + core.debug(versionOutput); + return versionOutput; + }); +} +// Use zstandard if possible to maximize cache performance +function getCompressionMethod() { + return __awaiter(this, void 0, void 0, function* () { + if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { + // Disable zstd due to bug https://github.com/actions/cache/issues/301 + return constants_1.CompressionMethod.Gzip; + } + const versionOutput = yield getVersion('zstd'); + const version = semver.clean(versionOutput); + if (!versionOutput.toLowerCase().includes('zstd command line interface')) { + // zstd is not installed + return constants_1.CompressionMethod.Gzip; + } + else if (!version || semver.lt(version, 'v1.3.2')) { + // zstd is installed but using a version earlier than v1.3.2 + // v1.3.2 is required to use the `--long` options in zstd + return constants_1.CompressionMethod.ZstdWithoutLong; + } + else { + return constants_1.CompressionMethod.Zstd; + } + }); +} +exports.getCompressionMethod = getCompressionMethod; +function getCacheFileName(compressionMethod) { + return compressionMethod === constants_1.CompressionMethod.Gzip + ? 
constants_1.CacheFilename.Gzip + : constants_1.CacheFilename.Zstd; +} +exports.getCacheFileName = getCacheFileName; +function isGnuTarInstalled() { + return __awaiter(this, void 0, void 0, function* () { + const versionOutput = yield getVersion('tar'); + return versionOutput.toLowerCase().includes('gnu tar'); + }); +} +exports.isGnuTarInstalled = isGnuTarInstalled; +function assertDefined(name, value) { + if (value === undefined) { + throw Error(`Expected ${name} but value was undefiend`); + } + return value; +} +exports.assertDefined = assertDefined; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; +//# sourceMappingURL=cacheUtils.js.map + +/***/ }), +/* 16 */ +/***/ (function(module) { + +module.exports = require("tls"); + +/***/ }), +/* 17 */, +/* 18 */ +/***/ (function() { + +eval("require")("encoding"); + + +/***/ }), +/* 19 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDNotation, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = __webpack_require__(257); + + NodeType = __webpack_require__(683); + + module.exports = XMLDTDNotation = (function(superClass) { + extend(XMLDTDNotation, superClass); + + function XMLDTDNotation(parent, name, value) { + XMLDTDNotation.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing DTD notation name. " + this.debugInfo(name)); + } + if (!value.pubID && !value.sysID) { + throw new Error("Public or system identifiers are required for an external entity. " + this.debugInfo(name)); + } + this.name = this.stringify.name(name); + this.type = NodeType.NotationDeclaration; + if (value.pubID != null) { + this.pubID = this.stringify.dtdPubID(value.pubID); + } + if (value.sysID != null) { + this.sysID = this.stringify.dtdSysID(value.sysID); + } + } + + Object.defineProperty(XMLDTDNotation.prototype, 'publicId', { + get: function() { + return this.pubID; + } + }); + + Object.defineProperty(XMLDTDNotation.prototype, 'systemId', { + get: function() { + return this.sysID; + } + }); + + XMLDTDNotation.prototype.toString = function(options) { + return this.options.writer.dtdNotation(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDNotation; + + })(XMLNode); + +}).call(this); + + +/***/ }), +/* 20 */, +/* 21 */, +/* 22 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PropagationAPI = void 0; +var global_utils_1 = __webpack_require__(525); +var NoopTextMapPropagator_1 = __webpack_require__(918); +var TextMapPropagator_1 = __webpack_require__(881); +var context_helpers_1 = __webpack_require__(483); +var utils_1 = __webpack_require__(112); +var diag_1 = __webpack_require__(118); +var API_NAME = 'propagation'; +var NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +var PropagationAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function PropagationAPI() { + this.createBaggage = utils_1.createBaggage; + this.getBaggage = context_helpers_1.getBaggage; + this.setBaggage = context_helpers_1.setBaggage; + this.deleteBaggage = context_helpers_1.deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + PropagationAPI.getInstance = function () { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + }; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + PropagationAPI.prototype.setGlobalPropagator = function (propagator) { + return global_utils_1.registerGlobal(API_NAME, propagator, diag_1.DiagAPI.instance()); + }; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + PropagationAPI.prototype.inject = function (context, carrier, setter) { + if (setter === void 0) { setter = TextMapPropagator_1.defaultTextMapSetter; } + return this._getGlobalPropagator().inject(context, carrier, setter); + }; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + PropagationAPI.prototype.extract = function (context, carrier, getter) { + if (getter === void 0) { getter = TextMapPropagator_1.defaultTextMapGetter; } + return this._getGlobalPropagator().extract(context, carrier, getter); + }; + /** + * Return a list of all fields which may be used by the propagator. 
+ */ + PropagationAPI.prototype.fields = function () { + return this._getGlobalPropagator().fields(); + }; + /** Remove the global propagator */ + PropagationAPI.prototype.disable = function () { + global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + }; + PropagationAPI.prototype._getGlobalPropagator = function () { + return global_utils_1.getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + }; + return PropagationAPI; +}()); +exports.PropagationAPI = PropagationAPI; +//# sourceMappingURL=propagation.js.map + +/***/ }), +/* 23 */, +/* 24 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; + +/***/ }), +/* 25 */, +/* 26 */, +/* 27 */, +/* 28 */, +/* 29 */, +/* 30 */, +/* 31 */, +/* 32 */, +/* 33 */, +/* 34 */, +/* 35 */, +/* 36 */, +/* 37 */, +/* 38 */, +/* 39 */, +/* 40 */, +/* 41 */, +/* 42 */, +/* 43 */, +/* 44 */, +/* 45 */, +/* 46 */, +/* 47 */, +/* 48 */, +/* 49 */, +/* 50 */ +/***/ (function(module) { + +module.exports = ["ac","com.ac","edu.ac","gov.ac","net.ac","mil.ac","org.ac","ad","nom.ad","ae","co.ae","net.ae","org.ae","sch.ae","ac.ae","gov.ae","mil.ae","aero","accident-investigation.aero","accident-prevention.aero","aerobatic.aero","aeroclub.aero","aerodrome.aero","agents.aero","aircraft.aero","airline.aero","airport.aero","air-surveillance.aero","airtraffic.aero","air-traffic-control.aero","ambulance.aero","amusement.aero","association.aero","author.aero","ballooning.aero","broker.aero","caa.aero","cargo.aero","catering.aero","certification.aero","championship.aero","charter.aero","civilaviation.aero","club.aero","conference.aero","consultant.aero","consulting.aero","control.aero","council.aero","crew.aero","design.aero","dgca.aero","educator.aero","emergency.aero","engine.aero","engineer.aero","entertainment.aero","equipment.aero","exchange.aero","express.aero","federation.aero","flight.aero","freight.aero","fuel.aero","gliding.aero","government.aero","groundhandling.aero","group.aero","hanggliding.aero","homebuilt.aero","insurance.aero","journal.aero","journalist.aero","leasing.aero","logistics.aero","magazine.aero","maintenance.aero","media.aero","microlight.aero","modelling.aero","navigation.aero","parachuting.aero","paragliding.aero","passenger-association.aero","pilot.aero","press.aero","production.aero","recreation.aero","repbody.aero","res.aero","research.aero","rotorcraft.aero","safety.aero","scientist.aero","services.aero","show.aero","skydiving.aero","software.aero","student.aero","trader.aero","trading.aero","trainer.aero","union.aero","workinggroup.aero","works.aero","af","gov.af","com.af","org.af","net.af","edu.af","ag","com.ag","org.ag","net.ag","co.ag","nom.ag","ai","off.ai","com.ai","net.ai","org.ai","al","com.al","edu.al","gov.al","mil.al","net.al","org.al","am","co.am","com.am","commune.am","net.am","org.am","ao","ed.ao","gv.ao","og.ao","co.ao","pb.ao","it.ao","aq","ar","com.ar","edu.ar","gob.ar","gov.ar","int.ar","mil.ar","musica.ar","net.ar","org.ar","tur.ar","arpa","e164.arpa","in-addr.arpa","ip6.arpa","iris.arpa","uri.arpa","urn.arpa","as","gov.as","asia","at","ac.at","co.at","gv.at","or.at","au","com.au","net.au","org.au","edu.au","gov.au","asn.au","id.au","info.au","conf.au","oz.au","act.au","nsw.au","nt.au","qld.au","sa.au","tas.au","vic.au","wa.au","act.edu.au","catholic.edu.au","nsw.edu.au","nt.edu.au","qld.edu.au","sa.edu.au","tas.edu.au","v
ic.edu.au","wa.edu.au","qld.gov.au","sa.gov.au","tas.gov.au","vic.gov.au","wa.gov.au","education.tas.edu.au","schools.nsw.edu.au","aw","com.aw","ax","az","com.az","net.az","int.az","gov.az","org.az","edu.az","info.az","pp.az","mil.az","name.az","pro.az","biz.az","ba","com.ba","edu.ba","gov.ba","mil.ba","net.ba","org.ba","bb","biz.bb","co.bb","com.bb","edu.bb","gov.bb","info.bb","net.bb","org.bb","store.bb","tv.bb","*.bd","be","ac.be","bf","gov.bf","bg","a.bg","b.bg","c.bg","d.bg","e.bg","f.bg","g.bg","h.bg","i.bg","j.bg","k.bg","l.bg","m.bg","n.bg","o.bg","p.bg","q.bg","r.bg","s.bg","t.bg","u.bg","v.bg","w.bg","x.bg","y.bg","z.bg","0.bg","1.bg","2.bg","3.bg","4.bg","5.bg","6.bg","7.bg","8.bg","9.bg","bh","com.bh","edu.bh","net.bh","org.bh","gov.bh","bi","co.bi","com.bi","edu.bi","or.bi","org.bi","biz","bj","asso.bj","barreau.bj","gouv.bj","bm","com.bm","edu.bm","gov.bm","net.bm","org.bm","bn","com.bn","edu.bn","gov.bn","net.bn","org.bn","bo","com.bo","edu.bo","gob.bo","int.bo","org.bo","net.bo","mil.bo","tv.bo","web.bo","academia.bo","agro.bo","arte.bo","blog.bo","bolivia.bo","ciencia.bo","cooperativa.bo","democracia.bo","deporte.bo","ecologia.bo","economia.bo","empresa.bo","indigena.bo","industria.bo","info.bo","medicina.bo","movimiento.bo","musica.bo","natural.bo","nombre.bo","noticias.bo","patria.bo","politica.bo","profesional.bo","plurinacional.bo","pueblo.bo","revista.bo","salud.bo","tecnologia.bo","tksat.bo","transporte.bo","wiki.bo","br","9guacu.br","abc.br","adm.br","adv.br","agr.br","aju.br","am.br","anani.br","aparecida.br","arq.br","art.br","ato.br","b.br","barueri.br","belem.br","bhz.br","bio.br","blog.br","bmd.br","boavista.br","bsb.br","campinagrande.br","campinas.br","caxias.br","cim.br","cng.br","cnt.br","com.br","contagem.br","coop.br","cri.br","cuiaba.br","curitiba.br","def.br","ecn.br","eco.br","edu.br","emp.br","eng.br","esp.br","etc.br","eti.br","far.br","feira.br","flog.br","floripa.br","fm.br","fnd.br","fortal.br","fot.br","foz.br","fst.br","g12.br","ggf.br","goiania.br","gov.br","ac.gov.br","al.gov.br","am.gov.br","ap.gov.br","ba.gov.br","ce.gov.br","df.gov.br","es.gov.br","go.gov.br","ma.gov.br","mg.gov.br","ms.gov.br","mt.gov.br","pa.gov.br","pb.gov.br","pe.gov.br","pi.gov.br","pr.gov.br","rj.gov.br","rn.gov.br","ro.gov.br","rr.gov.br","rs.gov.br","sc.gov.br","se.gov.br","sp.gov.br","to.gov.br","gru.br","imb.br","ind.br","inf.br","jab.br","jampa.br","jdf.br","joinville.br","jor.br","jus.br","leg.br","lel.br","londrina.br","macapa.br","maceio.br","manaus.br","maringa.br","mat.br","med.br","mil.br","morena.br","mp.br","mus.br","natal.br","net.br","niteroi.br","*.nom.br","not.br","ntr.br","odo.br","ong.br","org.br","osasco.br","palmas.br","poa.br","ppg.br","pro.br","psc.br","psi.br","pvh.br","qsl.br","radio.br","rec.br","recife.br","ribeirao.br","rio.br","riobranco.br","riopreto.br","salvador.br","sampa.br","santamaria.br","santoandre.br","saobernardo.br","saogonca.br","sjc.br","slg.br","slz.br","sorocaba.br","srv.br","taxi.br","tc.br","teo.br","the.br","tmp.br","trd.br","tur.br","tv.br","udi.br","vet.br","vix.br","vlog.br","wiki.br","zlg.br","bs","com.bs","net.bs","org.bs","edu.bs","gov.bs","bt","com.bt","edu.bt","gov.bt","net.bt","org.bt","bv","bw","co.bw","org.bw","by","gov.by","mil.by","com.by","of.by","bz","com.bz","net.bz","org.bz","edu.bz","gov.bz","ca","ab.ca","bc.ca","mb.ca","nb.ca","nf.ca","nl.ca","ns.ca","nt.ca","nu.ca","on.ca","pe.ca","qc.ca","sk.ca","yk.ca","gc.ca","cat","cc","cd","gov.cd","cf","cg","ch","ci","org.ci","or.ci","com.ci","co.ci","edu.ci","ed
.ci","ac.ci","net.ci","go.ci","asso.ci","aéroport.ci","int.ci","presse.ci","md.ci","gouv.ci","*.ck","!www.ck","cl","aprendemas.cl","co.cl","gob.cl","gov.cl","mil.cl","cm","co.cm","com.cm","gov.cm","net.cm","cn","ac.cn","com.cn","edu.cn","gov.cn","net.cn","org.cn","mil.cn","公司.cn","网络.cn","網絡.cn","ah.cn","bj.cn","cq.cn","fj.cn","gd.cn","gs.cn","gz.cn","gx.cn","ha.cn","hb.cn","he.cn","hi.cn","hl.cn","hn.cn","jl.cn","js.cn","jx.cn","ln.cn","nm.cn","nx.cn","qh.cn","sc.cn","sd.cn","sh.cn","sn.cn","sx.cn","tj.cn","xj.cn","xz.cn","yn.cn","zj.cn","hk.cn","mo.cn","tw.cn","co","arts.co","com.co","edu.co","firm.co","gov.co","info.co","int.co","mil.co","net.co","nom.co","org.co","rec.co","web.co","com","coop","cr","ac.cr","co.cr","ed.cr","fi.cr","go.cr","or.cr","sa.cr","cu","com.cu","edu.cu","org.cu","net.cu","gov.cu","inf.cu","cv","cw","com.cw","edu.cw","net.cw","org.cw","cx","gov.cx","cy","ac.cy","biz.cy","com.cy","ekloges.cy","gov.cy","ltd.cy","name.cy","net.cy","org.cy","parliament.cy","press.cy","pro.cy","tm.cy","cz","de","dj","dk","dm","com.dm","net.dm","org.dm","edu.dm","gov.dm","do","art.do","com.do","edu.do","gob.do","gov.do","mil.do","net.do","org.do","sld.do","web.do","dz","com.dz","org.dz","net.dz","gov.dz","edu.dz","asso.dz","pol.dz","art.dz","ec","com.ec","info.ec","net.ec","fin.ec","k12.ec","med.ec","pro.ec","org.ec","edu.ec","gov.ec","gob.ec","mil.ec","edu","ee","edu.ee","gov.ee","riik.ee","lib.ee","med.ee","com.ee","pri.ee","aip.ee","org.ee","fie.ee","eg","com.eg","edu.eg","eun.eg","gov.eg","mil.eg","name.eg","net.eg","org.eg","sci.eg","*.er","es","com.es","nom.es","org.es","gob.es","edu.es","et","com.et","gov.et","org.et","edu.et","biz.et","name.et","info.et","net.et","eu","fi","aland.fi","fj","ac.fj","biz.fj","com.fj","gov.fj","info.fj","mil.fj","name.fj","net.fj","org.fj","pro.fj","*.fk","fm","fo","fr","asso.fr","com.fr","gouv.fr","nom.fr","prd.fr","tm.fr","aeroport.fr","avocat.fr","avoues.fr","cci.fr","chambagri.fr","chirurgiens-dentistes.fr","experts-comptables.fr","geometre-expert.fr","greta.fr","huissier-justice.fr","medecin.fr","notaires.fr","pharmacien.fr","port.fr","veterinaire.fr","ga","gb","gd","ge","com.ge","edu.ge","gov.ge","org.ge","mil.ge","net.ge","pvt.ge","gf","gg","co.gg","net.gg","org.gg","gh","com.gh","edu.gh","gov.gh","org.gh","mil.gh","gi","com.gi","ltd.gi","gov.gi","mod.gi","edu.gi","org.gi","gl","co.gl","com.gl","edu.gl","net.gl","org.gl","gm","gn","ac.gn","com.gn","edu.gn","gov.gn","org.gn","net.gn","gov","gp","com.gp","net.gp","mobi.gp","edu.gp","org.gp","asso.gp","gq","gr","com.gr","edu.gr","net.gr","org.gr","gov.gr","gs","gt","com.gt","edu.gt","gob.gt","ind.gt","mil.gt","net.gt","org.gt","gu","com.gu","edu.gu","gov.gu","guam.gu","info.gu","net.gu","org.gu","web.gu","gw","gy","co.gy","com.gy","edu.gy","gov.gy","net.gy","org.gy","hk","com.hk","edu.hk","gov.hk","idv.hk","net.hk","org.hk","公司.hk","教育.hk","敎育.hk","政府.hk","個人.hk","个人.hk","箇人.hk","網络.hk","网络.hk","组織.hk","網絡.hk","网絡.hk","组织.hk","組織.hk","組织.hk","hm","hn","com.hn","edu.hn","org.hn","net.hn","mil.hn","gob.hn","hr","iz.hr","from.hr","name.hr","com.hr","ht","com.ht","shop.ht","firm.ht","info.ht","adult.ht","net.ht","pro.ht","org.ht","med.ht","art.ht","coop.ht","pol.ht","asso.ht","edu.ht","rel.ht","gouv.ht","perso.ht","hu","co.hu","info.hu","org.hu","priv.hu","sport.hu","tm.hu","2000.hu","agrar.hu","bolt.hu","casino.hu","city.hu","erotica.hu","erotika.hu","film.hu","forum.hu","games.hu","hotel.hu","ingatlan.hu","jogasz.hu","konyvelo.hu","lakas.hu","media.hu","news.hu","reklam.hu","sex.hu","shop.hu","suli
.hu","szex.hu","tozsde.hu","utazas.hu","video.hu","id","ac.id","biz.id","co.id","desa.id","go.id","mil.id","my.id","net.id","or.id","ponpes.id","sch.id","web.id","ie","gov.ie","il","ac.il","co.il","gov.il","idf.il","k12.il","muni.il","net.il","org.il","im","ac.im","co.im","com.im","ltd.co.im","net.im","org.im","plc.co.im","tt.im","tv.im","in","co.in","firm.in","net.in","org.in","gen.in","ind.in","nic.in","ac.in","edu.in","res.in","gov.in","mil.in","info","int","eu.int","io","com.io","iq","gov.iq","edu.iq","mil.iq","com.iq","org.iq","net.iq","ir","ac.ir","co.ir","gov.ir","id.ir","net.ir","org.ir","sch.ir","ایران.ir","ايران.ir","is","net.is","com.is","edu.is","gov.is","org.is","int.is","it","gov.it","edu.it","abr.it","abruzzo.it","aosta-valley.it","aostavalley.it","bas.it","basilicata.it","cal.it","calabria.it","cam.it","campania.it","emilia-romagna.it","emiliaromagna.it","emr.it","friuli-v-giulia.it","friuli-ve-giulia.it","friuli-vegiulia.it","friuli-venezia-giulia.it","friuli-veneziagiulia.it","friuli-vgiulia.it","friuliv-giulia.it","friulive-giulia.it","friulivegiulia.it","friulivenezia-giulia.it","friuliveneziagiulia.it","friulivgiulia.it","fvg.it","laz.it","lazio.it","lig.it","liguria.it","lom.it","lombardia.it","lombardy.it","lucania.it","mar.it","marche.it","mol.it","molise.it","piedmont.it","piemonte.it","pmn.it","pug.it","puglia.it","sar.it","sardegna.it","sardinia.it","sic.it","sicilia.it","sicily.it","taa.it","tos.it","toscana.it","trentin-sud-tirol.it","trentin-süd-tirol.it","trentin-sudtirol.it","trentin-südtirol.it","trentin-sued-tirol.it","trentin-suedtirol.it","trentino-a-adige.it","trentino-aadige.it","trentino-alto-adige.it","trentino-altoadige.it","trentino-s-tirol.it","trentino-stirol.it","trentino-sud-tirol.it","trentino-süd-tirol.it","trentino-sudtirol.it","trentino-südtirol.it","trentino-sued-tirol.it","trentino-suedtirol.it","trentino.it","trentinoa-adige.it","trentinoaadige.it","trentinoalto-adige.it","trentinoaltoadige.it","trentinos-tirol.it","trentinostirol.it","trentinosud-tirol.it","trentinosüd-tirol.it","trentinosudtirol.it","trentinosüdtirol.it","trentinosued-tirol.it","trentinosuedtirol.it","trentinsud-tirol.it","trentinsüd-tirol.it","trentinsudtirol.it","trentinsüdtirol.it","trentinsued-tirol.it","trentinsuedtirol.it","tuscany.it","umb.it","umbria.it","val-d-aosta.it","val-daosta.it","vald-aosta.it","valdaosta.it","valle-aosta.it","valle-d-aosta.it","valle-daosta.it","valleaosta.it","valled-aosta.it","valledaosta.it","vallee-aoste.it","vallée-aoste.it","vallee-d-aoste.it","vallée-d-aoste.it","valleeaoste.it","valléeaoste.it","valleedaoste.it","valléedaoste.it","vao.it","vda.it","ven.it","veneto.it","ag.it","agrigento.it","al.it","alessandria.it","alto-adige.it","altoadige.it","an.it","ancona.it","andria-barletta-trani.it","andria-trani-barletta.it","andriabarlettatrani.it","andriatranibarletta.it","ao.it","aosta.it","aoste.it","ap.it","aq.it","aquila.it","ar.it","arezzo.it","ascoli-piceno.it","ascolipiceno.it","asti.it","at.it","av.it","avellino.it","ba.it","balsan-sudtirol.it","balsan-südtirol.it","balsan-suedtirol.it","balsan.it","bari.it","barletta-trani-andria.it","barlettatraniandria.it","belluno.it","benevento.it","bergamo.it","bg.it","bi.it","biella.it","bl.it","bn.it","bo.it","bologna.it","bolzano-altoadige.it","bolzano.it","bozen-sudtirol.it","bozen-südtirol.it","bozen-suedtirol.it","bozen.it","br.it","brescia.it","brindisi.it","bs.it","bt.it","bulsan-sudtirol.it","bulsan-südtirol.it","bulsan-suedtirol.it","bulsan.it","bz.it","ca.it","cagliari.it","c
altanissetta.it","campidano-medio.it","campidanomedio.it","campobasso.it","carbonia-iglesias.it","carboniaiglesias.it","carrara-massa.it","carraramassa.it","caserta.it","catania.it","catanzaro.it","cb.it","ce.it","cesena-forli.it","cesena-forlì.it","cesenaforli.it","cesenaforlì.it","ch.it","chieti.it","ci.it","cl.it","cn.it","co.it","como.it","cosenza.it","cr.it","cremona.it","crotone.it","cs.it","ct.it","cuneo.it","cz.it","dell-ogliastra.it","dellogliastra.it","en.it","enna.it","fc.it","fe.it","fermo.it","ferrara.it","fg.it","fi.it","firenze.it","florence.it","fm.it","foggia.it","forli-cesena.it","forlì-cesena.it","forlicesena.it","forlìcesena.it","fr.it","frosinone.it","ge.it","genoa.it","genova.it","go.it","gorizia.it","gr.it","grosseto.it","iglesias-carbonia.it","iglesiascarbonia.it","im.it","imperia.it","is.it","isernia.it","kr.it","la-spezia.it","laquila.it","laspezia.it","latina.it","lc.it","le.it","lecce.it","lecco.it","li.it","livorno.it","lo.it","lodi.it","lt.it","lu.it","lucca.it","macerata.it","mantova.it","massa-carrara.it","massacarrara.it","matera.it","mb.it","mc.it","me.it","medio-campidano.it","mediocampidano.it","messina.it","mi.it","milan.it","milano.it","mn.it","mo.it","modena.it","monza-brianza.it","monza-e-della-brianza.it","monza.it","monzabrianza.it","monzaebrianza.it","monzaedellabrianza.it","ms.it","mt.it","na.it","naples.it","napoli.it","no.it","novara.it","nu.it","nuoro.it","og.it","ogliastra.it","olbia-tempio.it","olbiatempio.it","or.it","oristano.it","ot.it","pa.it","padova.it","padua.it","palermo.it","parma.it","pavia.it","pc.it","pd.it","pe.it","perugia.it","pesaro-urbino.it","pesarourbino.it","pescara.it","pg.it","pi.it","piacenza.it","pisa.it","pistoia.it","pn.it","po.it","pordenone.it","potenza.it","pr.it","prato.it","pt.it","pu.it","pv.it","pz.it","ra.it","ragusa.it","ravenna.it","rc.it","re.it","reggio-calabria.it","reggio-emilia.it","reggiocalabria.it","reggioemilia.it","rg.it","ri.it","rieti.it","rimini.it","rm.it","rn.it","ro.it","roma.it","rome.it","rovigo.it","sa.it","salerno.it","sassari.it","savona.it","si.it","siena.it","siracusa.it","so.it","sondrio.it","sp.it","sr.it","ss.it","suedtirol.it","südtirol.it","sv.it","ta.it","taranto.it","te.it","tempio-olbia.it","tempioolbia.it","teramo.it","terni.it","tn.it","to.it","torino.it","tp.it","tr.it","trani-andria-barletta.it","trani-barletta-andria.it","traniandriabarletta.it","tranibarlettaandria.it","trapani.it","trento.it","treviso.it","trieste.it","ts.it","turin.it","tv.it","ud.it","udine.it","urbino-pesaro.it","urbinopesaro.it","va.it","varese.it","vb.it","vc.it","ve.it","venezia.it","venice.it","verbania.it","vercelli.it","verona.it","vi.it","vibo-valentia.it","vibovalentia.it","vicenza.it","viterbo.it","vr.it","vs.it","vt.it","vv.it","je","co.je","net.je","org.je","*.jm","jo","com.jo","org.jo","net.jo","edu.jo","sch.jo","gov.jo","mil.jo","name.jo","jobs","jp","ac.jp","ad.jp","co.jp","ed.jp","go.jp","gr.jp","lg.jp","ne.jp","or.jp","aichi.jp","akita.jp","aomori.jp","chiba.jp","ehime.jp","fukui.jp","fukuoka.jp","fukushima.jp","gifu.jp","gunma.jp","hiroshima.jp","hokkaido.jp","hyogo.jp","ibaraki.jp","ishikawa.jp","iwate.jp","kagawa.jp","kagoshima.jp","kanagawa.jp","kochi.jp","kumamoto.jp","kyoto.jp","mie.jp","miyagi.jp","miyazaki.jp","nagano.jp","nagasaki.jp","nara.jp","niigata.jp","oita.jp","okayama.jp","okinawa.jp","osaka.jp","saga.jp","saitama.jp","shiga.jp","shimane.jp","shizuoka.jp","tochigi.jp","tokushima.jp","tokyo.jp","tottori.jp","toyama.jp","wakayama.jp","yamagata.jp","yamaguchi.jp","yamana
shi.jp","栃木.jp","愛知.jp","愛媛.jp","兵庫.jp","熊本.jp","茨城.jp","北海道.jp","千葉.jp","和歌山.jp","長崎.jp","長野.jp","新潟.jp","青森.jp","静岡.jp","東京.jp","石川.jp","埼玉.jp","三重.jp","京都.jp","佐賀.jp","大分.jp","大阪.jp","奈良.jp","宮城.jp","宮崎.jp","富山.jp","山口.jp","山形.jp","山梨.jp","岩手.jp","岐阜.jp","岡山.jp","島根.jp","広島.jp","徳島.jp","沖縄.jp","滋賀.jp","神奈川.jp","福井.jp","福岡.jp","福島.jp","秋田.jp","群馬.jp","香川.jp","高知.jp","鳥取.jp","鹿児島.jp","*.kawasaki.jp","*.kitakyushu.jp","*.kobe.jp","*.nagoya.jp","*.sapporo.jp","*.sendai.jp","*.yokohama.jp","!city.kawasaki.jp","!city.kitakyushu.jp","!city.kobe.jp","!city.nagoya.jp","!city.sapporo.jp","!city.sendai.jp","!city.yokohama.jp","aisai.aichi.jp","ama.aichi.jp","anjo.aichi.jp","asuke.aichi.jp","chiryu.aichi.jp","chita.aichi.jp","fuso.aichi.jp","gamagori.aichi.jp","handa.aichi.jp","hazu.aichi.jp","hekinan.aichi.jp","higashiura.aichi.jp","ichinomiya.aichi.jp","inazawa.aichi.jp","inuyama.aichi.jp","isshiki.aichi.jp","iwakura.aichi.jp","kanie.aichi.jp","kariya.aichi.jp","kasugai.aichi.jp","kira.aichi.jp","kiyosu.aichi.jp","komaki.aichi.jp","konan.aichi.jp","kota.aichi.jp","mihama.aichi.jp","miyoshi.aichi.jp","nishio.aichi.jp","nisshin.aichi.jp","obu.aichi.jp","oguchi.aichi.jp","oharu.aichi.jp","okazaki.aichi.jp","owariasahi.aichi.jp","seto.aichi.jp","shikatsu.aichi.jp","shinshiro.aichi.jp","shitara.aichi.jp","tahara.aichi.jp","takahama.aichi.jp","tobishima.aichi.jp","toei.aichi.jp","togo.aichi.jp","tokai.aichi.jp","tokoname.aichi.jp","toyoake.aichi.jp","toyohashi.aichi.jp","toyokawa.aichi.jp","toyone.aichi.jp","toyota.aichi.jp","tsushima.aichi.jp","yatomi.aichi.jp","akita.akita.jp","daisen.akita.jp","fujisato.akita.jp","gojome.akita.jp","hachirogata.akita.jp","happou.akita.jp","higashinaruse.akita.jp","honjo.akita.jp","honjyo.akita.jp","ikawa.akita.jp","kamikoani.akita.jp","kamioka.akita.jp","katagami.akita.jp","kazuno.akita.jp","kitaakita.akita.jp","kosaka.akita.jp","kyowa.akita.jp","misato.akita.jp","mitane.akita.jp","moriyoshi.akita.jp","nikaho.akita.jp","noshiro.akita.jp","odate.akita.jp","oga.akita.jp","ogata.akita.jp","semboku.akita.jp","yokote.akita.jp","yurihonjo.akita.jp","aomori.aomori.jp","gonohe.aomori.jp","hachinohe.aomori.jp","hashikami.aomori.jp","hiranai.aomori.jp","hirosaki.aomori.jp","itayanagi.aomori.jp","kuroishi.aomori.jp","misawa.aomori.jp","mutsu.aomori.jp","nakadomari.aomori.jp","noheji.aomori.jp","oirase.aomori.jp","owani.aomori.jp","rokunohe.aomori.jp","sannohe.aomori.jp","shichinohe.aomori.jp","shingo.aomori.jp","takko.aomori.jp","towada.aomori.jp","tsugaru.aomori.jp","tsuruta.aomori.jp","abiko.chiba.jp","asahi.chiba.jp","chonan.chiba.jp","chosei.chiba.jp","choshi.chiba.jp","chuo.chiba.jp","funabashi.chiba.jp","futtsu.chiba.jp","hanamigawa.chiba.jp","ichihara.chiba.jp","ichikawa.chiba.jp","ichinomiya.chiba.jp","inzai.chiba.jp","isumi.chiba.jp","kamagaya.chiba.jp","kamogawa.chiba.jp","kashiwa.chiba.jp","katori.chiba.jp","katsuura.chiba.jp","kimitsu.chiba.jp","kisarazu.chiba.jp","kozaki.chiba.jp","kujukuri.chiba.jp","kyonan.chiba.jp","matsudo.chiba.jp","midori.chiba.jp","mihama.chiba.jp","minamiboso.chiba.jp","mobara.chiba.jp","mutsuzawa.chiba.jp","nagara.chiba.jp","nagareyama.chiba.jp","narashino.chiba.jp","narita.chiba.jp","noda.chiba.jp","oamishirasato.chiba.jp","omigawa.chiba.jp","onjuku.chiba.jp","otaki.chiba.jp","sakae.chiba.jp","sakura.chiba.jp","shimofusa.chiba.jp","shirako.chiba.jp","shiroi.chiba.jp","shisui.chiba.jp","sodegaura.chiba.jp","sosa.chiba.jp","tako.chiba.jp","tateyama.chiba.jp","togane.chiba.jp","tohnosho.chiba.jp","tomisato.chiba.jp","urayasu.chiba.jp","yachim
ata.chiba.jp","yachiyo.chiba.jp","yokaichiba.chiba.jp","yokoshibahikari.chiba.jp","yotsukaido.chiba.jp","ainan.ehime.jp","honai.ehime.jp","ikata.ehime.jp","imabari.ehime.jp","iyo.ehime.jp","kamijima.ehime.jp","kihoku.ehime.jp","kumakogen.ehime.jp","masaki.ehime.jp","matsuno.ehime.jp","matsuyama.ehime.jp","namikata.ehime.jp","niihama.ehime.jp","ozu.ehime.jp","saijo.ehime.jp","seiyo.ehime.jp","shikokuchuo.ehime.jp","tobe.ehime.jp","toon.ehime.jp","uchiko.ehime.jp","uwajima.ehime.jp","yawatahama.ehime.jp","echizen.fukui.jp","eiheiji.fukui.jp","fukui.fukui.jp","ikeda.fukui.jp","katsuyama.fukui.jp","mihama.fukui.jp","minamiechizen.fukui.jp","obama.fukui.jp","ohi.fukui.jp","ono.fukui.jp","sabae.fukui.jp","sakai.fukui.jp","takahama.fukui.jp","tsuruga.fukui.jp","wakasa.fukui.jp","ashiya.fukuoka.jp","buzen.fukuoka.jp","chikugo.fukuoka.jp","chikuho.fukuoka.jp","chikujo.fukuoka.jp","chikushino.fukuoka.jp","chikuzen.fukuoka.jp","chuo.fukuoka.jp","dazaifu.fukuoka.jp","fukuchi.fukuoka.jp","hakata.fukuoka.jp","higashi.fukuoka.jp","hirokawa.fukuoka.jp","hisayama.fukuoka.jp","iizuka.fukuoka.jp","inatsuki.fukuoka.jp","kaho.fukuoka.jp","kasuga.fukuoka.jp","kasuya.fukuoka.jp","kawara.fukuoka.jp","keisen.fukuoka.jp","koga.fukuoka.jp","kurate.fukuoka.jp","kurogi.fukuoka.jp","kurume.fukuoka.jp","minami.fukuoka.jp","miyako.fukuoka.jp","miyama.fukuoka.jp","miyawaka.fukuoka.jp","mizumaki.fukuoka.jp","munakata.fukuoka.jp","nakagawa.fukuoka.jp","nakama.fukuoka.jp","nishi.fukuoka.jp","nogata.fukuoka.jp","ogori.fukuoka.jp","okagaki.fukuoka.jp","okawa.fukuoka.jp","oki.fukuoka.jp","omuta.fukuoka.jp","onga.fukuoka.jp","onojo.fukuoka.jp","oto.fukuoka.jp","saigawa.fukuoka.jp","sasaguri.fukuoka.jp","shingu.fukuoka.jp","shinyoshitomi.fukuoka.jp","shonai.fukuoka.jp","soeda.fukuoka.jp","sue.fukuoka.jp","tachiarai.fukuoka.jp","tagawa.fukuoka.jp","takata.fukuoka.jp","toho.fukuoka.jp","toyotsu.fukuoka.jp","tsuiki.fukuoka.jp","ukiha.fukuoka.jp","umi.fukuoka.jp","usui.fukuoka.jp","yamada.fukuoka.jp","yame.fukuoka.jp","yanagawa.fukuoka.jp","yukuhashi.fukuoka.jp","aizubange.fukushima.jp","aizumisato.fukushima.jp","aizuwakamatsu.fukushima.jp","asakawa.fukushima.jp","bandai.fukushima.jp","date.fukushima.jp","fukushima.fukushima.jp","furudono.fukushima.jp","futaba.fukushima.jp","hanawa.fukushima.jp","higashi.fukushima.jp","hirata.fukushima.jp","hirono.fukushima.jp","iitate.fukushima.jp","inawashiro.fukushima.jp","ishikawa.fukushima.jp","iwaki.fukushima.jp","izumizaki.fukushima.jp","kagamiishi.fukushima.jp","kaneyama.fukushima.jp","kawamata.fukushima.jp","kitakata.fukushima.jp","kitashiobara.fukushima.jp","koori.fukushima.jp","koriyama.fukushima.jp","kunimi.fukushima.jp","miharu.fukushima.jp","mishima.fukushima.jp","namie.fukushima.jp","nango.fukushima.jp","nishiaizu.fukushima.jp","nishigo.fukushima.jp","okuma.fukushima.jp","omotego.fukushima.jp","ono.fukushima.jp","otama.fukushima.jp","samegawa.fukushima.jp","shimogo.fukushima.jp","shirakawa.fukushima.jp","showa.fukushima.jp","soma.fukushima.jp","sukagawa.fukushima.jp","taishin.fukushima.jp","tamakawa.fukushima.jp","tanagura.fukushima.jp","tenei.fukushima.jp","yabuki.fukushima.jp","yamato.fukushima.jp","yamatsuri.fukushima.jp","yanaizu.fukushima.jp","yugawa.fukushima.jp","anpachi.gifu.jp","ena.gifu.jp","gifu.gifu.jp","ginan.gifu.jp","godo.gifu.jp","gujo.gifu.jp","hashima.gifu.jp","hichiso.gifu.jp","hida.gifu.jp","higashishirakawa.gifu.jp","ibigawa.gifu.jp","ikeda.gifu.jp","kakamigahara.gifu.jp","kani.gifu.jp","kasahara.gifu.jp","kasamatsu.gifu.jp","kawaue.gifu.jp","kitagata.gifu.jp","mino
.gifu.jp","minokamo.gifu.jp","mitake.gifu.jp","mizunami.gifu.jp","motosu.gifu.jp","nakatsugawa.gifu.jp","ogaki.gifu.jp","sakahogi.gifu.jp","seki.gifu.jp","sekigahara.gifu.jp","shirakawa.gifu.jp","tajimi.gifu.jp","takayama.gifu.jp","tarui.gifu.jp","toki.gifu.jp","tomika.gifu.jp","wanouchi.gifu.jp","yamagata.gifu.jp","yaotsu.gifu.jp","yoro.gifu.jp","annaka.gunma.jp","chiyoda.gunma.jp","fujioka.gunma.jp","higashiagatsuma.gunma.jp","isesaki.gunma.jp","itakura.gunma.jp","kanna.gunma.jp","kanra.gunma.jp","katashina.gunma.jp","kawaba.gunma.jp","kiryu.gunma.jp","kusatsu.gunma.jp","maebashi.gunma.jp","meiwa.gunma.jp","midori.gunma.jp","minakami.gunma.jp","naganohara.gunma.jp","nakanojo.gunma.jp","nanmoku.gunma.jp","numata.gunma.jp","oizumi.gunma.jp","ora.gunma.jp","ota.gunma.jp","shibukawa.gunma.jp","shimonita.gunma.jp","shinto.gunma.jp","showa.gunma.jp","takasaki.gunma.jp","takayama.gunma.jp","tamamura.gunma.jp","tatebayashi.gunma.jp","tomioka.gunma.jp","tsukiyono.gunma.jp","tsumagoi.gunma.jp","ueno.gunma.jp","yoshioka.gunma.jp","asaminami.hiroshima.jp","daiwa.hiroshima.jp","etajima.hiroshima.jp","fuchu.hiroshima.jp","fukuyama.hiroshima.jp","hatsukaichi.hiroshima.jp","higashihiroshima.hiroshima.jp","hongo.hiroshima.jp","jinsekikogen.hiroshima.jp","kaita.hiroshima.jp","kui.hiroshima.jp","kumano.hiroshima.jp","kure.hiroshima.jp","mihara.hiroshima.jp","miyoshi.hiroshima.jp","naka.hiroshima.jp","onomichi.hiroshima.jp","osakikamijima.hiroshima.jp","otake.hiroshima.jp","saka.hiroshima.jp","sera.hiroshima.jp","seranishi.hiroshima.jp","shinichi.hiroshima.jp","shobara.hiroshima.jp","takehara.hiroshima.jp","abashiri.hokkaido.jp","abira.hokkaido.jp","aibetsu.hokkaido.jp","akabira.hokkaido.jp","akkeshi.hokkaido.jp","asahikawa.hokkaido.jp","ashibetsu.hokkaido.jp","ashoro.hokkaido.jp","assabu.hokkaido.jp","atsuma.hokkaido.jp","bibai.hokkaido.jp","biei.hokkaido.jp","bifuka.hokkaido.jp","bihoro.hokkaido.jp","biratori.hokkaido.jp","chippubetsu.hokkaido.jp","chitose.hokkaido.jp","date.hokkaido.jp","ebetsu.hokkaido.jp","embetsu.hokkaido.jp","eniwa.hokkaido.jp","erimo.hokkaido.jp","esan.hokkaido.jp","esashi.hokkaido.jp","fukagawa.hokkaido.jp","fukushima.hokkaido.jp","furano.hokkaido.jp","furubira.hokkaido.jp","haboro.hokkaido.jp","hakodate.hokkaido.jp","hamatonbetsu.hokkaido.jp","hidaka.hokkaido.jp","higashikagura.hokkaido.jp","higashikawa.hokkaido.jp","hiroo.hokkaido.jp","hokuryu.hokkaido.jp","hokuto.hokkaido.jp","honbetsu.hokkaido.jp","horokanai.hokkaido.jp","horonobe.hokkaido.jp","ikeda.hokkaido.jp","imakane.hokkaido.jp","ishikari.hokkaido.jp","iwamizawa.hokkaido.jp","iwanai.hokkaido.jp","kamifurano.hokkaido.jp","kamikawa.hokkaido.jp","kamishihoro.hokkaido.jp","kamisunagawa.hokkaido.jp","kamoenai.hokkaido.jp","kayabe.hokkaido.jp","kembuchi.hokkaido.jp","kikonai.hokkaido.jp","kimobetsu.hokkaido.jp","kitahiroshima.hokkaido.jp","kitami.hokkaido.jp","kiyosato.hokkaido.jp","koshimizu.hokkaido.jp","kunneppu.hokkaido.jp","kuriyama.hokkaido.jp","kuromatsunai.hokkaido.jp","kushiro.hokkaido.jp","kutchan.hokkaido.jp","kyowa.hokkaido.jp","mashike.hokkaido.jp","matsumae.hokkaido.jp","mikasa.hokkaido.jp","minamifurano.hokkaido.jp","mombetsu.hokkaido.jp","moseushi.hokkaido.jp","mukawa.hokkaido.jp","muroran.hokkaido.jp","naie.hokkaido.jp","nakagawa.hokkaido.jp","nakasatsunai.hokkaido.jp","nakatombetsu.hokkaido.jp","nanae.hokkaido.jp","nanporo.hokkaido.jp","nayoro.hokkaido.jp","nemuro.hokkaido.jp","niikappu.hokkaido.jp","niki.hokkaido.jp","nishiokoppe.hokkaido.jp","noboribetsu.hokkaido.jp","numata.hokkaido.jp","obihiro.hokkaido.jp"
,"obira.hokkaido.jp","oketo.hokkaido.jp","okoppe.hokkaido.jp","otaru.hokkaido.jp","otobe.hokkaido.jp","otofuke.hokkaido.jp","otoineppu.hokkaido.jp","oumu.hokkaido.jp","ozora.hokkaido.jp","pippu.hokkaido.jp","rankoshi.hokkaido.jp","rebun.hokkaido.jp","rikubetsu.hokkaido.jp","rishiri.hokkaido.jp","rishirifuji.hokkaido.jp","saroma.hokkaido.jp","sarufutsu.hokkaido.jp","shakotan.hokkaido.jp","shari.hokkaido.jp","shibecha.hokkaido.jp","shibetsu.hokkaido.jp","shikabe.hokkaido.jp","shikaoi.hokkaido.jp","shimamaki.hokkaido.jp","shimizu.hokkaido.jp","shimokawa.hokkaido.jp","shinshinotsu.hokkaido.jp","shintoku.hokkaido.jp","shiranuka.hokkaido.jp","shiraoi.hokkaido.jp","shiriuchi.hokkaido.jp","sobetsu.hokkaido.jp","sunagawa.hokkaido.jp","taiki.hokkaido.jp","takasu.hokkaido.jp","takikawa.hokkaido.jp","takinoue.hokkaido.jp","teshikaga.hokkaido.jp","tobetsu.hokkaido.jp","tohma.hokkaido.jp","tomakomai.hokkaido.jp","tomari.hokkaido.jp","toya.hokkaido.jp","toyako.hokkaido.jp","toyotomi.hokkaido.jp","toyoura.hokkaido.jp","tsubetsu.hokkaido.jp","tsukigata.hokkaido.jp","urakawa.hokkaido.jp","urausu.hokkaido.jp","uryu.hokkaido.jp","utashinai.hokkaido.jp","wakkanai.hokkaido.jp","wassamu.hokkaido.jp","yakumo.hokkaido.jp","yoichi.hokkaido.jp","aioi.hyogo.jp","akashi.hyogo.jp","ako.hyogo.jp","amagasaki.hyogo.jp","aogaki.hyogo.jp","asago.hyogo.jp","ashiya.hyogo.jp","awaji.hyogo.jp","fukusaki.hyogo.jp","goshiki.hyogo.jp","harima.hyogo.jp","himeji.hyogo.jp","ichikawa.hyogo.jp","inagawa.hyogo.jp","itami.hyogo.jp","kakogawa.hyogo.jp","kamigori.hyogo.jp","kamikawa.hyogo.jp","kasai.hyogo.jp","kasuga.hyogo.jp","kawanishi.hyogo.jp","miki.hyogo.jp","minamiawaji.hyogo.jp","nishinomiya.hyogo.jp","nishiwaki.hyogo.jp","ono.hyogo.jp","sanda.hyogo.jp","sannan.hyogo.jp","sasayama.hyogo.jp","sayo.hyogo.jp","shingu.hyogo.jp","shinonsen.hyogo.jp","shiso.hyogo.jp","sumoto.hyogo.jp","taishi.hyogo.jp","taka.hyogo.jp","takarazuka.hyogo.jp","takasago.hyogo.jp","takino.hyogo.jp","tamba.hyogo.jp","tatsuno.hyogo.jp","toyooka.hyogo.jp","yabu.hyogo.jp","yashiro.hyogo.jp","yoka.hyogo.jp","yokawa.hyogo.jp","ami.ibaraki.jp","asahi.ibaraki.jp","bando.ibaraki.jp","chikusei.ibaraki.jp","daigo.ibaraki.jp","fujishiro.ibaraki.jp","hitachi.ibaraki.jp","hitachinaka.ibaraki.jp","hitachiomiya.ibaraki.jp","hitachiota.ibaraki.jp","ibaraki.ibaraki.jp","ina.ibaraki.jp","inashiki.ibaraki.jp","itako.ibaraki.jp","iwama.ibaraki.jp","joso.ibaraki.jp","kamisu.ibaraki.jp","kasama.ibaraki.jp","kashima.ibaraki.jp","kasumigaura.ibaraki.jp","koga.ibaraki.jp","miho.ibaraki.jp","mito.ibaraki.jp","moriya.ibaraki.jp","naka.ibaraki.jp","namegata.ibaraki.jp","oarai.ibaraki.jp","ogawa.ibaraki.jp","omitama.ibaraki.jp","ryugasaki.ibaraki.jp","sakai.ibaraki.jp","sakuragawa.ibaraki.jp","shimodate.ibaraki.jp","shimotsuma.ibaraki.jp","shirosato.ibaraki.jp","sowa.ibaraki.jp","suifu.ibaraki.jp","takahagi.ibaraki.jp","tamatsukuri.ibaraki.jp","tokai.ibaraki.jp","tomobe.ibaraki.jp","tone.ibaraki.jp","toride.ibaraki.jp","tsuchiura.ibaraki.jp","tsukuba.ibaraki.jp","uchihara.ibaraki.jp","ushiku.ibaraki.jp","yachiyo.ibaraki.jp","yamagata.ibaraki.jp","yawara.ibaraki.jp","yuki.ibaraki.jp","anamizu.ishikawa.jp","hakui.ishikawa.jp","hakusan.ishikawa.jp","kaga.ishikawa.jp","kahoku.ishikawa.jp","kanazawa.ishikawa.jp","kawakita.ishikawa.jp","komatsu.ishikawa.jp","nakanoto.ishikawa.jp","nanao.ishikawa.jp","nomi.ishikawa.jp","nonoichi.ishikawa.jp","noto.ishikawa.jp","shika.ishikawa.jp","suzu.ishikawa.jp","tsubata.ishikawa.jp","tsurugi.ishikawa.jp","uchinada.ishikawa.jp","wajima.ishikawa.jp","fudai.iwate
.jp","fujisawa.iwate.jp","hanamaki.iwate.jp","hiraizumi.iwate.jp","hirono.iwate.jp","ichinohe.iwate.jp","ichinoseki.iwate.jp","iwaizumi.iwate.jp","iwate.iwate.jp","joboji.iwate.jp","kamaishi.iwate.jp","kanegasaki.iwate.jp","karumai.iwate.jp","kawai.iwate.jp","kitakami.iwate.jp","kuji.iwate.jp","kunohe.iwate.jp","kuzumaki.iwate.jp","miyako.iwate.jp","mizusawa.iwate.jp","morioka.iwate.jp","ninohe.iwate.jp","noda.iwate.jp","ofunato.iwate.jp","oshu.iwate.jp","otsuchi.iwate.jp","rikuzentakata.iwate.jp","shiwa.iwate.jp","shizukuishi.iwate.jp","sumita.iwate.jp","tanohata.iwate.jp","tono.iwate.jp","yahaba.iwate.jp","yamada.iwate.jp","ayagawa.kagawa.jp","higashikagawa.kagawa.jp","kanonji.kagawa.jp","kotohira.kagawa.jp","manno.kagawa.jp","marugame.kagawa.jp","mitoyo.kagawa.jp","naoshima.kagawa.jp","sanuki.kagawa.jp","tadotsu.kagawa.jp","takamatsu.kagawa.jp","tonosho.kagawa.jp","uchinomi.kagawa.jp","utazu.kagawa.jp","zentsuji.kagawa.jp","akune.kagoshima.jp","amami.kagoshima.jp","hioki.kagoshima.jp","isa.kagoshima.jp","isen.kagoshima.jp","izumi.kagoshima.jp","kagoshima.kagoshima.jp","kanoya.kagoshima.jp","kawanabe.kagoshima.jp","kinko.kagoshima.jp","kouyama.kagoshima.jp","makurazaki.kagoshima.jp","matsumoto.kagoshima.jp","minamitane.kagoshima.jp","nakatane.kagoshima.jp","nishinoomote.kagoshima.jp","satsumasendai.kagoshima.jp","soo.kagoshima.jp","tarumizu.kagoshima.jp","yusui.kagoshima.jp","aikawa.kanagawa.jp","atsugi.kanagawa.jp","ayase.kanagawa.jp","chigasaki.kanagawa.jp","ebina.kanagawa.jp","fujisawa.kanagawa.jp","hadano.kanagawa.jp","hakone.kanagawa.jp","hiratsuka.kanagawa.jp","isehara.kanagawa.jp","kaisei.kanagawa.jp","kamakura.kanagawa.jp","kiyokawa.kanagawa.jp","matsuda.kanagawa.jp","minamiashigara.kanagawa.jp","miura.kanagawa.jp","nakai.kanagawa.jp","ninomiya.kanagawa.jp","odawara.kanagawa.jp","oi.kanagawa.jp","oiso.kanagawa.jp","sagamihara.kanagawa.jp","samukawa.kanagawa.jp","tsukui.kanagawa.jp","yamakita.kanagawa.jp","yamato.kanagawa.jp","yokosuka.kanagawa.jp","yugawara.kanagawa.jp","zama.kanagawa.jp","zushi.kanagawa.jp","aki.kochi.jp","geisei.kochi.jp","hidaka.kochi.jp","higashitsuno.kochi.jp","ino.kochi.jp","kagami.kochi.jp","kami.kochi.jp","kitagawa.kochi.jp","kochi.kochi.jp","mihara.kochi.jp","motoyama.kochi.jp","muroto.kochi.jp","nahari.kochi.jp","nakamura.kochi.jp","nankoku.kochi.jp","nishitosa.kochi.jp","niyodogawa.kochi.jp","ochi.kochi.jp","okawa.kochi.jp","otoyo.kochi.jp","otsuki.kochi.jp","sakawa.kochi.jp","sukumo.kochi.jp","susaki.kochi.jp","tosa.kochi.jp","tosashimizu.kochi.jp","toyo.kochi.jp","tsuno.kochi.jp","umaji.kochi.jp","yasuda.kochi.jp","yusuhara.kochi.jp","amakusa.kumamoto.jp","arao.kumamoto.jp","aso.kumamoto.jp","choyo.kumamoto.jp","gyokuto.kumamoto.jp","kamiamakusa.kumamoto.jp","kikuchi.kumamoto.jp","kumamoto.kumamoto.jp","mashiki.kumamoto.jp","mifune.kumamoto.jp","minamata.kumamoto.jp","minamioguni.kumamoto.jp","nagasu.kumamoto.jp","nishihara.kumamoto.jp","oguni.kumamoto.jp","ozu.kumamoto.jp","sumoto.kumamoto.jp","takamori.kumamoto.jp","uki.kumamoto.jp","uto.kumamoto.jp","yamaga.kumamoto.jp","yamato.kumamoto.jp","yatsushiro.kumamoto.jp","ayabe.kyoto.jp","fukuchiyama.kyoto.jp","higashiyama.kyoto.jp","ide.kyoto.jp","ine.kyoto.jp","joyo.kyoto.jp","kameoka.kyoto.jp","kamo.kyoto.jp","kita.kyoto.jp","kizu.kyoto.jp","kumiyama.kyoto.jp","kyotamba.kyoto.jp","kyotanabe.kyoto.jp","kyotango.kyoto.jp","maizuru.kyoto.jp","minami.kyoto.jp","minamiyamashiro.kyoto.jp","miyazu.kyoto.jp","muko.kyoto.jp","nagaokakyo.kyoto.jp","nakagyo.kyoto.jp","nantan.kyoto.jp","oyamazaki.kyoto.jp","sakyo
.kyoto.jp","seika.kyoto.jp","tanabe.kyoto.jp","uji.kyoto.jp","ujitawara.kyoto.jp","wazuka.kyoto.jp","yamashina.kyoto.jp","yawata.kyoto.jp","asahi.mie.jp","inabe.mie.jp","ise.mie.jp","kameyama.mie.jp","kawagoe.mie.jp","kiho.mie.jp","kisosaki.mie.jp","kiwa.mie.jp","komono.mie.jp","kumano.mie.jp","kuwana.mie.jp","matsusaka.mie.jp","meiwa.mie.jp","mihama.mie.jp","minamiise.mie.jp","misugi.mie.jp","miyama.mie.jp","nabari.mie.jp","shima.mie.jp","suzuka.mie.jp","tado.mie.jp","taiki.mie.jp","taki.mie.jp","tamaki.mie.jp","toba.mie.jp","tsu.mie.jp","udono.mie.jp","ureshino.mie.jp","watarai.mie.jp","yokkaichi.mie.jp","furukawa.miyagi.jp","higashimatsushima.miyagi.jp","ishinomaki.miyagi.jp","iwanuma.miyagi.jp","kakuda.miyagi.jp","kami.miyagi.jp","kawasaki.miyagi.jp","marumori.miyagi.jp","matsushima.miyagi.jp","minamisanriku.miyagi.jp","misato.miyagi.jp","murata.miyagi.jp","natori.miyagi.jp","ogawara.miyagi.jp","ohira.miyagi.jp","onagawa.miyagi.jp","osaki.miyagi.jp","rifu.miyagi.jp","semine.miyagi.jp","shibata.miyagi.jp","shichikashuku.miyagi.jp","shikama.miyagi.jp","shiogama.miyagi.jp","shiroishi.miyagi.jp","tagajo.miyagi.jp","taiwa.miyagi.jp","tome.miyagi.jp","tomiya.miyagi.jp","wakuya.miyagi.jp","watari.miyagi.jp","yamamoto.miyagi.jp","zao.miyagi.jp","aya.miyazaki.jp","ebino.miyazaki.jp","gokase.miyazaki.jp","hyuga.miyazaki.jp","kadogawa.miyazaki.jp","kawaminami.miyazaki.jp","kijo.miyazaki.jp","kitagawa.miyazaki.jp","kitakata.miyazaki.jp","kitaura.miyazaki.jp","kobayashi.miyazaki.jp","kunitomi.miyazaki.jp","kushima.miyazaki.jp","mimata.miyazaki.jp","miyakonojo.miyazaki.jp","miyazaki.miyazaki.jp","morotsuka.miyazaki.jp","nichinan.miyazaki.jp","nishimera.miyazaki.jp","nobeoka.miyazaki.jp","saito.miyazaki.jp","shiiba.miyazaki.jp","shintomi.miyazaki.jp","takaharu.miyazaki.jp","takanabe.miyazaki.jp","takazaki.miyazaki.jp","tsuno.miyazaki.jp","achi.nagano.jp","agematsu.nagano.jp","anan.nagano.jp","aoki.nagano.jp","asahi.nagano.jp","azumino.nagano.jp","chikuhoku.nagano.jp","chikuma.nagano.jp","chino.nagano.jp","fujimi.nagano.jp","hakuba.nagano.jp","hara.nagano.jp","hiraya.nagano.jp","iida.nagano.jp","iijima.nagano.jp","iiyama.nagano.jp","iizuna.nagano.jp","ikeda.nagano.jp","ikusaka.nagano.jp","ina.nagano.jp","karuizawa.nagano.jp","kawakami.nagano.jp","kiso.nagano.jp","kisofukushima.nagano.jp","kitaaiki.nagano.jp","komagane.nagano.jp","komoro.nagano.jp","matsukawa.nagano.jp","matsumoto.nagano.jp","miasa.nagano.jp","minamiaiki.nagano.jp","minamimaki.nagano.jp","minamiminowa.nagano.jp","minowa.nagano.jp","miyada.nagano.jp","miyota.nagano.jp","mochizuki.nagano.jp","nagano.nagano.jp","nagawa.nagano.jp","nagiso.nagano.jp","nakagawa.nagano.jp","nakano.nagano.jp","nozawaonsen.nagano.jp","obuse.nagano.jp","ogawa.nagano.jp","okaya.nagano.jp","omachi.nagano.jp","omi.nagano.jp","ookuwa.nagano.jp","ooshika.nagano.jp","otaki.nagano.jp","otari.nagano.jp","sakae.nagano.jp","sakaki.nagano.jp","saku.nagano.jp","sakuho.nagano.jp","shimosuwa.nagano.jp","shinanomachi.nagano.jp","shiojiri.nagano.jp","suwa.nagano.jp","suzaka.nagano.jp","takagi.nagano.jp","takamori.nagano.jp","takayama.nagano.jp","tateshina.nagano.jp","tatsuno.nagano.jp","togakushi.nagano.jp","togura.nagano.jp","tomi.nagano.jp","ueda.nagano.jp","wada.nagano.jp","yamagata.nagano.jp","yamanouchi.nagano.jp","yasaka.nagano.jp","yasuoka.nagano.jp","chijiwa.nagasaki.jp","futsu.nagasaki.jp","goto.nagasaki.jp","hasami.nagasaki.jp","hirado.nagasaki.jp","iki.nagasaki.jp","isahaya.nagasaki.jp","kawatana.nagasaki.jp","kuchinotsu.nagasaki.jp","matsuura.nagasaki.jp","nagasaki.n
agasaki.jp","obama.nagasaki.jp","omura.nagasaki.jp","oseto.nagasaki.jp","saikai.nagasaki.jp","sasebo.nagasaki.jp","seihi.nagasaki.jp","shimabara.nagasaki.jp","shinkamigoto.nagasaki.jp","togitsu.nagasaki.jp","tsushima.nagasaki.jp","unzen.nagasaki.jp","ando.nara.jp","gose.nara.jp","heguri.nara.jp","higashiyoshino.nara.jp","ikaruga.nara.jp","ikoma.nara.jp","kamikitayama.nara.jp","kanmaki.nara.jp","kashiba.nara.jp","kashihara.nara.jp","katsuragi.nara.jp","kawai.nara.jp","kawakami.nara.jp","kawanishi.nara.jp","koryo.nara.jp","kurotaki.nara.jp","mitsue.nara.jp","miyake.nara.jp","nara.nara.jp","nosegawa.nara.jp","oji.nara.jp","ouda.nara.jp","oyodo.nara.jp","sakurai.nara.jp","sango.nara.jp","shimoichi.nara.jp","shimokitayama.nara.jp","shinjo.nara.jp","soni.nara.jp","takatori.nara.jp","tawaramoto.nara.jp","tenkawa.nara.jp","tenri.nara.jp","uda.nara.jp","yamatokoriyama.nara.jp","yamatotakada.nara.jp","yamazoe.nara.jp","yoshino.nara.jp","aga.niigata.jp","agano.niigata.jp","gosen.niigata.jp","itoigawa.niigata.jp","izumozaki.niigata.jp","joetsu.niigata.jp","kamo.niigata.jp","kariwa.niigata.jp","kashiwazaki.niigata.jp","minamiuonuma.niigata.jp","mitsuke.niigata.jp","muika.niigata.jp","murakami.niigata.jp","myoko.niigata.jp","nagaoka.niigata.jp","niigata.niigata.jp","ojiya.niigata.jp","omi.niigata.jp","sado.niigata.jp","sanjo.niigata.jp","seiro.niigata.jp","seirou.niigata.jp","sekikawa.niigata.jp","shibata.niigata.jp","tagami.niigata.jp","tainai.niigata.jp","tochio.niigata.jp","tokamachi.niigata.jp","tsubame.niigata.jp","tsunan.niigata.jp","uonuma.niigata.jp","yahiko.niigata.jp","yoita.niigata.jp","yuzawa.niigata.jp","beppu.oita.jp","bungoono.oita.jp","bungotakada.oita.jp","hasama.oita.jp","hiji.oita.jp","himeshima.oita.jp","hita.oita.jp","kamitsue.oita.jp","kokonoe.oita.jp","kuju.oita.jp","kunisaki.oita.jp","kusu.oita.jp","oita.oita.jp","saiki.oita.jp","taketa.oita.jp","tsukumi.oita.jp","usa.oita.jp","usuki.oita.jp","yufu.oita.jp","akaiwa.okayama.jp","asakuchi.okayama.jp","bizen.okayama.jp","hayashima.okayama.jp","ibara.okayama.jp","kagamino.okayama.jp","kasaoka.okayama.jp","kibichuo.okayama.jp","kumenan.okayama.jp","kurashiki.okayama.jp","maniwa.okayama.jp","misaki.okayama.jp","nagi.okayama.jp","niimi.okayama.jp","nishiawakura.okayama.jp","okayama.okayama.jp","satosho.okayama.jp","setouchi.okayama.jp","shinjo.okayama.jp","shoo.okayama.jp","soja.okayama.jp","takahashi.okayama.jp","tamano.okayama.jp","tsuyama.okayama.jp","wake.okayama.jp","yakage.okayama.jp","aguni.okinawa.jp","ginowan.okinawa.jp","ginoza.okinawa.jp","gushikami.okinawa.jp","haebaru.okinawa.jp","higashi.okinawa.jp","hirara.okinawa.jp","iheya.okinawa.jp","ishigaki.okinawa.jp","ishikawa.okinawa.jp","itoman.okinawa.jp","izena.okinawa.jp","kadena.okinawa.jp","kin.okinawa.jp","kitadaito.okinawa.jp","kitanakagusuku.okinawa.jp","kumejima.okinawa.jp","kunigami.okinawa.jp","minamidaito.okinawa.jp","motobu.okinawa.jp","nago.okinawa.jp","naha.okinawa.jp","nakagusuku.okinawa.jp","nakijin.okinawa.jp","nanjo.okinawa.jp","nishihara.okinawa.jp","ogimi.okinawa.jp","okinawa.okinawa.jp","onna.okinawa.jp","shimoji.okinawa.jp","taketomi.okinawa.jp","tarama.okinawa.jp","tokashiki.okinawa.jp","tomigusuku.okinawa.jp","tonaki.okinawa.jp","urasoe.okinawa.jp","uruma.okinawa.jp","yaese.okinawa.jp","yomitan.okinawa.jp","yonabaru.okinawa.jp","yonaguni.okinawa.jp","zamami.okinawa.jp","abeno.osaka.jp","chihayaakasaka.osaka.jp","chuo.osaka.jp","daito.osaka.jp","fujiidera.osaka.jp","habikino.osaka.jp","hannan.osaka.jp","higashiosaka.osaka.jp","higashisumiyoshi.osaka.jp","h
igashiyodogawa.osaka.jp","hirakata.osaka.jp","ibaraki.osaka.jp","ikeda.osaka.jp","izumi.osaka.jp","izumiotsu.osaka.jp","izumisano.osaka.jp","kadoma.osaka.jp","kaizuka.osaka.jp","kanan.osaka.jp","kashiwara.osaka.jp","katano.osaka.jp","kawachinagano.osaka.jp","kishiwada.osaka.jp","kita.osaka.jp","kumatori.osaka.jp","matsubara.osaka.jp","minato.osaka.jp","minoh.osaka.jp","misaki.osaka.jp","moriguchi.osaka.jp","neyagawa.osaka.jp","nishi.osaka.jp","nose.osaka.jp","osakasayama.osaka.jp","sakai.osaka.jp","sayama.osaka.jp","sennan.osaka.jp","settsu.osaka.jp","shijonawate.osaka.jp","shimamoto.osaka.jp","suita.osaka.jp","tadaoka.osaka.jp","taishi.osaka.jp","tajiri.osaka.jp","takaishi.osaka.jp","takatsuki.osaka.jp","tondabayashi.osaka.jp","toyonaka.osaka.jp","toyono.osaka.jp","yao.osaka.jp","ariake.saga.jp","arita.saga.jp","fukudomi.saga.jp","genkai.saga.jp","hamatama.saga.jp","hizen.saga.jp","imari.saga.jp","kamimine.saga.jp","kanzaki.saga.jp","karatsu.saga.jp","kashima.saga.jp","kitagata.saga.jp","kitahata.saga.jp","kiyama.saga.jp","kouhoku.saga.jp","kyuragi.saga.jp","nishiarita.saga.jp","ogi.saga.jp","omachi.saga.jp","ouchi.saga.jp","saga.saga.jp","shiroishi.saga.jp","taku.saga.jp","tara.saga.jp","tosu.saga.jp","yoshinogari.saga.jp","arakawa.saitama.jp","asaka.saitama.jp","chichibu.saitama.jp","fujimi.saitama.jp","fujimino.saitama.jp","fukaya.saitama.jp","hanno.saitama.jp","hanyu.saitama.jp","hasuda.saitama.jp","hatogaya.saitama.jp","hatoyama.saitama.jp","hidaka.saitama.jp","higashichichibu.saitama.jp","higashimatsuyama.saitama.jp","honjo.saitama.jp","ina.saitama.jp","iruma.saitama.jp","iwatsuki.saitama.jp","kamiizumi.saitama.jp","kamikawa.saitama.jp","kamisato.saitama.jp","kasukabe.saitama.jp","kawagoe.saitama.jp","kawaguchi.saitama.jp","kawajima.saitama.jp","kazo.saitama.jp","kitamoto.saitama.jp","koshigaya.saitama.jp","kounosu.saitama.jp","kuki.saitama.jp","kumagaya.saitama.jp","matsubushi.saitama.jp","minano.saitama.jp","misato.saitama.jp","miyashiro.saitama.jp","miyoshi.saitama.jp","moroyama.saitama.jp","nagatoro.saitama.jp","namegawa.saitama.jp","niiza.saitama.jp","ogano.saitama.jp","ogawa.saitama.jp","ogose.saitama.jp","okegawa.saitama.jp","omiya.saitama.jp","otaki.saitama.jp","ranzan.saitama.jp","ryokami.saitama.jp","saitama.saitama.jp","sakado.saitama.jp","satte.saitama.jp","sayama.saitama.jp","shiki.saitama.jp","shiraoka.saitama.jp","soka.saitama.jp","sugito.saitama.jp","toda.saitama.jp","tokigawa.saitama.jp","tokorozawa.saitama.jp","tsurugashima.saitama.jp","urawa.saitama.jp","warabi.saitama.jp","yashio.saitama.jp","yokoze.saitama.jp","yono.saitama.jp","yorii.saitama.jp","yoshida.saitama.jp","yoshikawa.saitama.jp","yoshimi.saitama.jp","aisho.shiga.jp","gamo.shiga.jp","higashiomi.shiga.jp","hikone.shiga.jp","koka.shiga.jp","konan.shiga.jp","kosei.shiga.jp","koto.shiga.jp","kusatsu.shiga.jp","maibara.shiga.jp","moriyama.shiga.jp","nagahama.shiga.jp","nishiazai.shiga.jp","notogawa.shiga.jp","omihachiman.shiga.jp","otsu.shiga.jp","ritto.shiga.jp","ryuoh.shiga.jp","takashima.shiga.jp","takatsuki.shiga.jp","torahime.shiga.jp","toyosato.shiga.jp","yasu.shiga.jp","akagi.shimane.jp","ama.shimane.jp","gotsu.shimane.jp","hamada.shimane.jp","higashiizumo.shimane.jp","hikawa.shimane.jp","hikimi.shimane.jp","izumo.shimane.jp","kakinoki.shimane.jp","masuda.shimane.jp","matsue.shimane.jp","misato.shimane.jp","nishinoshima.shimane.jp","ohda.shimane.jp","okinoshima.shimane.jp","okuizumo.shimane.jp","shimane.shimane.jp","tamayu.shimane.jp","tsuwano.shimane.jp","unnan.shimane.jp","yakumo.shimane.jp","yasugi
.shimane.jp","yatsuka.shimane.jp","arai.shizuoka.jp","atami.shizuoka.jp","fuji.shizuoka.jp","fujieda.shizuoka.jp","fujikawa.shizuoka.jp","fujinomiya.shizuoka.jp","fukuroi.shizuoka.jp","gotemba.shizuoka.jp","haibara.shizuoka.jp","hamamatsu.shizuoka.jp","higashiizu.shizuoka.jp","ito.shizuoka.jp","iwata.shizuoka.jp","izu.shizuoka.jp","izunokuni.shizuoka.jp","kakegawa.shizuoka.jp","kannami.shizuoka.jp","kawanehon.shizuoka.jp","kawazu.shizuoka.jp","kikugawa.shizuoka.jp","kosai.shizuoka.jp","makinohara.shizuoka.jp","matsuzaki.shizuoka.jp","minamiizu.shizuoka.jp","mishima.shizuoka.jp","morimachi.shizuoka.jp","nishiizu.shizuoka.jp","numazu.shizuoka.jp","omaezaki.shizuoka.jp","shimada.shizuoka.jp","shimizu.shizuoka.jp","shimoda.shizuoka.jp","shizuoka.shizuoka.jp","susono.shizuoka.jp","yaizu.shizuoka.jp","yoshida.shizuoka.jp","ashikaga.tochigi.jp","bato.tochigi.jp","haga.tochigi.jp","ichikai.tochigi.jp","iwafune.tochigi.jp","kaminokawa.tochigi.jp","kanuma.tochigi.jp","karasuyama.tochigi.jp","kuroiso.tochigi.jp","mashiko.tochigi.jp","mibu.tochigi.jp","moka.tochigi.jp","motegi.tochigi.jp","nasu.tochigi.jp","nasushiobara.tochigi.jp","nikko.tochigi.jp","nishikata.tochigi.jp","nogi.tochigi.jp","ohira.tochigi.jp","ohtawara.tochigi.jp","oyama.tochigi.jp","sakura.tochigi.jp","sano.tochigi.jp","shimotsuke.tochigi.jp","shioya.tochigi.jp","takanezawa.tochigi.jp","tochigi.tochigi.jp","tsuga.tochigi.jp","ujiie.tochigi.jp","utsunomiya.tochigi.jp","yaita.tochigi.jp","aizumi.tokushima.jp","anan.tokushima.jp","ichiba.tokushima.jp","itano.tokushima.jp","kainan.tokushima.jp","komatsushima.tokushima.jp","matsushige.tokushima.jp","mima.tokushima.jp","minami.tokushima.jp","miyoshi.tokushima.jp","mugi.tokushima.jp","nakagawa.tokushima.jp","naruto.tokushima.jp","sanagochi.tokushima.jp","shishikui.tokushima.jp","tokushima.tokushima.jp","wajiki.tokushima.jp","adachi.tokyo.jp","akiruno.tokyo.jp","akishima.tokyo.jp","aogashima.tokyo.jp","arakawa.tokyo.jp","bunkyo.tokyo.jp","chiyoda.tokyo.jp","chofu.tokyo.jp","chuo.tokyo.jp","edogawa.tokyo.jp","fuchu.tokyo.jp","fussa.tokyo.jp","hachijo.tokyo.jp","hachioji.tokyo.jp","hamura.tokyo.jp","higashikurume.tokyo.jp","higashimurayama.tokyo.jp","higashiyamato.tokyo.jp","hino.tokyo.jp","hinode.tokyo.jp","hinohara.tokyo.jp","inagi.tokyo.jp","itabashi.tokyo.jp","katsushika.tokyo.jp","kita.tokyo.jp","kiyose.tokyo.jp","kodaira.tokyo.jp","koganei.tokyo.jp","kokubunji.tokyo.jp","komae.tokyo.jp","koto.tokyo.jp","kouzushima.tokyo.jp","kunitachi.tokyo.jp","machida.tokyo.jp","meguro.tokyo.jp","minato.tokyo.jp","mitaka.tokyo.jp","mizuho.tokyo.jp","musashimurayama.tokyo.jp","musashino.tokyo.jp","nakano.tokyo.jp","nerima.tokyo.jp","ogasawara.tokyo.jp","okutama.tokyo.jp","ome.tokyo.jp","oshima.tokyo.jp","ota.tokyo.jp","setagaya.tokyo.jp","shibuya.tokyo.jp","shinagawa.tokyo.jp","shinjuku.tokyo.jp","suginami.tokyo.jp","sumida.tokyo.jp","tachikawa.tokyo.jp","taito.tokyo.jp","tama.tokyo.jp","toshima.tokyo.jp","chizu.tottori.jp","hino.tottori.jp","kawahara.tottori.jp","koge.tottori.jp","kotoura.tottori.jp","misasa.tottori.jp","nanbu.tottori.jp","nichinan.tottori.jp","sakaiminato.tottori.jp","tottori.tottori.jp","wakasa.tottori.jp","yazu.tottori.jp","yonago.tottori.jp","asahi.toyama.jp","fuchu.toyama.jp","fukumitsu.toyama.jp","funahashi.toyama.jp","himi.toyama.jp","imizu.toyama.jp","inami.toyama.jp","johana.toyama.jp","kamiichi.toyama.jp","kurobe.toyama.jp","nakaniikawa.toyama.jp","namerikawa.toyama.jp","nanto.toyama.jp","nyuzen.toyama.jp","oyabe.toyama.jp","taira.toyama.jp","takaoka.toyama.jp","tateyama.toyam
a.jp","toga.toyama.jp","tonami.toyama.jp","toyama.toyama.jp","unazuki.toyama.jp","uozu.toyama.jp","yamada.toyama.jp","arida.wakayama.jp","aridagawa.wakayama.jp","gobo.wakayama.jp","hashimoto.wakayama.jp","hidaka.wakayama.jp","hirogawa.wakayama.jp","inami.wakayama.jp","iwade.wakayama.jp","kainan.wakayama.jp","kamitonda.wakayama.jp","katsuragi.wakayama.jp","kimino.wakayama.jp","kinokawa.wakayama.jp","kitayama.wakayama.jp","koya.wakayama.jp","koza.wakayama.jp","kozagawa.wakayama.jp","kudoyama.wakayama.jp","kushimoto.wakayama.jp","mihama.wakayama.jp","misato.wakayama.jp","nachikatsuura.wakayama.jp","shingu.wakayama.jp","shirahama.wakayama.jp","taiji.wakayama.jp","tanabe.wakayama.jp","wakayama.wakayama.jp","yuasa.wakayama.jp","yura.wakayama.jp","asahi.yamagata.jp","funagata.yamagata.jp","higashine.yamagata.jp","iide.yamagata.jp","kahoku.yamagata.jp","kaminoyama.yamagata.jp","kaneyama.yamagata.jp","kawanishi.yamagata.jp","mamurogawa.yamagata.jp","mikawa.yamagata.jp","murayama.yamagata.jp","nagai.yamagata.jp","nakayama.yamagata.jp","nanyo.yamagata.jp","nishikawa.yamagata.jp","obanazawa.yamagata.jp","oe.yamagata.jp","oguni.yamagata.jp","ohkura.yamagata.jp","oishida.yamagata.jp","sagae.yamagata.jp","sakata.yamagata.jp","sakegawa.yamagata.jp","shinjo.yamagata.jp","shirataka.yamagata.jp","shonai.yamagata.jp","takahata.yamagata.jp","tendo.yamagata.jp","tozawa.yamagata.jp","tsuruoka.yamagata.jp","yamagata.yamagata.jp","yamanobe.yamagata.jp","yonezawa.yamagata.jp","yuza.yamagata.jp","abu.yamaguchi.jp","hagi.yamaguchi.jp","hikari.yamaguchi.jp","hofu.yamaguchi.jp","iwakuni.yamaguchi.jp","kudamatsu.yamaguchi.jp","mitou.yamaguchi.jp","nagato.yamaguchi.jp","oshima.yamaguchi.jp","shimonoseki.yamaguchi.jp","shunan.yamaguchi.jp","tabuse.yamaguchi.jp","tokuyama.yamaguchi.jp","toyota.yamaguchi.jp","ube.yamaguchi.jp","yuu.yamaguchi.jp","chuo.yamanashi.jp","doshi.yamanashi.jp","fuefuki.yamanashi.jp","fujikawa.yamanashi.jp","fujikawaguchiko.yamanashi.jp","fujiyoshida.yamanashi.jp","hayakawa.yamanashi.jp","hokuto.yamanashi.jp","ichikawamisato.yamanashi.jp","kai.yamanashi.jp","kofu.yamanashi.jp","koshu.yamanashi.jp","kosuge.yamanashi.jp","minami-alps.yamanashi.jp","minobu.yamanashi.jp","nakamichi.yamanashi.jp","nanbu.yamanashi.jp","narusawa.yamanashi.jp","nirasaki.yamanashi.jp","nishikatsura.yamanashi.jp","oshino.yamanashi.jp","otsuki.yamanashi.jp","showa.yamanashi.jp","tabayama.yamanashi.jp","tsuru.yamanashi.jp","uenohara.yamanashi.jp","yamanakako.yamanashi.jp","yamanashi.yamanashi.jp","ke","ac.ke","co.ke","go.ke","info.ke","me.ke","mobi.ke","ne.ke","or.ke","sc.ke","kg","org.kg","net.kg","com.kg","edu.kg","gov.kg","mil.kg","*.kh","ki","edu.ki","biz.ki","net.ki","org.ki","gov.ki","info.ki","com.ki","km","org.km","nom.km","gov.km","prd.km","tm.km","edu.km","mil.km","ass.km","com.km","coop.km","asso.km","presse.km","medecin.km","notaires.km","pharmaciens.km","veterinaire.km","gouv.km","kn","net.kn","org.kn","edu.kn","gov.kn","kp","com.kp","edu.kp","gov.kp","org.kp","rep.kp","tra.kp","kr","ac.kr","co.kr","es.kr","go.kr","hs.kr","kg.kr","mil.kr","ms.kr","ne.kr","or.kr","pe.kr","re.kr","sc.kr","busan.kr","chungbuk.kr","chungnam.kr","daegu.kr","daejeon.kr","gangwon.kr","gwangju.kr","gyeongbuk.kr","gyeonggi.kr","gyeongnam.kr","incheon.kr","jeju.kr","jeonbuk.kr","jeonnam.kr","seoul.kr","ulsan.kr","kw","com.kw","edu.kw","emb.kw","gov.kw","ind.kw","net.kw","org.kw","ky","edu.ky","gov.ky","com.ky","org.ky","net.ky","kz","org.kz","edu.kz","net.kz","gov.kz","mil.kz","com.kz","la","int.la","net.la","info.la","edu.la","gov.la","per.l
a","com.la","org.la","lb","com.lb","edu.lb","gov.lb","net.lb","org.lb","lc","com.lc","net.lc","co.lc","org.lc","edu.lc","gov.lc","li","lk","gov.lk","sch.lk","net.lk","int.lk","com.lk","org.lk","edu.lk","ngo.lk","soc.lk","web.lk","ltd.lk","assn.lk","grp.lk","hotel.lk","ac.lk","lr","com.lr","edu.lr","gov.lr","org.lr","net.lr","ls","ac.ls","biz.ls","co.ls","edu.ls","gov.ls","info.ls","net.ls","org.ls","sc.ls","lt","gov.lt","lu","lv","com.lv","edu.lv","gov.lv","org.lv","mil.lv","id.lv","net.lv","asn.lv","conf.lv","ly","com.ly","net.ly","gov.ly","plc.ly","edu.ly","sch.ly","med.ly","org.ly","id.ly","ma","co.ma","net.ma","gov.ma","org.ma","ac.ma","press.ma","mc","tm.mc","asso.mc","md","me","co.me","net.me","org.me","edu.me","ac.me","gov.me","its.me","priv.me","mg","org.mg","nom.mg","gov.mg","prd.mg","tm.mg","edu.mg","mil.mg","com.mg","co.mg","mh","mil","mk","com.mk","org.mk","net.mk","edu.mk","gov.mk","inf.mk","name.mk","ml","com.ml","edu.ml","gouv.ml","gov.ml","net.ml","org.ml","presse.ml","*.mm","mn","gov.mn","edu.mn","org.mn","mo","com.mo","net.mo","org.mo","edu.mo","gov.mo","mobi","mp","mq","mr","gov.mr","ms","com.ms","edu.ms","gov.ms","net.ms","org.ms","mt","com.mt","edu.mt","net.mt","org.mt","mu","com.mu","net.mu","org.mu","gov.mu","ac.mu","co.mu","or.mu","museum","academy.museum","agriculture.museum","air.museum","airguard.museum","alabama.museum","alaska.museum","amber.museum","ambulance.museum","american.museum","americana.museum","americanantiques.museum","americanart.museum","amsterdam.museum","and.museum","annefrank.museum","anthro.museum","anthropology.museum","antiques.museum","aquarium.museum","arboretum.museum","archaeological.museum","archaeology.museum","architecture.museum","art.museum","artanddesign.museum","artcenter.museum","artdeco.museum","arteducation.museum","artgallery.museum","arts.museum","artsandcrafts.museum","asmatart.museum","assassination.museum","assisi.museum","association.museum","astronomy.museum","atlanta.museum","austin.museum","australia.museum","automotive.museum","aviation.museum","axis.museum","badajoz.museum","baghdad.museum","bahn.museum","bale.museum","baltimore.museum","barcelona.museum","baseball.museum","basel.museum","baths.museum","bauern.museum","beauxarts.museum","beeldengeluid.museum","bellevue.museum","bergbau.museum","berkeley.museum","berlin.museum","bern.museum","bible.museum","bilbao.museum","bill.museum","birdart.museum","birthplace.museum","bonn.museum","boston.museum","botanical.museum","botanicalgarden.museum","botanicgarden.museum","botany.museum","brandywinevalley.museum","brasil.museum","bristol.museum","british.museum","britishcolumbia.museum","broadcast.museum","brunel.museum","brussel.museum","brussels.museum","bruxelles.museum","building.museum","burghof.museum","bus.museum","bushey.museum","cadaques.museum","california.museum","cambridge.museum","can.museum","canada.museum","capebreton.museum","carrier.museum","cartoonart.museum","casadelamoneda.museum","castle.museum","castres.museum","celtic.museum","center.museum","chattanooga.museum","cheltenham.museum","chesapeakebay.museum","chicago.museum","children.museum","childrens.museum","childrensgarden.museum","chiropractic.museum","chocolate.museum","christiansburg.museum","cincinnati.museum","cinema.museum","circus.museum","civilisation.museum","civilization.museum","civilwar.museum","clinton.museum","clock.museum","coal.museum","coastaldefence.museum","cody.museum","coldwar.museum","collection.museum","colonialwilliamsburg.museum","coloradoplateau.museum","columbia.museum","co
lumbus.museum","communication.museum","communications.museum","community.museum","computer.museum","computerhistory.museum","comunicações.museum","contemporary.museum","contemporaryart.museum","convent.museum","copenhagen.museum","corporation.museum","correios-e-telecomunicações.museum","corvette.museum","costume.museum","countryestate.museum","county.museum","crafts.museum","cranbrook.museum","creation.museum","cultural.museum","culturalcenter.museum","culture.museum","cyber.museum","cymru.museum","dali.museum","dallas.museum","database.museum","ddr.museum","decorativearts.museum","delaware.museum","delmenhorst.museum","denmark.museum","depot.museum","design.museum","detroit.museum","dinosaur.museum","discovery.museum","dolls.museum","donostia.museum","durham.museum","eastafrica.museum","eastcoast.museum","education.museum","educational.museum","egyptian.museum","eisenbahn.museum","elburg.museum","elvendrell.museum","embroidery.museum","encyclopedic.museum","england.museum","entomology.museum","environment.museum","environmentalconservation.museum","epilepsy.museum","essex.museum","estate.museum","ethnology.museum","exeter.museum","exhibition.museum","family.museum","farm.museum","farmequipment.museum","farmers.museum","farmstead.museum","field.museum","figueres.museum","filatelia.museum","film.museum","fineart.museum","finearts.museum","finland.museum","flanders.museum","florida.museum","force.museum","fortmissoula.museum","fortworth.museum","foundation.museum","francaise.museum","frankfurt.museum","franziskaner.museum","freemasonry.museum","freiburg.museum","fribourg.museum","frog.museum","fundacio.museum","furniture.museum","gallery.museum","garden.museum","gateway.museum","geelvinck.museum","gemological.museum","geology.museum","georgia.museum","giessen.museum","glas.museum","glass.museum","gorge.museum","grandrapids.museum","graz.museum","guernsey.museum","halloffame.museum","hamburg.museum","handson.museum","harvestcelebration.museum","hawaii.museum","health.museum","heimatunduhren.museum","hellas.museum","helsinki.museum","hembygdsforbund.museum","heritage.museum","histoire.museum","historical.museum","historicalsociety.museum","historichouses.museum","historisch.museum","historisches.museum","history.museum","historyofscience.museum","horology.museum","house.museum","humanities.museum","illustration.museum","imageandsound.museum","indian.museum","indiana.museum","indianapolis.museum","indianmarket.museum","intelligence.museum","interactive.museum","iraq.museum","iron.museum","isleofman.museum","jamison.museum","jefferson.museum","jerusalem.museum","jewelry.museum","jewish.museum","jewishart.museum","jfk.museum","journalism.museum","judaica.museum","judygarland.museum","juedisches.museum","juif.museum","karate.museum","karikatur.museum","kids.museum","koebenhavn.museum","koeln.museum","kunst.museum","kunstsammlung.museum","kunstunddesign.museum","labor.museum","labour.museum","lajolla.museum","lancashire.museum","landes.museum","lans.museum","läns.museum","larsson.museum","lewismiller.museum","lincoln.museum","linz.museum","living.museum","livinghistory.museum","localhistory.museum","london.museum","losangeles.museum","louvre.museum","loyalist.museum","lucerne.museum","luxembourg.museum","luzern.museum","mad.museum","madrid.museum","mallorca.museum","manchester.museum","mansion.museum","mansions.museum","manx.museum","marburg.museum","maritime.museum","maritimo.museum","maryland.museum","marylhurst.museum","media.museum","medical.museum","medizinhistorisches.museum","meeres.museum",
"memorial.museum","mesaverde.museum","michigan.museum","midatlantic.museum","military.museum","mill.museum","miners.museum","mining.museum","minnesota.museum","missile.museum","missoula.museum","modern.museum","moma.museum","money.museum","monmouth.museum","monticello.museum","montreal.museum","moscow.museum","motorcycle.museum","muenchen.museum","muenster.museum","mulhouse.museum","muncie.museum","museet.museum","museumcenter.museum","museumvereniging.museum","music.museum","national.museum","nationalfirearms.museum","nationalheritage.museum","nativeamerican.museum","naturalhistory.museum","naturalhistorymuseum.museum","naturalsciences.museum","nature.museum","naturhistorisches.museum","natuurwetenschappen.museum","naumburg.museum","naval.museum","nebraska.museum","neues.museum","newhampshire.museum","newjersey.museum","newmexico.museum","newport.museum","newspaper.museum","newyork.museum","niepce.museum","norfolk.museum","north.museum","nrw.museum","nyc.museum","nyny.museum","oceanographic.museum","oceanographique.museum","omaha.museum","online.museum","ontario.museum","openair.museum","oregon.museum","oregontrail.museum","otago.museum","oxford.museum","pacific.museum","paderborn.museum","palace.museum","paleo.museum","palmsprings.museum","panama.museum","paris.museum","pasadena.museum","pharmacy.museum","philadelphia.museum","philadelphiaarea.museum","philately.museum","phoenix.museum","photography.museum","pilots.museum","pittsburgh.museum","planetarium.museum","plantation.museum","plants.museum","plaza.museum","portal.museum","portland.museum","portlligat.museum","posts-and-telecommunications.museum","preservation.museum","presidio.museum","press.museum","project.museum","public.museum","pubol.museum","quebec.museum","railroad.museum","railway.museum","research.museum","resistance.museum","riodejaneiro.museum","rochester.museum","rockart.museum","roma.museum","russia.museum","saintlouis.museum","salem.museum","salvadordali.museum","salzburg.museum","sandiego.museum","sanfrancisco.museum","santabarbara.museum","santacruz.museum","santafe.museum","saskatchewan.museum","satx.museum","savannahga.museum","schlesisches.museum","schoenbrunn.museum","schokoladen.museum","school.museum","schweiz.museum","science.museum","scienceandhistory.museum","scienceandindustry.museum","sciencecenter.museum","sciencecenters.museum","science-fiction.museum","sciencehistory.museum","sciences.museum","sciencesnaturelles.museum","scotland.museum","seaport.museum","settlement.museum","settlers.museum","shell.museum","sherbrooke.museum","sibenik.museum","silk.museum","ski.museum","skole.museum","society.museum","sologne.museum","soundandvision.museum","southcarolina.museum","southwest.museum","space.museum","spy.museum","square.museum","stadt.museum","stalbans.museum","starnberg.museum","state.museum","stateofdelaware.museum","station.museum","steam.museum","steiermark.museum","stjohn.museum","stockholm.museum","stpetersburg.museum","stuttgart.museum","suisse.museum","surgeonshall.museum","surrey.museum","svizzera.museum","sweden.museum","sydney.museum","tank.museum","tcm.museum","technology.museum","telekommunikation.museum","television.museum","texas.museum","textile.museum","theater.museum","time.museum","timekeeping.museum","topology.museum","torino.museum","touch.museum","town.museum","transport.museum","tree.museum","trolley.museum","trust.museum","trustee.museum","uhren.museum","ulm.museum","undersea.museum","university.museum","usa.museum","usantiques.museum","usarts.museum","uscountryestate.museum","usc
ulture.museum","usdecorativearts.museum","usgarden.museum","ushistory.museum","ushuaia.museum","uslivinghistory.museum","utah.museum","uvic.museum","valley.museum","vantaa.museum","versailles.museum","viking.museum","village.museum","virginia.museum","virtual.museum","virtuel.museum","vlaanderen.museum","volkenkunde.museum","wales.museum","wallonie.museum","war.museum","washingtondc.museum","watchandclock.museum","watch-and-clock.museum","western.museum","westfalen.museum","whaling.museum","wildlife.museum","williamsburg.museum","windmill.museum","workshop.museum","york.museum","yorkshire.museum","yosemite.museum","youth.museum","zoological.museum","zoology.museum","ירושלים.museum","иком.museum","mv","aero.mv","biz.mv","com.mv","coop.mv","edu.mv","gov.mv","info.mv","int.mv","mil.mv","museum.mv","name.mv","net.mv","org.mv","pro.mv","mw","ac.mw","biz.mw","co.mw","com.mw","coop.mw","edu.mw","gov.mw","int.mw","museum.mw","net.mw","org.mw","mx","com.mx","org.mx","gob.mx","edu.mx","net.mx","my","com.my","net.my","org.my","gov.my","edu.my","mil.my","name.my","mz","ac.mz","adv.mz","co.mz","edu.mz","gov.mz","mil.mz","net.mz","org.mz","na","info.na","pro.na","name.na","school.na","or.na","dr.na","us.na","mx.na","ca.na","in.na","cc.na","tv.na","ws.na","mobi.na","co.na","com.na","org.na","name","nc","asso.nc","nom.nc","ne","net","nf","com.nf","net.nf","per.nf","rec.nf","web.nf","arts.nf","firm.nf","info.nf","other.nf","store.nf","ng","com.ng","edu.ng","gov.ng","i.ng","mil.ng","mobi.ng","name.ng","net.ng","org.ng","sch.ng","ni","ac.ni","biz.ni","co.ni","com.ni","edu.ni","gob.ni","in.ni","info.ni","int.ni","mil.ni","net.ni","nom.ni","org.ni","web.ni","nl","no","fhs.no","vgs.no","fylkesbibl.no","folkebibl.no","museum.no","idrett.no","priv.no","mil.no","stat.no","dep.no","kommune.no","herad.no","aa.no","ah.no","bu.no","fm.no","hl.no","hm.no","jan-mayen.no","mr.no","nl.no","nt.no","of.no","ol.no","oslo.no","rl.no","sf.no","st.no","svalbard.no","tm.no","tr.no","va.no","vf.no","gs.aa.no","gs.ah.no","gs.bu.no","gs.fm.no","gs.hl.no","gs.hm.no","gs.jan-mayen.no","gs.mr.no","gs.nl.no","gs.nt.no","gs.of.no","gs.ol.no","gs.oslo.no","gs.rl.no","gs.sf.no","gs.st.no","gs.svalbard.no","gs.tm.no","gs.tr.no","gs.va.no","gs.vf.no","akrehamn.no","åkrehamn.no","algard.no","ålgård.no","arna.no","brumunddal.no","bryne.no","bronnoysund.no","brønnøysund.no","drobak.no","drøbak.no","egersund.no","fetsund.no","floro.no","florø.no","fredrikstad.no","hokksund.no","honefoss.no","hønefoss.no","jessheim.no","jorpeland.no","jørpeland.no","kirkenes.no","kopervik.no","krokstadelva.no","langevag.no","langevåg.no","leirvik.no","mjondalen.no","mjøndalen.no","mo-i-rana.no","mosjoen.no","mosjøen.no","nesoddtangen.no","orkanger.no","osoyro.no","osøyro.no","raholt.no","råholt.no","sandnessjoen.no","sandnessjøen.no","skedsmokorset.no","slattum.no","spjelkavik.no","stathelle.no","stavern.no","stjordalshalsen.no","stjørdalshalsen.no","tananger.no","tranby.no","vossevangen.no","afjord.no","åfjord.no","agdenes.no","al.no","ål.no","alesund.no","ålesund.no","alstahaug.no","alta.no","áltá.no","alaheadju.no","álaheadju.no","alvdal.no","amli.no","åmli.no","amot.no","åmot.no","andebu.no","andoy.no","andøy.no","andasuolo.no","ardal.no","årdal.no","aremark.no","arendal.no","ås.no","aseral.no","åseral.no","asker.no","askim.no","askvoll.no","askoy.no","askøy.no","asnes.no","åsnes.no","audnedaln.no","aukra.no","aure.no","aurland.no","aurskog-holand.no","aurskog-høland.no","austevoll.no","austrheim.no","averoy.no","averøy.no","balestrand.no","ballangen.no","bal
at.no","bálát.no","balsfjord.no","bahccavuotna.no","báhccavuotna.no","bamble.no","bardu.no","beardu.no","beiarn.no","bajddar.no","bájddar.no","baidar.no","báidár.no","berg.no","bergen.no","berlevag.no","berlevåg.no","bearalvahki.no","bearalváhki.no","bindal.no","birkenes.no","bjarkoy.no","bjarkøy.no","bjerkreim.no","bjugn.no","bodo.no","bodø.no","badaddja.no","bådåddjå.no","budejju.no","bokn.no","bremanger.no","bronnoy.no","brønnøy.no","bygland.no","bykle.no","barum.no","bærum.no","bo.telemark.no","bø.telemark.no","bo.nordland.no","bø.nordland.no","bievat.no","bievát.no","bomlo.no","bømlo.no","batsfjord.no","båtsfjord.no","bahcavuotna.no","báhcavuotna.no","dovre.no","drammen.no","drangedal.no","dyroy.no","dyrøy.no","donna.no","dønna.no","eid.no","eidfjord.no","eidsberg.no","eidskog.no","eidsvoll.no","eigersund.no","elverum.no","enebakk.no","engerdal.no","etne.no","etnedal.no","evenes.no","evenassi.no","evenášši.no","evje-og-hornnes.no","farsund.no","fauske.no","fuossko.no","fuoisku.no","fedje.no","fet.no","finnoy.no","finnøy.no","fitjar.no","fjaler.no","fjell.no","flakstad.no","flatanger.no","flekkefjord.no","flesberg.no","flora.no","fla.no","flå.no","folldal.no","forsand.no","fosnes.no","frei.no","frogn.no","froland.no","frosta.no","frana.no","fræna.no","froya.no","frøya.no","fusa.no","fyresdal.no","forde.no","førde.no","gamvik.no","gangaviika.no","gáŋgaviika.no","gaular.no","gausdal.no","gildeskal.no","gildeskål.no","giske.no","gjemnes.no","gjerdrum.no","gjerstad.no","gjesdal.no","gjovik.no","gjøvik.no","gloppen.no","gol.no","gran.no","grane.no","granvin.no","gratangen.no","grimstad.no","grong.no","kraanghke.no","kråanghke.no","grue.no","gulen.no","hadsel.no","halden.no","halsa.no","hamar.no","hamaroy.no","habmer.no","hábmer.no","hapmir.no","hápmir.no","hammerfest.no","hammarfeasta.no","hámmárfeasta.no","haram.no","hareid.no","harstad.no","hasvik.no","aknoluokta.no","ákŋoluokta.no","hattfjelldal.no","aarborte.no","haugesund.no","hemne.no","hemnes.no","hemsedal.no","heroy.more-og-romsdal.no","herøy.møre-og-romsdal.no","heroy.nordland.no","herøy.nordland.no","hitra.no","hjartdal.no","hjelmeland.no","hobol.no","hobøl.no","hof.no","hol.no","hole.no","holmestrand.no","holtalen.no","holtålen.no","hornindal.no","horten.no","hurdal.no","hurum.no","hvaler.no","hyllestad.no","hagebostad.no","hægebostad.no","hoyanger.no","høyanger.no","hoylandet.no","høylandet.no","ha.no","hå.no","ibestad.no","inderoy.no","inderøy.no","iveland.no","jevnaker.no","jondal.no","jolster.no","jølster.no","karasjok.no","karasjohka.no","kárášjohka.no","karlsoy.no","galsa.no","gálsá.no","karmoy.no","karmøy.no","kautokeino.no","guovdageaidnu.no","klepp.no","klabu.no","klæbu.no","kongsberg.no","kongsvinger.no","kragero.no","kragerø.no","kristiansand.no","kristiansund.no","krodsherad.no","krødsherad.no","kvalsund.no","rahkkeravju.no","ráhkkerávju.no","kvam.no","kvinesdal.no","kvinnherad.no","kviteseid.no","kvitsoy.no","kvitsøy.no","kvafjord.no","kvæfjord.no","giehtavuoatna.no","kvanangen.no","kvænangen.no","navuotna.no","návuotna.no","kafjord.no","kåfjord.no","gaivuotna.no","gáivuotna.no","larvik.no","lavangen.no","lavagis.no","loabat.no","loabát.no","lebesby.no","davvesiida.no","leikanger.no","leirfjord.no","leka.no","leksvik.no","lenvik.no","leangaviika.no","leaŋgaviika.no","lesja.no","levanger.no","lier.no","lierne.no","lillehammer.no","lillesand.no","lindesnes.no","lindas.no","lindås.no","lom.no","loppa.no","lahppi.no","láhppi.no","lund.no","lunner.no","luroy.no","lurøy.no","luster.no","lyngdal.no","lyngen.no","ivgu.no","la
rdal.no","lerdal.no","lærdal.no","lodingen.no","lødingen.no","lorenskog.no","lørenskog.no","loten.no","løten.no","malvik.no","masoy.no","måsøy.no","muosat.no","muosát.no","mandal.no","marker.no","marnardal.no","masfjorden.no","meland.no","meldal.no","melhus.no","meloy.no","meløy.no","meraker.no","meråker.no","moareke.no","moåreke.no","midsund.no","midtre-gauldal.no","modalen.no","modum.no","molde.no","moskenes.no","moss.no","mosvik.no","malselv.no","målselv.no","malatvuopmi.no","málatvuopmi.no","namdalseid.no","aejrie.no","namsos.no","namsskogan.no","naamesjevuemie.no","nååmesjevuemie.no","laakesvuemie.no","nannestad.no","narvik.no","narviika.no","naustdal.no","nedre-eiker.no","nes.akershus.no","nes.buskerud.no","nesna.no","nesodden.no","nesseby.no","unjarga.no","unjárga.no","nesset.no","nissedal.no","nittedal.no","nord-aurdal.no","nord-fron.no","nord-odal.no","norddal.no","nordkapp.no","davvenjarga.no","davvenjárga.no","nordre-land.no","nordreisa.no","raisa.no","ráisa.no","nore-og-uvdal.no","notodden.no","naroy.no","nærøy.no","notteroy.no","nøtterøy.no","odda.no","oksnes.no","øksnes.no","oppdal.no","oppegard.no","oppegård.no","orkdal.no","orland.no","ørland.no","orskog.no","ørskog.no","orsta.no","ørsta.no","os.hedmark.no","os.hordaland.no","osen.no","osteroy.no","osterøy.no","ostre-toten.no","østre-toten.no","overhalla.no","ovre-eiker.no","øvre-eiker.no","oyer.no","øyer.no","oygarden.no","øygarden.no","oystre-slidre.no","øystre-slidre.no","porsanger.no","porsangu.no","porsáŋgu.no","porsgrunn.no","radoy.no","radøy.no","rakkestad.no","rana.no","ruovat.no","randaberg.no","rauma.no","rendalen.no","rennebu.no","rennesoy.no","rennesøy.no","rindal.no","ringebu.no","ringerike.no","ringsaker.no","rissa.no","risor.no","risør.no","roan.no","rollag.no","rygge.no","ralingen.no","rælingen.no","rodoy.no","rødøy.no","romskog.no","rømskog.no","roros.no","røros.no","rost.no","røst.no","royken.no","røyken.no","royrvik.no","røyrvik.no","rade.no","råde.no","salangen.no","siellak.no","saltdal.no","salat.no","sálát.no","sálat.no","samnanger.no","sande.more-og-romsdal.no","sande.møre-og-romsdal.no","sande.vestfold.no","sandefjord.no","sandnes.no","sandoy.no","sandøy.no","sarpsborg.no","sauda.no","sauherad.no","sel.no","selbu.no","selje.no","seljord.no","sigdal.no","siljan.no","sirdal.no","skaun.no","skedsmo.no","ski.no","skien.no","skiptvet.no","skjervoy.no","skjervøy.no","skierva.no","skiervá.no","skjak.no","skjåk.no","skodje.no","skanland.no","skånland.no","skanit.no","skánit.no","smola.no","smøla.no","snillfjord.no","snasa.no","snåsa.no","snoasa.no","snaase.no","snåase.no","sogndal.no","sokndal.no","sola.no","solund.no","songdalen.no","sortland.no","spydeberg.no","stange.no","stavanger.no","steigen.no","steinkjer.no","stjordal.no","stjørdal.no","stokke.no","stor-elvdal.no","stord.no","stordal.no","storfjord.no","omasvuotna.no","strand.no","stranda.no","stryn.no","sula.no","suldal.no","sund.no","sunndal.no","surnadal.no","sveio.no","svelvik.no","sykkylven.no","sogne.no","søgne.no","somna.no","sømna.no","sondre-land.no","søndre-land.no","sor-aurdal.no","sør-aurdal.no","sor-fron.no","sør-fron.no","sor-odal.no","sør-odal.no","sor-varanger.no","sør-varanger.no","matta-varjjat.no","mátta-várjjat.no","sorfold.no","sørfold.no","sorreisa.no","sørreisa.no","sorum.no","sørum.no","tana.no","deatnu.no","time.no","tingvoll.no","tinn.no","tjeldsund.no","dielddanuorri.no","tjome.no","tjøme.no","tokke.no","tolga.no","torsken.no","tranoy.no","tranøy.no","tromso.no","tromsø.no","tromsa.no","romsa.no","trondheim.no","troandin.no"
,"trysil.no","trana.no","træna.no","trogstad.no","trøgstad.no","tvedestrand.no","tydal.no","tynset.no","tysfjord.no","divtasvuodna.no","divttasvuotna.no","tysnes.no","tysvar.no","tysvær.no","tonsberg.no","tønsberg.no","ullensaker.no","ullensvang.no","ulvik.no","utsira.no","vadso.no","vadsø.no","cahcesuolo.no","čáhcesuolo.no","vaksdal.no","valle.no","vang.no","vanylven.no","vardo.no","vardø.no","varggat.no","várggát.no","vefsn.no","vaapste.no","vega.no","vegarshei.no","vegårshei.no","vennesla.no","verdal.no","verran.no","vestby.no","vestnes.no","vestre-slidre.no","vestre-toten.no","vestvagoy.no","vestvågøy.no","vevelstad.no","vik.no","vikna.no","vindafjord.no","volda.no","voss.no","varoy.no","værøy.no","vagan.no","vågan.no","voagat.no","vagsoy.no","vågsøy.no","vaga.no","vågå.no","valer.ostfold.no","våler.østfold.no","valer.hedmark.no","våler.hedmark.no","*.np","nr","biz.nr","info.nr","gov.nr","edu.nr","org.nr","net.nr","com.nr","nu","nz","ac.nz","co.nz","cri.nz","geek.nz","gen.nz","govt.nz","health.nz","iwi.nz","kiwi.nz","maori.nz","mil.nz","māori.nz","net.nz","org.nz","parliament.nz","school.nz","om","co.om","com.om","edu.om","gov.om","med.om","museum.om","net.om","org.om","pro.om","onion","org","pa","ac.pa","gob.pa","com.pa","org.pa","sld.pa","edu.pa","net.pa","ing.pa","abo.pa","med.pa","nom.pa","pe","edu.pe","gob.pe","nom.pe","mil.pe","org.pe","com.pe","net.pe","pf","com.pf","org.pf","edu.pf","*.pg","ph","com.ph","net.ph","org.ph","gov.ph","edu.ph","ngo.ph","mil.ph","i.ph","pk","com.pk","net.pk","edu.pk","org.pk","fam.pk","biz.pk","web.pk","gov.pk","gob.pk","gok.pk","gon.pk","gop.pk","gos.pk","info.pk","pl","com.pl","net.pl","org.pl","aid.pl","agro.pl","atm.pl","auto.pl","biz.pl","edu.pl","gmina.pl","gsm.pl","info.pl","mail.pl","miasta.pl","media.pl","mil.pl","nieruchomosci.pl","nom.pl","pc.pl","powiat.pl","priv.pl","realestate.pl","rel.pl","sex.pl","shop.pl","sklep.pl","sos.pl","szkola.pl","targi.pl","tm.pl","tourism.pl","travel.pl","turystyka.pl","gov.pl","ap.gov.pl","ic.gov.pl","is.gov.pl","us.gov.pl","kmpsp.gov.pl","kppsp.gov.pl","kwpsp.gov.pl","psp.gov.pl","wskr.gov.pl","kwp.gov.pl","mw.gov.pl","ug.gov.pl","um.gov.pl","umig.gov.pl","ugim.gov.pl","upow.gov.pl","uw.gov.pl","starostwo.gov.pl","pa.gov.pl","po.gov.pl","psse.gov.pl","pup.gov.pl","rzgw.gov.pl","sa.gov.pl","so.gov.pl","sr.gov.pl","wsa.gov.pl","sko.gov.pl","uzs.gov.pl","wiih.gov.pl","winb.gov.pl","pinb.gov.pl","wios.gov.pl","witd.gov.pl","wzmiuw.gov.pl","piw.gov.pl","wiw.gov.pl","griw.gov.pl","wif.gov.pl","oum.gov.pl","sdn.gov.pl","zp.gov.pl","uppo.gov.pl","mup.gov.pl","wuoz.gov.pl","konsulat.gov.pl","oirm.gov.pl","augustow.pl","babia-gora.pl","bedzin.pl","beskidy.pl","bialowieza.pl","bialystok.pl","bielawa.pl","bieszczady.pl","boleslawiec.pl","bydgoszcz.pl","bytom.pl","cieszyn.pl","czeladz.pl","czest.pl","dlugoleka.pl","elblag.pl","elk.pl","glogow.pl","gniezno.pl","gorlice.pl","grajewo.pl","ilawa.pl","jaworzno.pl","jelenia-gora.pl","jgora.pl","kalisz.pl","kazimierz-dolny.pl","karpacz.pl","kartuzy.pl","kaszuby.pl","katowice.pl","kepno.pl","ketrzyn.pl","klodzko.pl","kobierzyce.pl","kolobrzeg.pl","konin.pl","konskowola.pl","kutno.pl","lapy.pl","lebork.pl","legnica.pl","lezajsk.pl","limanowa.pl","lomza.pl","lowicz.pl","lubin.pl","lukow.pl","malbork.pl","malopolska.pl","mazowsze.pl","mazury.pl","mielec.pl","mielno.pl","mragowo.pl","naklo.pl","nowaruda.pl","nysa.pl","olawa.pl","olecko.pl","olkusz.pl","olsztyn.pl","opoczno.pl","opole.pl","ostroda.pl","ostroleka.pl","ostrowiec.pl","ostrowwlkp.pl","pila.pl","pisz.pl","podhale.pl","po
dlasie.pl","polkowice.pl","pomorze.pl","pomorskie.pl","prochowice.pl","pruszkow.pl","przeworsk.pl","pulawy.pl","radom.pl","rawa-maz.pl","rybnik.pl","rzeszow.pl","sanok.pl","sejny.pl","slask.pl","slupsk.pl","sosnowiec.pl","stalowa-wola.pl","skoczow.pl","starachowice.pl","stargard.pl","suwalki.pl","swidnica.pl","swiebodzin.pl","swinoujscie.pl","szczecin.pl","szczytno.pl","tarnobrzeg.pl","tgory.pl","turek.pl","tychy.pl","ustka.pl","walbrzych.pl","warmia.pl","warszawa.pl","waw.pl","wegrow.pl","wielun.pl","wlocl.pl","wloclawek.pl","wodzislaw.pl","wolomin.pl","wroclaw.pl","zachpomor.pl","zagan.pl","zarow.pl","zgora.pl","zgorzelec.pl","pm","pn","gov.pn","co.pn","org.pn","edu.pn","net.pn","post","pr","com.pr","net.pr","org.pr","gov.pr","edu.pr","isla.pr","pro.pr","biz.pr","info.pr","name.pr","est.pr","prof.pr","ac.pr","pro","aaa.pro","aca.pro","acct.pro","avocat.pro","bar.pro","cpa.pro","eng.pro","jur.pro","law.pro","med.pro","recht.pro","ps","edu.ps","gov.ps","sec.ps","plo.ps","com.ps","org.ps","net.ps","pt","net.pt","gov.pt","org.pt","edu.pt","int.pt","publ.pt","com.pt","nome.pt","pw","co.pw","ne.pw","or.pw","ed.pw","go.pw","belau.pw","py","com.py","coop.py","edu.py","gov.py","mil.py","net.py","org.py","qa","com.qa","edu.qa","gov.qa","mil.qa","name.qa","net.qa","org.qa","sch.qa","re","asso.re","com.re","nom.re","ro","arts.ro","com.ro","firm.ro","info.ro","nom.ro","nt.ro","org.ro","rec.ro","store.ro","tm.ro","www.ro","rs","ac.rs","co.rs","edu.rs","gov.rs","in.rs","org.rs","ru","rw","ac.rw","co.rw","coop.rw","gov.rw","mil.rw","net.rw","org.rw","sa","com.sa","net.sa","org.sa","gov.sa","med.sa","pub.sa","edu.sa","sch.sa","sb","com.sb","edu.sb","gov.sb","net.sb","org.sb","sc","com.sc","gov.sc","net.sc","org.sc","edu.sc","sd","com.sd","net.sd","org.sd","edu.sd","med.sd","tv.sd","gov.sd","info.sd","se","a.se","ac.se","b.se","bd.se","brand.se","c.se","d.se","e.se","f.se","fh.se","fhsk.se","fhv.se","g.se","h.se","i.se","k.se","komforb.se","kommunalforbund.se","komvux.se","l.se","lanbib.se","m.se","n.se","naturbruksgymn.se","o.se","org.se","p.se","parti.se","pp.se","press.se","r.se","s.se","t.se","tm.se","u.se","w.se","x.se","y.se","z.se","sg","com.sg","net.sg","org.sg","gov.sg","edu.sg","per.sg","sh","com.sh","net.sh","gov.sh","org.sh","mil.sh","si","sj","sk","sl","com.sl","net.sl","edu.sl","gov.sl","org.sl","sm","sn","art.sn","com.sn","edu.sn","gouv.sn","org.sn","perso.sn","univ.sn","so","com.so","edu.so","gov.so","me.so","net.so","org.so","sr","ss","biz.ss","com.ss","edu.ss","gov.ss","net.ss","org.ss","st","co.st","com.st","consulado.st","edu.st","embaixada.st","gov.st","mil.st","net.st","org.st","principe.st","saotome.st","store.st","su","sv","com.sv","edu.sv","gob.sv","org.sv","red.sv","sx","gov.sx","sy","edu.sy","gov.sy","net.sy","mil.sy","com.sy","org.sy","sz","co.sz","ac.sz","org.sz","tc","td","tel","tf","tg","th","ac.th","co.th","go.th","in.th","mi.th","net.th","or.th","tj","ac.tj","biz.tj","co.tj","com.tj","edu.tj","go.tj","gov.tj","int.tj","mil.tj","name.tj","net.tj","nic.tj","org.tj","test.tj","web.tj","tk","tl","gov.tl","tm","com.tm","co.tm","org.tm","net.tm","nom.tm","gov.tm","mil.tm","edu.tm","tn","com.tn","ens.tn","fin.tn","gov.tn","ind.tn","intl.tn","nat.tn","net.tn","org.tn","info.tn","perso.tn","tourism.tn","edunet.tn","rnrt.tn","rns.tn","rnu.tn","mincom.tn","agrinet.tn","defense.tn","turen.tn","to","com.to","gov.to","net.to","org.to","edu.to","mil.to","tr","av.tr","bbs.tr","bel.tr","biz.tr","com.tr","dr.tr","edu.tr","gen.tr","gov.tr","info.tr","mil.tr","k12.tr","kep.tr","name.tr","net.t
r","org.tr","pol.tr","tel.tr","tsk.tr","tv.tr","web.tr","nc.tr","gov.nc.tr","tt","co.tt","com.tt","org.tt","net.tt","biz.tt","info.tt","pro.tt","int.tt","coop.tt","jobs.tt","mobi.tt","travel.tt","museum.tt","aero.tt","name.tt","gov.tt","edu.tt","tv","tw","edu.tw","gov.tw","mil.tw","com.tw","net.tw","org.tw","idv.tw","game.tw","ebiz.tw","club.tw","網路.tw","組織.tw","商業.tw","tz","ac.tz","co.tz","go.tz","hotel.tz","info.tz","me.tz","mil.tz","mobi.tz","ne.tz","or.tz","sc.tz","tv.tz","ua","com.ua","edu.ua","gov.ua","in.ua","net.ua","org.ua","cherkassy.ua","cherkasy.ua","chernigov.ua","chernihiv.ua","chernivtsi.ua","chernovtsy.ua","ck.ua","cn.ua","cr.ua","crimea.ua","cv.ua","dn.ua","dnepropetrovsk.ua","dnipropetrovsk.ua","dominic.ua","donetsk.ua","dp.ua","if.ua","ivano-frankivsk.ua","kh.ua","kharkiv.ua","kharkov.ua","kherson.ua","khmelnitskiy.ua","khmelnytskyi.ua","kiev.ua","kirovograd.ua","km.ua","kr.ua","krym.ua","ks.ua","kv.ua","kyiv.ua","lg.ua","lt.ua","lugansk.ua","lutsk.ua","lv.ua","lviv.ua","mk.ua","mykolaiv.ua","nikolaev.ua","od.ua","odesa.ua","odessa.ua","pl.ua","poltava.ua","rivne.ua","rovno.ua","rv.ua","sb.ua","sebastopol.ua","sevastopol.ua","sm.ua","sumy.ua","te.ua","ternopil.ua","uz.ua","uzhgorod.ua","vinnica.ua","vinnytsia.ua","vn.ua","volyn.ua","yalta.ua","zaporizhzhe.ua","zaporizhzhia.ua","zhitomir.ua","zhytomyr.ua","zp.ua","zt.ua","ug","co.ug","or.ug","ac.ug","sc.ug","go.ug","ne.ug","com.ug","org.ug","uk","ac.uk","co.uk","gov.uk","ltd.uk","me.uk","net.uk","nhs.uk","org.uk","plc.uk","police.uk","*.sch.uk","us","dni.us","fed.us","isa.us","kids.us","nsn.us","ak.us","al.us","ar.us","as.us","az.us","ca.us","co.us","ct.us","dc.us","de.us","fl.us","ga.us","gu.us","hi.us","ia.us","id.us","il.us","in.us","ks.us","ky.us","la.us","ma.us","md.us","me.us","mi.us","mn.us","mo.us","ms.us","mt.us","nc.us","nd.us","ne.us","nh.us","nj.us","nm.us","nv.us","ny.us","oh.us","ok.us","or.us","pa.us","pr.us","ri.us","sc.us","sd.us","tn.us","tx.us","ut.us","vi.us","vt.us","va.us","wa.us","wi.us","wv.us","wy.us","k12.ak.us","k12.al.us","k12.ar.us","k12.as.us","k12.az.us","k12.ca.us","k12.co.us","k12.ct.us","k12.dc.us","k12.de.us","k12.fl.us","k12.ga.us","k12.gu.us","k12.ia.us","k12.id.us","k12.il.us","k12.in.us","k12.ks.us","k12.ky.us","k12.la.us","k12.ma.us","k12.md.us","k12.me.us","k12.mi.us","k12.mn.us","k12.mo.us","k12.ms.us","k12.mt.us","k12.nc.us","k12.ne.us","k12.nh.us","k12.nj.us","k12.nm.us","k12.nv.us","k12.ny.us","k12.oh.us","k12.ok.us","k12.or.us","k12.pa.us","k12.pr.us","k12.ri.us","k12.sc.us","k12.tn.us","k12.tx.us","k12.ut.us","k12.vi.us","k12.vt.us","k12.va.us","k12.wa.us","k12.wi.us","k12.wy.us","cc.ak.us","cc.al.us","cc.ar.us","cc.as.us","cc.az.us","cc.ca.us","cc.co.us","cc.ct.us","cc.dc.us","cc.de.us","cc.fl.us","cc.ga.us","cc.gu.us","cc.hi.us","cc.ia.us","cc.id.us","cc.il.us","cc.in.us","cc.ks.us","cc.ky.us","cc.la.us","cc.ma.us","cc.md.us","cc.me.us","cc.mi.us","cc.mn.us","cc.mo.us","cc.ms.us","cc.mt.us","cc.nc.us","cc.nd.us","cc.ne.us","cc.nh.us","cc.nj.us","cc.nm.us","cc.nv.us","cc.ny.us","cc.oh.us","cc.ok.us","cc.or.us","cc.pa.us","cc.pr.us","cc.ri.us","cc.sc.us","cc.sd.us","cc.tn.us","cc.tx.us","cc.ut.us","cc.vi.us","cc.vt.us","cc.va.us","cc.wa.us","cc.wi.us","cc.wv.us","cc.wy.us","lib.ak.us","lib.al.us","lib.ar.us","lib.as.us","lib.az.us","lib.ca.us","lib.co.us","lib.ct.us","lib.dc.us","lib.fl.us","lib.ga.us","lib.gu.us","lib.hi.us","lib.ia.us","lib.id.us","lib.il.us","lib.in.us","lib.ks.us","lib.ky.us","lib.la.us","lib.ma.us","lib.md.us","lib.me.us","lib.mi.us","lib.mn.us","lib.mo.u
s","lib.ms.us","lib.mt.us","lib.nc.us","lib.nd.us","lib.ne.us","lib.nh.us","lib.nj.us","lib.nm.us","lib.nv.us","lib.ny.us","lib.oh.us","lib.ok.us","lib.or.us","lib.pa.us","lib.pr.us","lib.ri.us","lib.sc.us","lib.sd.us","lib.tn.us","lib.tx.us","lib.ut.us","lib.vi.us","lib.vt.us","lib.va.us","lib.wa.us","lib.wi.us","lib.wy.us","pvt.k12.ma.us","chtr.k12.ma.us","paroch.k12.ma.us","ann-arbor.mi.us","cog.mi.us","dst.mi.us","eaton.mi.us","gen.mi.us","mus.mi.us","tec.mi.us","washtenaw.mi.us","uy","com.uy","edu.uy","gub.uy","mil.uy","net.uy","org.uy","uz","co.uz","com.uz","net.uz","org.uz","va","vc","com.vc","net.vc","org.vc","gov.vc","mil.vc","edu.vc","ve","arts.ve","co.ve","com.ve","e12.ve","edu.ve","firm.ve","gob.ve","gov.ve","info.ve","int.ve","mil.ve","net.ve","org.ve","rec.ve","store.ve","tec.ve","web.ve","vg","vi","co.vi","com.vi","k12.vi","net.vi","org.vi","vn","com.vn","net.vn","org.vn","edu.vn","gov.vn","int.vn","ac.vn","biz.vn","info.vn","name.vn","pro.vn","health.vn","vu","com.vu","edu.vu","net.vu","org.vu","wf","ws","com.ws","net.ws","org.ws","gov.ws","edu.ws","yt","امارات","հայ","বাংলা","бг","бел","中国","中國","الجزائر","مصر","ею","ευ","موريتانيا","გე","ελ","香港","公司.香港","教育.香港","政府.香港","個人.香港","網絡.香港","組織.香港","ಭಾರತ","ଭାରତ","ভাৰত","भारतम्","भारोत","ڀارت","ഭാരതം","भारत","بارت","بھارت","భారత్","ભારત","ਭਾਰਤ","ভারত","இந்தியா","ایران","ايران","عراق","الاردن","한국","қаз","ලංකා","இலங்கை","المغرب","мкд","мон","澳門","澳门","مليسيا","عمان","پاکستان","پاكستان","فلسطين","срб","пр.срб","орг.срб","обр.срб","од.срб","упр.срб","ак.срб","рф","قطر","السعودية","السعودیة","السعودیۃ","السعوديه","سودان","新加坡","சிங்கப்பூர்","سورية","سوريا","ไทย","ศึกษา.ไทย","ธุรกิจ.ไทย","รัฐบาล.ไทย","ทหาร.ไทย","เน็ต.ไทย","องค์กร.ไทย","تونس","台灣","台湾","臺灣","укр","اليمن","xxx","*.ye","ac.za","agric.za","alt.za","co.za","edu.za","gov.za","grondar.za","law.za","mil.za","net.za","ngo.za","nic.za","nis.za","nom.za","org.za","school.za","tm.za","web.za","zm","ac.zm","biz.zm","co.zm","com.zm","edu.zm","gov.zm","info.zm","mil.zm","net.zm","org.zm","sch.zm","zw","ac.zw","co.zw","gov.zw","mil.zw","org.zw","aaa","aarp","abarth","abb","abbott","abbvie","abc","able","abogado","abudhabi","academy","accenture","accountant","accountants","aco","actor","adac","ads","adult","aeg","aetna","afamilycompany","afl","africa","agakhan","agency","aig","aigo","airbus","airforce","airtel","akdn","alfaromeo","alibaba","alipay","allfinanz","allstate","ally","alsace","alstom","amazon","americanexpress","americanfamily","amex","amfam","amica","amsterdam","analytics","android","anquan","anz","aol","apartments","app","apple","aquarelle","arab","aramco","archi","army","art","arte","asda","associates","athleta","attorney","auction","audi","audible","audio","auspost","author","auto","autos","avianca","aws","axa","azure","baby","baidu","banamex","bananarepublic","band","bank","bar","barcelona","barclaycard","barclays","barefoot","bargains","baseball","basketball","bauhaus","bayern","bbc","bbt","bbva","bcg","bcn","beats","beauty","beer","bentley","berlin","best","bestbuy","bet","bharti","bible","bid","bike","bing","bingo","bio","black","blackfriday","blockbuster","blog","bloomberg","blue","bms","bmw","bnpparibas","boats","boehringer","bofa","bom","bond","boo","book","booking","bosch","bostik","boston","bot","boutique","box","bradesco","bridgestone","broadway","broker","brother","brussels","budapest","bugatti","build","builders","business","buy","buzz","bzh","cab","cafe","cal","call","calvinklein","cam","camera","camp","cancerresearch","canon","capetown","capital","capital
one","car","caravan","cards","care","career","careers","cars","casa","case","caseih","cash","casino","catering","catholic","cba","cbn","cbre","cbs","ceb","center","ceo","cern","cfa","cfd","chanel","channel","charity","chase","chat","cheap","chintai","christmas","chrome","church","cipriani","circle","cisco","citadel","citi","citic","city","cityeats","claims","cleaning","click","clinic","clinique","clothing","cloud","club","clubmed","coach","codes","coffee","college","cologne","comcast","commbank","community","company","compare","computer","comsec","condos","construction","consulting","contact","contractors","cooking","cookingchannel","cool","corsica","country","coupon","coupons","courses","cpa","credit","creditcard","creditunion","cricket","crown","crs","cruise","cruises","csc","cuisinella","cymru","cyou","dabur","dad","dance","data","date","dating","datsun","day","dclk","dds","deal","dealer","deals","degree","delivery","dell","deloitte","delta","democrat","dental","dentist","desi","design","dev","dhl","diamonds","diet","digital","direct","directory","discount","discover","dish","diy","dnp","docs","doctor","dog","domains","dot","download","drive","dtv","dubai","duck","dunlop","dupont","durban","dvag","dvr","earth","eat","eco","edeka","education","email","emerck","energy","engineer","engineering","enterprises","epson","equipment","ericsson","erni","esq","estate","esurance","etisalat","eurovision","eus","events","exchange","expert","exposed","express","extraspace","fage","fail","fairwinds","faith","family","fan","fans","farm","farmers","fashion","fast","fedex","feedback","ferrari","ferrero","fiat","fidelity","fido","film","final","finance","financial","fire","firestone","firmdale","fish","fishing","fit","fitness","flickr","flights","flir","florist","flowers","fly","foo","food","foodnetwork","football","ford","forex","forsale","forum","foundation","fox","free","fresenius","frl","frogans","frontdoor","frontier","ftr","fujitsu","fujixerox","fun","fund","furniture","futbol","fyi","gal","gallery","gallo","gallup","game","games","gap","garden","gay","gbiz","gdn","gea","gent","genting","george","ggee","gift","gifts","gives","giving","glade","glass","gle","global","globo","gmail","gmbh","gmo","gmx","godaddy","gold","goldpoint","golf","goo","goodyear","goog","google","gop","got","grainger","graphics","gratis","green","gripe","grocery","group","guardian","gucci","guge","guide","guitars","guru","hair","hamburg","hangout","haus","hbo","hdfc","hdfcbank","health","healthcare","help","helsinki","here","hermes","hgtv","hiphop","hisamitsu","hitachi","hiv","hkt","hockey","holdings","holiday","homedepot","homegoods","homes","homesense","honda","horse","hospital","host","hosting","hot","hoteles","hotels","hotmail","house","how","hsbc","hughes","hyatt","hyundai","ibm","icbc","ice","icu","ieee","ifm","ikano","imamat","imdb","immo","immobilien","inc","industries","infiniti","ing","ink","institute","insurance","insure","intel","international","intuit","investments","ipiranga","irish","ismaili","ist","istanbul","itau","itv","iveco","jaguar","java","jcb","jcp","jeep","jetzt","jewelry","jio","jll","jmp","jnj","joburg","jot","joy","jpmorgan","jprs","juegos","juniper","kaufen","kddi","kerryhotels","kerrylogistics","kerryproperties","kfh","kia","kim","kinder","kindle","kitchen","kiwi","koeln","komatsu","kosher","kpmg","kpn","krd","kred","kuokgroup","kyoto","lacaixa","lamborghini","lamer","lancaster","lancia","land","landrover","lanxess","lasalle","lat","latino","latrobe","law","lawyer","lds","lease","leclerc","lefrak","leg
al","lego","lexus","lgbt","lidl","life","lifeinsurance","lifestyle","lighting","like","lilly","limited","limo","lincoln","linde","link","lipsy","live","living","lixil","llc","llp","loan","loans","locker","locus","loft","lol","london","lotte","lotto","love","lpl","lplfinancial","ltd","ltda","lundbeck","lupin","luxe","luxury","macys","madrid","maif","maison","makeup","man","management","mango","map","market","marketing","markets","marriott","marshalls","maserati","mattel","mba","mckinsey","med","media","meet","melbourne","meme","memorial","men","menu","merckmsd","metlife","miami","microsoft","mini","mint","mit","mitsubishi","mlb","mls","mma","mobile","moda","moe","moi","mom","monash","money","monster","mormon","mortgage","moscow","moto","motorcycles","mov","movie","msd","mtn","mtr","mutual","nab","nadex","nagoya","nationwide","natura","navy","nba","nec","netbank","netflix","network","neustar","new","newholland","news","next","nextdirect","nexus","nfl","ngo","nhk","nico","nike","nikon","ninja","nissan","nissay","nokia","northwesternmutual","norton","now","nowruz","nowtv","nra","nrw","ntt","nyc","obi","observer","off","office","okinawa","olayan","olayangroup","oldnavy","ollo","omega","one","ong","onl","online","onyourside","ooo","open","oracle","orange","organic","origins","osaka","otsuka","ott","ovh","page","panasonic","paris","pars","partners","parts","party","passagens","pay","pccw","pet","pfizer","pharmacy","phd","philips","phone","photo","photography","photos","physio","pics","pictet","pictures","pid","pin","ping","pink","pioneer","pizza","place","play","playstation","plumbing","plus","pnc","pohl","poker","politie","porn","pramerica","praxi","press","prime","prod","productions","prof","progressive","promo","properties","property","protection","pru","prudential","pub","pwc","qpon","quebec","quest","qvc","racing","radio","raid","read","realestate","realtor","realty","recipes","red","redstone","redumbrella","rehab","reise","reisen","reit","reliance","ren","rent","rentals","repair","report","republican","rest","restaurant","review","reviews","rexroth","rich","richardli","ricoh","rightathome","ril","rio","rip","rmit","rocher","rocks","rodeo","rogers","room","rsvp","rugby","ruhr","run","rwe","ryukyu","saarland","safe","safety","sakura","sale","salon","samsclub","samsung","sandvik","sandvikcoromant","sanofi","sap","sarl","sas","save","saxo","sbi","sbs","sca","scb","schaeffler","schmidt","scholarships","school","schule","schwarz","science","scjohnson","scor","scot","search","seat","secure","security","seek","select","sener","services","ses","seven","sew","sex","sexy","sfr","shangrila","sharp","shaw","shell","shia","shiksha","shoes","shop","shopping","shouji","show","showtime","shriram","silk","sina","singles","site","ski","skin","sky","skype","sling","smart","smile","sncf","soccer","social","softbank","software","sohu","solar","solutions","song","sony","soy","spa","space","sport","spot","spreadbetting","srl","stada","staples","star","statebank","statefarm","stc","stcgroup","stockholm","storage","store","stream","studio","study","style","sucks","supplies","supply","support","surf","surgery","suzuki","swatch","swiftcover","swiss","sydney","symantec","systems","tab","taipei","talk","taobao","target","tatamotors","tatar","tattoo","tax","taxi","tci","tdk","team","tech","technology","temasek","tennis","teva","thd","theater","theatre","tiaa","tickets","tienda","tiffany","tips","tires","tirol","tjmaxx","tjx","tkmaxx","tmall","today","tokyo","tools","top","toray","toshiba","total","tours","town","toyota","
toys","trade","trading","training","travel","travelchannel","travelers","travelersinsurance","trust","trv","tube","tui","tunes","tushu","tvs","ubank","ubs","unicom","university","uno","uol","ups","vacations","vana","vanguard","vegas","ventures","verisign","versicherung","vet","viajes","video","vig","viking","villas","vin","vip","virgin","visa","vision","viva","vivo","vlaanderen","vodka","volkswagen","volvo","vote","voting","voto","voyage","vuelos","wales","walmart","walter","wang","wanggou","watch","watches","weather","weatherchannel","webcam","weber","website","wed","wedding","weibo","weir","whoswho","wien","wiki","williamhill","win","windows","wine","winners","wme","wolterskluwer","woodside","work","works","world","wow","wtc","wtf","xbox","xerox","xfinity","xihuan","xin","कॉम","セール","佛山","慈善","集团","在线","大众汽车","点看","คอม","八卦","موقع","公益","公司","香格里拉","网站","移动","我爱你","москва","католик","онлайн","сайт","联通","קום","时尚","微博","淡马锡","ファッション","орг","नेट","ストア","アマゾン","삼성","商标","商店","商城","дети","ポイント","新闻","工行","家電","كوم","中文网","中信","娱乐","谷歌","電訊盈科","购物","クラウド","通販","网店","संगठन","餐厅","网络","ком","亚马逊","诺基亚","食品","飞利浦","手表","手机","ارامكو","العليان","اتصالات","بازار","ابوظبي","كاثوليك","همراه","닷컴","政府","شبكة","بيتك","عرب","机构","组织机构","健康","招聘","рус","珠宝","大拿","みんな","グーグル","世界","書籍","网址","닷넷","コム","天主教","游戏","vermögensberater","vermögensberatung","企业","信息","嘉里大酒店","嘉里","广东","政务","xyz","yachts","yahoo","yamaxun","yandex","yodobashi","yoga","yokohama","you","youtube","yun","zappos","zara","zero","zip","zone","zuerich","cc.ua","inf.ua","ltd.ua","adobeaemcloud.com","adobeaemcloud.net","*.dev.adobeaemcloud.com","beep.pl","barsy.ca","*.compute.estate","*.alces.network","altervista.org","alwaysdata.net","cloudfront.net","*.compute.amazonaws.com","*.compute-1.amazonaws.com","*.compute.amazonaws.com.cn","us-east-1.amazonaws.com","cn-north-1.eb.amazonaws.com.cn","cn-northwest-1.eb.amazonaws.com.cn","elasticbeanstalk.com","ap-northeast-1.elasticbeanstalk.com","ap-northeast-2.elasticbeanstalk.com","ap-northeast-3.elasticbeanstalk.com","ap-south-1.elasticbeanstalk.com","ap-southeast-1.elasticbeanstalk.com","ap-southeast-2.elasticbeanstalk.com","ca-central-1.elasticbeanstalk.com","eu-central-1.elasticbeanstalk.com","eu-west-1.elasticbeanstalk.com","eu-west-2.elasticbeanstalk.com","eu-west-3.elasticbeanstalk.com","sa-east-1.elasticbeanstalk.com","us-east-1.elasticbeanstalk.com","us-east-2.elasticbeanstalk.com","us-gov-west-1.elasticbeanstalk.com","us-west-1.elasticbeanstalk.com","us-west-2.elasticbeanstalk.com","*.elb.amazonaws.com","*.elb.amazonaws.com.cn","s3.amazonaws.com","s3-ap-northeast-1.amazonaws.com","s3-ap-northeast-2.amazonaws.com","s3-ap-south-1.amazonaws.com","s3-ap-southeast-1.amazonaws.com","s3-ap-southeast-2.amazonaws.com","s3-ca-central-1.amazonaws.com","s3-eu-central-1.amazonaws.com","s3-eu-west-1.amazonaws.com","s3-eu-west-2.amazonaws.com","s3-eu-west-3.amazonaws.com","s3-external-1.amazonaws.com","s3-fips-us-gov-west-1.amazonaws.com","s3-sa-east-1.amazonaws.com","s3-us-gov-west-1.amazonaws.com","s3-us-east-2.amazonaws.com","s3-us-west-1.amazonaws.com","s3-us-west-2.amazonaws.com","s3.ap-northeast-2.amazonaws.com","s3.ap-south-1.amazonaws.com","s3.cn-north-1.amazonaws.com.cn","s3.ca-central-1.amazonaws.com","s3.eu-central-1.amazonaws.com","s3.eu-west-2.amazonaws.com","s3.eu-west-3.amazonaws.com","s3.us-east-2.amazonaws.com","s3.dualstack.ap-northeast-1.amazonaws.com","s3.dualstack.ap-northeast-2.amazonaws.com","s3.dualstack.ap-south-1.amazonaws.com","s3.dualstack.ap-southeast-1.amazonaws.com","s3.d
ualstack.ap-southeast-2.amazonaws.com","s3.dualstack.ca-central-1.amazonaws.com","s3.dualstack.eu-central-1.amazonaws.com","s3.dualstack.eu-west-1.amazonaws.com","s3.dualstack.eu-west-2.amazonaws.com","s3.dualstack.eu-west-3.amazonaws.com","s3.dualstack.sa-east-1.amazonaws.com","s3.dualstack.us-east-1.amazonaws.com","s3.dualstack.us-east-2.amazonaws.com","s3-website-us-east-1.amazonaws.com","s3-website-us-west-1.amazonaws.com","s3-website-us-west-2.amazonaws.com","s3-website-ap-northeast-1.amazonaws.com","s3-website-ap-southeast-1.amazonaws.com","s3-website-ap-southeast-2.amazonaws.com","s3-website-eu-west-1.amazonaws.com","s3-website-sa-east-1.amazonaws.com","s3-website.ap-northeast-2.amazonaws.com","s3-website.ap-south-1.amazonaws.com","s3-website.ca-central-1.amazonaws.com","s3-website.eu-central-1.amazonaws.com","s3-website.eu-west-2.amazonaws.com","s3-website.eu-west-3.amazonaws.com","s3-website.us-east-2.amazonaws.com","amsw.nl","t3l3p0rt.net","tele.amune.org","apigee.io","on-aptible.com","user.aseinet.ne.jp","gv.vc","d.gv.vc","user.party.eus","pimienta.org","poivron.org","potager.org","sweetpepper.org","myasustor.com","myfritz.net","*.awdev.ca","*.advisor.ws","b-data.io","backplaneapp.io","balena-devices.com","app.banzaicloud.io","betainabox.com","bnr.la","blackbaudcdn.net","boomla.net","boxfuse.io","square7.ch","bplaced.com","bplaced.de","square7.de","bplaced.net","square7.net","browsersafetymark.io","uk0.bigv.io","dh.bytemark.co.uk","vm.bytemark.co.uk","mycd.eu","carrd.co","crd.co","uwu.ai","ae.org","ar.com","br.com","cn.com","com.de","com.se","de.com","eu.com","gb.com","gb.net","hu.com","hu.net","jp.net","jpn.com","kr.com","mex.com","no.com","qc.com","ru.com","sa.com","se.net","uk.com","uk.net","us.com","uy.com","za.bz","za.com","africa.com","gr.com","in.net","us.org","co.com","c.la","certmgr.org","xenapponazure.com","discourse.group","discourse.team","virtueeldomein.nl","cleverapps.io","*.lcl.dev","*.stg.dev","c66.me","cloud66.ws","cloud66.zone","jdevcloud.com","wpdevcloud.com","cloudaccess.host","freesite.host","cloudaccess.net","cloudcontrolled.com","cloudcontrolapp.com","cloudera.site","trycloudflare.com","workers.dev","wnext.app","co.ca","*.otap.co","co.cz","c.cdn77.org","cdn77-ssl.net","r.cdn77.net","rsc.cdn77.org","ssl.origin.cdn77-secure.org","cloudns.asia","cloudns.biz","cloudns.club","cloudns.cc","cloudns.eu","cloudns.in","cloudns.info","cloudns.org","cloudns.pro","cloudns.pw","cloudns.us","cloudeity.net","cnpy.gdn","co.nl","co.no","webhosting.be","hosting-cluster.nl","ac.ru","edu.ru","gov.ru","int.ru","mil.ru","test.ru","dyn.cosidns.de","dynamisches-dns.de","dnsupdater.de","internet-dns.de","l-o-g-i-n.de","dynamic-dns.info","feste-ip.net","knx-server.net","static-access.net","realm.cz","*.cryptonomic.net","cupcake.is","*.customer-oci.com","*.oci.customer-oci.com","*.ocp.customer-oci.com","*.ocs.customer-oci.com","cyon.link","cyon.site","daplie.me","localhost.daplie.me","dattolocal.com","dattorelay.com","dattoweb.com","mydatto.com","dattolocal.net","mydatto.net","biz.dk","co.dk","firm.dk","reg.dk","store.dk","*.dapps.earth","*.bzz.dapps.earth","builtwithdark.com","edgestack.me","debian.net","dedyn.io","dnshome.de","online.th","shop.th","drayddns.com","dreamhosters.com","mydrobo.com","drud.io","drud.us","duckdns.org","dy.fi","tunk.org","dyndns-at-home.com","dyndns-at-work.com","dyndns-blog.com","dyndns-free.com","dyndns-home.com","dyndns-ip.com","dyndns-mail.com","dyndns-office.com","dyndns-pics.com","dyndns-remote.com","dyndns-server.com","dyndns-web.com","dyndns-wiki.co
m","dyndns-work.com","dyndns.biz","dyndns.info","dyndns.org","dyndns.tv","at-band-camp.net","ath.cx","barrel-of-knowledge.info","barrell-of-knowledge.info","better-than.tv","blogdns.com","blogdns.net","blogdns.org","blogsite.org","boldlygoingnowhere.org","broke-it.net","buyshouses.net","cechire.com","dnsalias.com","dnsalias.net","dnsalias.org","dnsdojo.com","dnsdojo.net","dnsdojo.org","does-it.net","doesntexist.com","doesntexist.org","dontexist.com","dontexist.net","dontexist.org","doomdns.com","doomdns.org","dvrdns.org","dyn-o-saur.com","dynalias.com","dynalias.net","dynalias.org","dynathome.net","dyndns.ws","endofinternet.net","endofinternet.org","endoftheinternet.org","est-a-la-maison.com","est-a-la-masion.com","est-le-patron.com","est-mon-blogueur.com","for-better.biz","for-more.biz","for-our.info","for-some.biz","for-the.biz","forgot.her.name","forgot.his.name","from-ak.com","from-al.com","from-ar.com","from-az.net","from-ca.com","from-co.net","from-ct.com","from-dc.com","from-de.com","from-fl.com","from-ga.com","from-hi.com","from-ia.com","from-id.com","from-il.com","from-in.com","from-ks.com","from-ky.com","from-la.net","from-ma.com","from-md.com","from-me.org","from-mi.com","from-mn.com","from-mo.com","from-ms.com","from-mt.com","from-nc.com","from-nd.com","from-ne.com","from-nh.com","from-nj.com","from-nm.com","from-nv.com","from-ny.net","from-oh.com","from-ok.com","from-or.com","from-pa.com","from-pr.com","from-ri.com","from-sc.com","from-sd.com","from-tn.com","from-tx.com","from-ut.com","from-va.com","from-vt.com","from-wa.com","from-wi.com","from-wv.com","from-wy.com","ftpaccess.cc","fuettertdasnetz.de","game-host.org","game-server.cc","getmyip.com","gets-it.net","go.dyndns.org","gotdns.com","gotdns.org","groks-the.info","groks-this.info","ham-radio-op.net","here-for-more.info","hobby-site.com","hobby-site.org","home.dyndns.org","homedns.org","homeftp.net","homeftp.org","homeip.net","homelinux.com","homelinux.net","homelinux.org","homeunix.com","homeunix.net","homeunix.org","iamallama.com","in-the-band.net","is-a-anarchist.com","is-a-blogger.com","is-a-bookkeeper.com","is-a-bruinsfan.org","is-a-bulls-fan.com","is-a-candidate.org","is-a-caterer.com","is-a-celticsfan.org","is-a-chef.com","is-a-chef.net","is-a-chef.org","is-a-conservative.com","is-a-cpa.com","is-a-cubicle-slave.com","is-a-democrat.com","is-a-designer.com","is-a-doctor.com","is-a-financialadvisor.com","is-a-geek.com","is-a-geek.net","is-a-geek.org","is-a-green.com","is-a-guru.com","is-a-hard-worker.com","is-a-hunter.com","is-a-knight.org","is-a-landscaper.com","is-a-lawyer.com","is-a-liberal.com","is-a-libertarian.com","is-a-linux-user.org","is-a-llama.com","is-a-musician.com","is-a-nascarfan.com","is-a-nurse.com","is-a-painter.com","is-a-patsfan.org","is-a-personaltrainer.com","is-a-photographer.com","is-a-player.com","is-a-republican.com","is-a-rockstar.com","is-a-socialist.com","is-a-soxfan.org","is-a-student.com","is-a-teacher.com","is-a-techie.com","is-a-therapist.com","is-an-accountant.com","is-an-actor.com","is-an-actress.com","is-an-anarchist.com","is-an-artist.com","is-an-engineer.com","is-an-entertainer.com","is-by.us","is-certified.com","is-found.org","is-gone.com","is-into-anime.com","is-into-cars.com","is-into-cartoons.com","is-into-games.com","is-leet.com","is-lost.org","is-not-certified.com","is-saved.org","is-slick.com","is-uberleet.com","is-very-bad.org","is-very-evil.org","is-very-good.org","is-very-nice.org","is-very-sweet.org","is-with-theband.com","isa-geek.com","isa-geek.net","isa-geek.org","is
a-hockeynut.com","issmarterthanyou.com","isteingeek.de","istmein.de","kicks-ass.net","kicks-ass.org","knowsitall.info","land-4-sale.us","lebtimnetz.de","leitungsen.de","likes-pie.com","likescandy.com","merseine.nu","mine.nu","misconfused.org","mypets.ws","myphotos.cc","neat-url.com","office-on-the.net","on-the-web.tv","podzone.net","podzone.org","readmyblog.org","saves-the-whales.com","scrapper-site.net","scrapping.cc","selfip.biz","selfip.com","selfip.info","selfip.net","selfip.org","sells-for-less.com","sells-for-u.com","sells-it.net","sellsyourhome.org","servebbs.com","servebbs.net","servebbs.org","serveftp.net","serveftp.org","servegame.org","shacknet.nu","simple-url.com","space-to-rent.com","stuff-4-sale.org","stuff-4-sale.us","teaches-yoga.com","thruhere.net","traeumtgerade.de","webhop.biz","webhop.info","webhop.net","webhop.org","worse-than.tv","writesthisblog.com","ddnss.de","dyn.ddnss.de","dyndns.ddnss.de","dyndns1.de","dyn-ip24.de","home-webserver.de","dyn.home-webserver.de","myhome-server.de","ddnss.org","definima.net","definima.io","bci.dnstrace.pro","ddnsfree.com","ddnsgeek.com","giize.com","gleeze.com","kozow.com","loseyourip.com","ooguy.com","theworkpc.com","casacam.net","dynu.net","accesscam.org","camdvr.org","freeddns.org","mywire.org","webredirect.org","myddns.rocks","blogsite.xyz","dynv6.net","e4.cz","en-root.fr","mytuleap.com","onred.one","staging.onred.one","enonic.io","customer.enonic.io","eu.org","al.eu.org","asso.eu.org","at.eu.org","au.eu.org","be.eu.org","bg.eu.org","ca.eu.org","cd.eu.org","ch.eu.org","cn.eu.org","cy.eu.org","cz.eu.org","de.eu.org","dk.eu.org","edu.eu.org","ee.eu.org","es.eu.org","fi.eu.org","fr.eu.org","gr.eu.org","hr.eu.org","hu.eu.org","ie.eu.org","il.eu.org","in.eu.org","int.eu.org","is.eu.org","it.eu.org","jp.eu.org","kr.eu.org","lt.eu.org","lu.eu.org","lv.eu.org","mc.eu.org","me.eu.org","mk.eu.org","mt.eu.org","my.eu.org","net.eu.org","ng.eu.org","nl.eu.org","no.eu.org","nz.eu.org","paris.eu.org","pl.eu.org","pt.eu.org","q-a.eu.org","ro.eu.org","ru.eu.org","se.eu.org","si.eu.org","sk.eu.org","tr.eu.org","uk.eu.org","us.eu.org","eu-1.evennode.com","eu-2.evennode.com","eu-3.evennode.com","eu-4.evennode.com","us-1.evennode.com","us-2.evennode.com","us-3.evennode.com","us-4.evennode.com","twmail.cc","twmail.net","twmail.org","mymailer.com.tw","url.tw","apps.fbsbx.com","ru.net","adygeya.ru","bashkiria.ru","bir.ru","cbg.ru","com.ru","dagestan.ru","grozny.ru","kalmykia.ru","kustanai.ru","marine.ru","mordovia.ru","msk.ru","mytis.ru","nalchik.ru","nov.ru","pyatigorsk.ru","spb.ru","vladikavkaz.ru","vladimir.ru","abkhazia.su","adygeya.su","aktyubinsk.su","arkhangelsk.su","armenia.su","ashgabad.su","azerbaijan.su","balashov.su","bashkiria.su","bryansk.su","bukhara.su","chimkent.su","dagestan.su","east-kazakhstan.su","exnet.su","georgia.su","grozny.su","ivanovo.su","jambyl.su","kalmykia.su","kaluga.su","karacol.su","karaganda.su","karelia.su","khakassia.su","krasnodar.su","kurgan.su","kustanai.su","lenug.su","mangyshlak.su","mordovia.su","msk.su","murmansk.su","nalchik.su","navoi.su","north-kazakhstan.su","nov.su","obninsk.su","penza.su","pokrovsk.su","sochi.su","spb.su","tashkent.su","termez.su","togliatti.su","troitsk.su","tselinograd.su","tula.su","tuva.su","vladikavkaz.su","vladimir.su","vologda.su","channelsdvr.net","u.channelsdvr.net","fastly-terrarium.com","fastlylb.net","map.fastlylb.net","freetls.fastly.net","map.fastly.net","a.prod.fastly.net","global.prod.fastly.net","a.ssl.fastly.net","b.ssl.fastly.net","global.ssl.fastly.net","fastpanel.direc
t","fastvps-server.com","fhapp.xyz","fedorainfracloud.org","fedorapeople.org","cloud.fedoraproject.org","app.os.fedoraproject.org","app.os.stg.fedoraproject.org","mydobiss.com","filegear.me","filegear-au.me","filegear-de.me","filegear-gb.me","filegear-ie.me","filegear-jp.me","filegear-sg.me","firebaseapp.com","flynnhub.com","flynnhosting.net","0e.vc","freebox-os.com","freeboxos.com","fbx-os.fr","fbxos.fr","freebox-os.fr","freeboxos.fr","freedesktop.org","*.futurecms.at","*.ex.futurecms.at","*.in.futurecms.at","futurehosting.at","futuremailing.at","*.ex.ortsinfo.at","*.kunden.ortsinfo.at","*.statics.cloud","service.gov.uk","gehirn.ne.jp","usercontent.jp","gentapps.com","lab.ms","github.io","githubusercontent.com","gitlab.io","glitch.me","lolipop.io","cloudapps.digital","london.cloudapps.digital","homeoffice.gov.uk","ro.im","shop.ro","goip.de","run.app","a.run.app","web.app","*.0emm.com","appspot.com","*.r.appspot.com","blogspot.ae","blogspot.al","blogspot.am","blogspot.ba","blogspot.be","blogspot.bg","blogspot.bj","blogspot.ca","blogspot.cf","blogspot.ch","blogspot.cl","blogspot.co.at","blogspot.co.id","blogspot.co.il","blogspot.co.ke","blogspot.co.nz","blogspot.co.uk","blogspot.co.za","blogspot.com","blogspot.com.ar","blogspot.com.au","blogspot.com.br","blogspot.com.by","blogspot.com.co","blogspot.com.cy","blogspot.com.ee","blogspot.com.eg","blogspot.com.es","blogspot.com.mt","blogspot.com.ng","blogspot.com.tr","blogspot.com.uy","blogspot.cv","blogspot.cz","blogspot.de","blogspot.dk","blogspot.fi","blogspot.fr","blogspot.gr","blogspot.hk","blogspot.hr","blogspot.hu","blogspot.ie","blogspot.in","blogspot.is","blogspot.it","blogspot.jp","blogspot.kr","blogspot.li","blogspot.lt","blogspot.lu","blogspot.md","blogspot.mk","blogspot.mr","blogspot.mx","blogspot.my","blogspot.nl","blogspot.no","blogspot.pe","blogspot.pt","blogspot.qa","blogspot.re","blogspot.ro","blogspot.rs","blogspot.ru","blogspot.se","blogspot.sg","blogspot.si","blogspot.sk","blogspot.sn","blogspot.td","blogspot.tw","blogspot.ug","blogspot.vn","cloudfunctions.net","cloud.goog","codespot.com","googleapis.com","googlecode.com","pagespeedmobilizer.com","publishproxy.com","withgoogle.com","withyoutube.com","awsmppl.com","fin.ci","free.hr","caa.li","ua.rs","conf.se","hs.zone","hs.run","hashbang.sh","hasura.app","hasura-app.io","hepforge.org","herokuapp.com","herokussl.com","myravendb.com","ravendb.community","ravendb.me","development.run","ravendb.run","bpl.biz","orx.biz","ng.city","biz.gl","ng.ink","col.ng","firm.ng","gen.ng","ltd.ng","ngo.ng","ng.school","sch.so","häkkinen.fi","*.moonscale.io","moonscale.net","iki.fi","dyn-berlin.de","in-berlin.de","in-brb.de","in-butter.de","in-dsl.de","in-dsl.net","in-dsl.org","in-vpn.de","in-vpn.net","in-vpn.org","biz.at","info.at","info.cx","ac.leg.br","al.leg.br","am.leg.br","ap.leg.br","ba.leg.br","ce.leg.br","df.leg.br","es.leg.br","go.leg.br","ma.leg.br","mg.leg.br","ms.leg.br","mt.leg.br","pa.leg.br","pb.leg.br","pe.leg.br","pi.leg.br","pr.leg.br","rj.leg.br","rn.leg.br","ro.leg.br","rr.leg.br","rs.leg.br","sc.leg.br","se.leg.br","sp.leg.br","to.leg.br","pixolino.com","ipifony.net","mein-iserv.de","test-iserv.de","iserv.dev","iobb.net","myjino.ru","*.hosting.myjino.ru","*.landing.myjino.ru","*.spectrum.myjino.ru","*.vps.myjino.ru","*.triton.zone","*.cns.joyent.com","js.org","kaas.gg","khplay.nl","keymachine.de","kinghost.net","uni5.net","knightpoint.systems","oya.to","co.krd","edu.krd","git-repos.de","lcube-server.de","svn-repos.de","leadpages.co","lpages.co","lpusercontent.com","lelux.sit
e","co.business","co.education","co.events","co.financial","co.network","co.place","co.technology","app.lmpm.com","linkitools.space","linkyard.cloud","linkyard-cloud.ch","members.linode.com","nodebalancer.linode.com","we.bs","loginline.app","loginline.dev","loginline.io","loginline.services","loginline.site","krasnik.pl","leczna.pl","lubartow.pl","lublin.pl","poniatowa.pl","swidnik.pl","uklugs.org","glug.org.uk","lug.org.uk","lugs.org.uk","barsy.bg","barsy.co.uk","barsyonline.co.uk","barsycenter.com","barsyonline.com","barsy.club","barsy.de","barsy.eu","barsy.in","barsy.info","barsy.io","barsy.me","barsy.menu","barsy.mobi","barsy.net","barsy.online","barsy.org","barsy.pro","barsy.pub","barsy.shop","barsy.site","barsy.support","barsy.uk","*.magentosite.cloud","mayfirst.info","mayfirst.org","hb.cldmail.ru","miniserver.com","memset.net","cloud.metacentrum.cz","custom.metacentrum.cz","flt.cloud.muni.cz","usr.cloud.muni.cz","meteorapp.com","eu.meteorapp.com","co.pl","azurecontainer.io","azurewebsites.net","azure-mobile.net","cloudapp.net","mozilla-iot.org","bmoattachments.org","net.ru","org.ru","pp.ru","ui.nabu.casa","pony.club","of.fashion","on.fashion","of.football","in.london","of.london","for.men","and.mom","for.mom","for.one","for.sale","of.work","to.work","nctu.me","bitballoon.com","netlify.com","4u.com","ngrok.io","nh-serv.co.uk","nfshost.com","dnsking.ch","mypi.co","n4t.co","001www.com","ddnslive.com","myiphost.com","forumz.info","16-b.it","32-b.it","64-b.it","soundcast.me","tcp4.me","dnsup.net","hicam.net","now-dns.net","ownip.net","vpndns.net","dynserv.org","now-dns.org","x443.pw","now-dns.top","ntdll.top","freeddns.us","crafting.xyz","zapto.xyz","nsupdate.info","nerdpol.ovh","blogsyte.com","brasilia.me","cable-modem.org","ciscofreak.com","collegefan.org","couchpotatofries.org","damnserver.com","ddns.me","ditchyourip.com","dnsfor.me","dnsiskinky.com","dvrcam.info","dynns.com","eating-organic.net","fantasyleague.cc","geekgalaxy.com","golffan.us","health-carereform.com","homesecuritymac.com","homesecuritypc.com","hopto.me","ilovecollege.info","loginto.me","mlbfan.org","mmafan.biz","myactivedirectory.com","mydissent.net","myeffect.net","mymediapc.net","mypsx.net","mysecuritycamera.com","mysecuritycamera.net","mysecuritycamera.org","net-freaks.com","nflfan.org","nhlfan.net","no-ip.ca","no-ip.co.uk","no-ip.net","noip.us","onthewifi.com","pgafan.net","point2this.com","pointto.us","privatizehealthinsurance.net","quicksytes.com","read-books.org","securitytactics.com","serveexchange.com","servehumour.com","servep2p.com","servesarcasm.com","stufftoread.com","ufcfan.org","unusualperson.com","workisboring.com","3utilities.com","bounceme.net","ddns.net","ddnsking.com","gotdns.ch","hopto.org","myftp.biz","myftp.org","myvnc.com","no-ip.biz","no-ip.info","no-ip.org","noip.me","redirectme.net","servebeer.com","serveblog.net","servecounterstrike.com","serveftp.com","servegame.com","servehalflife.com","servehttp.com","serveirc.com","serveminecraft.net","servemp3.com","servepics.com","servequake.com","sytes.net","webhop.me","zapto.org","stage.nodeart.io","nodum.co","nodum.io","pcloud.host","nyc.mn","nom.ae","nom.af","nom.ai","nom.al","nym.by","nom.bz","nym.bz","nom.cl","nym.ec","nom.gd","nom.ge","nom.gl","nym.gr","nom.gt","nym.gy","nym.hk","nom.hn","nym.ie","nom.im","nom.ke","nym.kz","nym.la","nym.lc","nom.li","nym.li","nym.lt","nym.lu","nom.lv","nym.me","nom.mk","nym.mn","nym.mx","nom.nu","nym.nz","nym.pe","nym.pt","nom.pw","nom.qa","nym.ro","nom.rs","nom.si","nym.sk","nom.st","nym.su","nym.sx","nom.tj",
"nym.tw","nom.ug","nom.uy","nom.vc","nom.vg","static.observableusercontent.com","cya.gg","cloudycluster.net","nid.io","opencraft.hosting","operaunite.com","skygearapp.com","outsystemscloud.com","ownprovider.com","own.pm","ox.rs","oy.lc","pgfog.com","pagefrontapp.com","art.pl","gliwice.pl","krakow.pl","poznan.pl","wroc.pl","zakopane.pl","pantheonsite.io","gotpantheon.com","mypep.link","perspecta.cloud","on-web.fr","*.platform.sh","*.platformsh.site","dyn53.io","co.bn","xen.prgmr.com","priv.at","prvcy.page","*.dweb.link","protonet.io","chirurgiens-dentistes-en-france.fr","byen.site","pubtls.org","qualifioapp.com","qbuser.com","instantcloud.cn","ras.ru","qa2.com","qcx.io","*.sys.qcx.io","dev-myqnapcloud.com","alpha-myqnapcloud.com","myqnapcloud.com","*.quipelements.com","vapor.cloud","vaporcloud.io","rackmaze.com","rackmaze.net","*.on-k3s.io","*.on-rancher.cloud","*.on-rio.io","readthedocs.io","rhcloud.com","app.render.com","onrender.com","repl.co","repl.run","resindevice.io","devices.resinstaging.io","hzc.io","wellbeingzone.eu","ptplus.fit","wellbeingzone.co.uk","git-pages.rit.edu","sandcats.io","logoip.de","logoip.com","schokokeks.net","gov.scot","scrysec.com","firewall-gateway.com","firewall-gateway.de","my-gateway.de","my-router.de","spdns.de","spdns.eu","firewall-gateway.net","my-firewall.org","myfirewall.org","spdns.org","senseering.net","biz.ua","co.ua","pp.ua","shiftedit.io","myshopblocks.com","shopitsite.com","mo-siemens.io","1kapp.com","appchizi.com","applinzi.com","sinaapp.com","vipsinaapp.com","siteleaf.net","bounty-full.com","alpha.bounty-full.com","beta.bounty-full.com","stackhero-network.com","static.land","dev.static.land","sites.static.land","apps.lair.io","*.stolos.io","spacekit.io","customer.speedpartner.de","api.stdlib.com","storj.farm","utwente.io","soc.srcf.net","user.srcf.net","temp-dns.com","applicationcloud.io","scapp.io","*.s5y.io","*.sensiosite.cloud","syncloud.it","diskstation.me","dscloud.biz","dscloud.me","dscloud.mobi","dsmynas.com","dsmynas.net","dsmynas.org","familyds.com","familyds.net","familyds.org","i234.me","myds.me","synology.me","vpnplus.to","direct.quickconnect.to","taifun-dns.de","gda.pl","gdansk.pl","gdynia.pl","med.pl","sopot.pl","edugit.org","telebit.app","telebit.io","*.telebit.xyz","gwiddle.co.uk","thingdustdata.com","cust.dev.thingdust.io","cust.disrec.thingdust.io","cust.prod.thingdust.io","cust.testing.thingdust.io","arvo.network","azimuth.network","bloxcms.com","townnews-staging.com","12hp.at","2ix.at","4lima.at","lima-city.at","12hp.ch","2ix.ch","4lima.ch","lima-city.ch","trafficplex.cloud","de.cool","12hp.de","2ix.de","4lima.de","lima-city.de","1337.pictures","clan.rip","lima-city.rocks","webspace.rocks","lima.zone","*.transurl.be","*.transurl.eu","*.transurl.nl","tuxfamily.org","dd-dns.de","diskstation.eu","diskstation.org","dray-dns.de","draydns.de","dyn-vpn.de","dynvpn.de","mein-vigor.de","my-vigor.de","my-wan.de","syno-ds.de","synology-diskstation.de","synology-ds.de","uber.space","*.uberspace.de","hk.com","hk.org","ltd.hk","inc.hk","virtualuser.de","virtual-user.de","urown.cloud","dnsupdate.info","lib.de.us","2038.io","router.management","v-info.info","voorloper.cloud","v.ua","wafflecell.com","*.webhare.dev","wedeploy.io","wedeploy.me","wedeploy.sh","remotewd.com","wmflabs.org","myforum.community","community-pro.de","diskussionsbereich.de","community-pro.net","meinforum.net","half.host","xnbay.com","u2.xnbay.com","u2-local.xnbay.com","cistron.nl","demon.nl","xs4all.space","yandexcloud.net","storage.yandexcloud.net","website.yandexcloud.
net","official.academy","yolasite.com","ybo.faith","yombo.me","homelink.one","ybo.party","ybo.review","ybo.science","ybo.trade","nohost.me","noho.st","za.net","za.org","now.sh","bss.design","basicserver.io","virtualserver.io","enterprisecloud.nu"]; + +/***/ }), +/* 51 */, +/* 52 */, +/* 53 */, +/* 54 */, +/* 55 */, +/* 56 */, +/* 57 */, +/* 58 */, +/* 59 */, +/* 60 */, +/* 61 */, +/* 62 */, +/* 63 */, +/* 64 */, +/* 65 */ +/***/ (function(module) { + +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + Disconnected: 1, + Preceding: 2, + Following: 4, + Contains: 8, + ContainedBy: 16, + ImplementationSpecific: 32 + }; + +}).call(this); + + +/***/ }), +/* 66 */, +/* 67 */, +/* 68 */, +/* 69 */, +/* 70 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +exports.URL = __webpack_require__(782).interface; +exports.serializeURL = __webpack_require__(936).serializeURL; +exports.serializeURLOrigin = __webpack_require__(936).serializeURLOrigin; +exports.basicURLParse = __webpack_require__(936).basicURLParse; +exports.setTheUsername = __webpack_require__(936).setTheUsername; +exports.setThePassword = __webpack_require__(936).setThePassword; +exports.serializeHost = __webpack_require__(936).serializeHost; +exports.serializeInteger = __webpack_require__(936).serializeInteger; +exports.parseURL = __webpack_require__(936).parseURL; + + +/***/ }), +/* 71 */, +/* 72 */, +/* 73 */, +/* 74 */, +/* 75 */, +/* 76 */, +/* 77 */, +/* 78 */, +/* 79 */, +/* 80 */, +/* 81 */, +/* 82 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map + +/***/ }), +/* 83 */, +/* 84 */, +/* 85 */, +/* 86 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var rng = __webpack_require__(139); +var bytesToUuid = __webpack_require__(722); + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +var _nodeId; +var _clockseq; + +// Previous uuid creation time +var _lastMSecs = 0; +var _lastNSecs = 0; + +// See https://github.com/uuidjs/uuid for API details +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; + + 
options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; + + // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + if (node == null || clockseq == null) { + var seedBytes = rng(); + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [ + seedBytes[0] | 0x01, + seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] + ]; + } + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } + + // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); + + // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; + + // Time since last uuid creation (in msecs) + var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; + + // Per 4.2.1.2, Bump clockseq on clock regression + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } + + // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } + + // Per 4.2.1.2 Throw error if too many uuids are requested + if (nsecs >= 10000) { + throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; + + // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + msecs += 12219292800000; + + // `time_low` + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; + + // `time_mid` + var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; + + // `time_high_and_version` + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + b[i++] = tmh >>> 16 & 0xff; + + // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + b[i++] = clockseq >>> 8 | 0x80; + + // `clock_seq_low` + b[i++] = clockseq & 0xff; + + // `node` + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf ? buf : bytesToUuid(b); +} + +module.exports = v1; + + +/***/ }), +/* 87 */ +/***/ (function(module) { + +module.exports = require("os"); + +/***/ }), +/* 88 */, +/* 89 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +const pubsuffix = __webpack_require__(562); + +// Gives the permutation of all possible domainMatch()es of a given domain. The +// array is in shortest-to-longest order. Handy for indexing. +const SPECIAL_USE_DOMAINS = ["local"]; // RFC 6761 +function permuteDomain(domain, allowSpecialUseDomain) { + let pubSuf = null; + if (allowSpecialUseDomain) { + const domainParts = domain.split("."); + if (SPECIAL_USE_DOMAINS.includes(domainParts[domainParts.length - 1])) { + pubSuf = `${domainParts[domainParts.length - 2]}.${ + domainParts[domainParts.length - 1] + }`; + } else { + pubSuf = pubsuffix.getPublicSuffix(domain); + } + } else { + pubSuf = pubsuffix.getPublicSuffix(domain); + } + + if (!pubSuf) { + return null; + } + if (pubSuf == domain) { + return [domain]; + } + + const prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com" + const parts = prefix.split(".").reverse(); + let cur = pubSuf; + const permutations = [cur]; + while (parts.length) { + cur = `${parts.shift()}.${cur}`; + permutations.push(cur); + } + return permutations; +} + +exports.permuteDomain = permuteDomain; + + +/***/ }), +/* 90 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(__webpack_require__(241)); + +var _sha = _interopRequireDefault(__webpack_require__(616)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; + +/***/ }), +/* 91 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var serialOrdered = __webpack_require__(892); + +// Public API +module.exports = serial; + +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} + + +/***/ }), +/* 92 */, +/* 93 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = __webpack_require__(622) +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = __webpack_require__(306) + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. 
+ if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +// parse a component of the expanded set. 
+// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? 
'(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. 
As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. 
+ var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. 
+ // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd + } + + // should be unreachable. 
+ throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} + + +/***/ }), +/* 94 */, +/* 95 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=link.js.map + +/***/ }), +/* 96 */, +/* 97 */ +/***/ (function() { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +if (typeof Symbol === undefined || !Symbol.asyncIterator) { + Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator"); +} +//# sourceMappingURL=index.js.map + +/***/ }), +/* 98 */, +/* 99 */, +/* 100 */, +/* 101 */, +/* 102 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.issueCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__webpack_require__(747)); +const os = __importStar(__webpack_require__(87)); +const utils_1 = __webpack_require__(82); +function issueCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueCommand = issueCommand; +//# sourceMappingURL=file-command.js.map + +/***/ }), +/* 103 */, +/* 104 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(__webpack_require__(676)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports.default = _default; + +/***/ }), +/* 105 */, +/* 106 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +var tslib = __webpack_require__(640); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var listenersMap = new WeakMap(); +var abortedMap = new WeakMap(); +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +var AbortSignal = /** @class */ (function () { + function AbortSignal() { + /** + * onabort event listener. + */ + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); + } + Object.defineProperty(AbortSignal.prototype, "aborted", { + /** + * Status of whether aborted or not. + * + * @readonly + */ + get: function () { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(AbortSignal, "none", { + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + get: function () { + return new AbortSignal(); + }, + enumerable: false, + configurable: true + }); + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + AbortSignal.prototype.addEventListener = function ( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + var listeners = listenersMap.get(this); + listeners.push(listener); + }; + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + AbortSignal.prototype.removeEventListener = function ( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + var listeners = listenersMap.get(this); + var index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); + } + }; + /** + * Dispatches a synthetic event to the AbortSignal. + */ + AbortSignal.prototype.dispatchEvent = function (_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + }; + return AbortSignal; +}()); +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. 
+ * + * @internal + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + var listeners = listenersMap.get(signal); + if (listeners) { + // Create a copy of listeners so mutations to the array + // (e.g. via removeListener calls) don't affect the listeners + // we invoke. + listeners.slice().forEach(function (listener) { + listener.call(signal, { type: "abort" }); + }); + } + abortedMap.set(signal, true); +} + +// Copyright (c) Microsoft Corporation. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +var AbortError = /** @class */ (function (_super) { + tslib.__extends(AbortError, _super); + function AbortError(message) { + var _this = _super.call(this, message) || this; + _this.name = "AbortError"; + return _this; + } + return AbortError; +}(Error)); +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. + * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +var AbortController = /** @class */ (function () { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + function AbortController(parentSignals) { + var _this = this; + this._signal = new AbortSignal(); + if (!parentSignals) { + return; + } + // coerce parentSignals into an array + if (!Array.isArray(parentSignals)) { + // eslint-disable-next-line prefer-rest-params + parentSignals = arguments; + } + for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) { + var parentSignal = parentSignals_1[_i]; + // if the parent signal has already had abort() called, + // then call abort on this signal as well. + if (parentSignal.aborted) { + this.abort(); + } + else { + // when the parent signal aborts, this signal should as well. + parentSignal.addEventListener("abort", function () { + _this.abort(); + }); + } + } + } + Object.defineProperty(AbortController.prototype, "signal", { + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. 
+ * + * @readonly + */ + get: function () { + return this._signal; + }, + enumerable: false, + configurable: true + }); + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + AbortController.prototype.abort = function () { + abortSignal(this._signal); + }; + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + AbortController.timeout = function (ms) { + var signal = new AbortSignal(); + var timer = setTimeout(abortSignal, ms, signal); + // Prevent the active Timer from keeping the Node.js event loop active. + if (typeof timer.unref === "function") { + timer.unref(); + } + return signal; + }; + return AbortController; +}()); + +exports.AbortController = AbortController; +exports.AbortError = AbortError; +exports.AbortSignal = AbortSignal; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 107 */, +/* 108 */, +/* 109 */, +/* 110 */, +/* 111 */, +/* 112 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; +var diag_1 = __webpack_require__(118); +var baggage_impl_1 = __webpack_require__(666); +var symbol_1 = __webpack_require__(561); +var diag = diag_1.DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +function createBaggage(entries) { + if (entries === void 0) { entries = {}; } + return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); +} +exports.createBaggage = createBaggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error("Cannot create baggage metadata from unknown type: " + typeof str); + str = ''; + } + return { + __TYPE__: symbol_1.baggageEntryMetadataSymbol, + toString: function () { + return str; + }, + }; +} +exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; +//# sourceMappingURL=utils.js.map + +/***/ }), +/* 113 */, +/* 114 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const http_client_1 = __webpack_require__(539); +const auth_1 = __webpack_require__(226); +const crypto = __importStar(__webpack_require__(417)); +const fs = __importStar(__webpack_require__(747)); +const url_1 = __webpack_require__(835); +const utils = __importStar(__webpack_require__(15)); +const constants_1 = __webpack_require__(931); +const downloadUtils_1 = __webpack_require__(251); +const options_1 = __webpack_require__(538); +const requestUtils_1 = __webpack_require__(899); +const versionSalt = '1.0'; +function getCacheApiUrl(resource) { + const baseUrl = process.env['ACTIONS_CACHE_URL'] || ''; + if (!baseUrl) { + throw new Error('Cache Service Url not found, unable to restore cache.'); + } + const url = `${baseUrl}_apis/artifactcache/${resource}`; + core.debug(`Resource Url: ${url}`); + return url; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader('application/json', '6.0-preview.1') + } + }; + return requestOptions; +} +function createHttpClient() { + const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); +} +function getCacheVersion(paths, compressionMethod) { + const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip + ? [] + : [compressionMethod]); + // Add salt to cache version to support breaking changes in cache entry + components.push(versionSalt); + return crypto + .createHash('sha256') + .update(components.join('|')) + .digest('hex'); +} +exports.getCacheVersion = getCacheVersion; +function getCacheEntry(keys, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? 
void 0 : options.compressionMethod); + const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; + const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 204) { + return null; + } + if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + throw new Error('Cache not found.'); + } + core.setSecret(cacheDownloadUrl); + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); +} +exports.getCacheEntry = getCacheEntry; +function downloadCache(archiveLocation, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = options_1.getDownloadOptions(options); + if (downloadOptions.useAzureSdk && + archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath); + } + }); +} +exports.downloadCache = downloadCache; +// Reserve Cache +function reserveCache(key, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const reserveCacheRequest = { + key, + version, + cacheSize: options === null || options === void 0 ? 
void 0 : options.cacheSize + }; + const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); + })); + return response; + }); +} +exports.reserveCache = reserveCache; +function getContentRange(start, end) { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*`; +} +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + 'Content-Type': 'application/octet-stream', + 'Content-Range': getContentRange(start, end) + }; + const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { + return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); + })); + if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); + } + }); +} +function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + // Upload Chunks + const fileSize = utils.getArchiveFileSizeInBytes(archivePath); + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs.openSync(archivePath, 'r'); + const uploadOptions = options_1.getUploadOptions(options); + const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + core.debug('Awaiting all uploads'); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on('error', error => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), start, end); + } + }))); + } + finally { + fs.closeSync(fd); + } + return; + }); +} +function commitCache(httpClient, cacheId, filesize) { + return __awaiter(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); +} +function saveCache(cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + core.debug('Upload cache'); + yield uploadFile(httpClient, cacheId, archivePath, options); + // Commit Cache + core.debug('Commiting cache'); + const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(cacheSize / 
(1024 * 1024))} MB (${cacheSize} B)`); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core.info('Cache saved successfully'); + }); +} +exports.saveCache = saveCache; +//# sourceMappingURL=cacheHttpClient.js.map + +/***/ }), +/* 115 */, +/* 116 */, +/* 117 */, +/* 118 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagAPI = void 0; +var ComponentLogger_1 = __webpack_require__(362); +var logLevelLogger_1 = __webpack_require__(673); +var types_1 = __webpack_require__(545); +var global_utils_1 = __webpack_require__(525); +var API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +var DiagAPI = /** @class */ (function () { + /** + * Private internal constructor + * @private + */ + function DiagAPI() { + function _logProxy(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + var logger = global_utils_1.getGlobal('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName].apply(logger, args); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + var self = this; + // DiagAPI specific functions + self.setLogger = function (logger, logLevel) { + var _a, _b; + if (logLevel === void 0) { logLevel = types_1.DiagLogLevel.INFO; } + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + var oldLogger = global_utils_1.getGlobal('diag'); + var newLogger = logLevelLogger_1.createLogLevelDiagLogger(logLevel, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger) { + var stack = (_b = new Error().stack) !== null && _b !== void 0 ? 
_b : ''; + oldLogger.warn("Current logger will be overwritten from " + stack); + newLogger.warn("Current logger will overwrite one already registered from " + stack); + } + return global_utils_1.registerGlobal('diag', newLogger, self, true); + }; + self.disable = function () { + global_utils_1.unregisterGlobal(API_NAME, self); + }; + self.createComponentLogger = function (options) { + return new ComponentLogger_1.DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + DiagAPI.instance = function () { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + }; + return DiagAPI; +}()); +exports.DiagAPI = DiagAPI; +//# sourceMappingURL=diag.js.map + +/***/ }), +/* 119 */, +/* 120 */, +/* 121 */, +/* 122 */, +/* 123 */, +/* 124 */, +/* 125 */, +/* 126 */, +/* 127 */, +/* 128 */, +/* 129 */ +/***/ (function(module) { + +module.exports = require("child_process"); + +/***/ }), +/* 130 */, +/* 131 */, +/* 132 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ROOT_CONTEXT = exports.createContextKey = void 0; +/** Get a key to uniquely identify a context value */ +function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +exports.createContextKey = createContextKey; +var BaseContext = /** @class */ (function () { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + function BaseContext(parentContext) { + // for minification + var self = this; + self._currentContext = parentContext ? 
new Map(parentContext) : new Map(); + self.getValue = function (key) { return self._currentContext.get(key); }; + self.setValue = function (key, value) { + var context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = function (key) { + var context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } + return BaseContext; +}()); +/** The root context is used as the default parent context when there is no active context */ +exports.ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map + +/***/ }), +/* 133 */, +/* 134 */, +/* 135 */, +/* 136 */, +/* 137 */, +/* 138 */, +/* 139 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = __webpack_require__(417); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; + + +/***/ }), +/* 140 */, +/* 141 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +var net = __webpack_require__(631); +var tls = __webpack_require__(16); +var http = __webpack_require__(605); +var https = __webpack_require__(211); +var events = __webpack_require__(614); +var assert = __webpack_require__(357); +var util = __webpack_require__(669); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), +/* 142 */, +/* 143 */, +/* 144 */, +/* 145 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports._globalThis = void 0; +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +exports._globalThis = typeof globalThis === 'object' ? 
globalThis : global; +//# sourceMappingURL=globalThis.js.map + +/***/ }), +/* 146 */, +/* 147 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +exports.fromCallback = function (fn) { + return Object.defineProperty(function () { + if (typeof arguments[arguments.length - 1] === 'function') fn.apply(this, arguments) + else { + return new Promise((resolve, reject) => { + arguments[arguments.length] = (err, res) => { + if (err) return reject(err) + resolve(res) + } + arguments.length++ + fn.apply(this, arguments) + }) + } + }, 'name', { value: fn.name }) +} + +exports.fromPromise = function (fn) { + return Object.defineProperty(function () { + const cb = arguments[arguments.length - 1] + if (typeof cb !== 'function') return fn.apply(this, arguments) + else fn.apply(this, arguments).then(r => cb(null, r), cb) + }, 'name', { value: fn.name }) +} + + +/***/ }), +/* 148 */, +/* 149 */ +/***/ (function(module) { + +"use strict"; + + +var conversions = {}; +module.exports = conversions; + +function sign(x) { + return x < 0 ? -1 : 1; +} + +function evenRound(x) { + // Round x to the nearest integer, choosing the even integer if it lies halfway between two. + if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) + return Math.floor(x); + } else { + return Math.round(x); + } +} + +function createNumberConversion(bitLength, typeOpts) { + if (!typeOpts.unsigned) { + --bitLength; + } + const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength); + const upperBound = Math.pow(2, bitLength) - 1; + + const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); + const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); + + return function(V, opts) { + if (!opts) opts = {}; + + let x = +V; + + if (opts.enforceRange) { + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite number"); + } + + x = sign(x) * Math.floor(Math.abs(x)); + if (x < lowerBound || x > upperBound) { + throw new TypeError("Argument is not in byte range"); + } + + return x; + } + + if (!isNaN(x) && opts.clamp) { + x = evenRound(x); + + if (x < lowerBound) x = lowerBound; + if (x > upperBound) x = upperBound; + return x; + } + + if (!Number.isFinite(x) || x === 0) { + return 0; + } + + x = sign(x) * Math.floor(Math.abs(x)); + x = x % moduloVal; + + if (!typeOpts.unsigned && x >= moduloBound) { + return x - moduloVal; + } else if (typeOpts.unsigned) { + if (x < 0) { + x += moduloVal; + } else if (x === -0) { // don't return negative zero + return 0; + } + } + + return x; + } +} + +conversions["void"] = function () { + return undefined; +}; + +conversions["boolean"] = function (val) { + return !!val; +}; + +conversions["byte"] = createNumberConversion(8, { unsigned: false }); +conversions["octet"] = createNumberConversion(8, { unsigned: true }); + +conversions["short"] = createNumberConversion(16, { unsigned: false }); +conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); + +conversions["long"] = createNumberConversion(32, { unsigned: false }); +conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); + +conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); +conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); + +conversions["double"] = function (V) { + const x = +V; + + if (!Number.isFinite(x)) { + throw new 
TypeError("Argument is not a finite floating-point value"); + } + + return x; +}; + +conversions["unrestricted double"] = function (V) { + const x = +V; + + if (isNaN(x)) { + throw new TypeError("Argument is NaN"); + } + + return x; +}; + +// not quite valid, but good enough for JS +conversions["float"] = conversions["double"]; +conversions["unrestricted float"] = conversions["unrestricted double"]; + +conversions["DOMString"] = function (V, opts) { + if (!opts) opts = {}; + + if (opts.treatNullAsEmptyString && V === null) { + return ""; + } + + return String(V); +}; + +conversions["ByteString"] = function (V, opts) { + const x = String(V); + let c = undefined; + for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { + if (c > 255) { + throw new TypeError("Argument is not a valid bytestring"); + } + } + + return x; +}; + +conversions["USVString"] = function (V) { + const S = String(V); + const n = S.length; + const U = []; + for (let i = 0; i < n; ++i) { + const c = S.charCodeAt(i); + if (c < 0xD800 || c > 0xDFFF) { + U.push(String.fromCodePoint(c)); + } else if (0xDC00 <= c && c <= 0xDFFF) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + if (i === n - 1) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + const d = S.charCodeAt(i + 1); + if (0xDC00 <= d && d <= 0xDFFF) { + const a = c & 0x3FF; + const b = d & 0x3FF; + U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); + ++i; + } else { + U.push(String.fromCodePoint(0xFFFD)); + } + } + } + } + + return U.join(''); +}; + +conversions["Date"] = function (V, opts) { + if (!(V instanceof Date)) { + throw new TypeError("Argument is not a Date object"); + } + if (isNaN(V)) { + return undefined; + } + + return V; +}; + +conversions["RegExp"] = function (V, opts) { + if (!(V instanceof RegExp)) { + V = new RegExp(V); + } + + return V; +}; + + +/***/ }), +/* 150 */, +/* 151 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTracer = void 0; +var context_1 = __webpack_require__(492); +var context_utils_1 = __webpack_require__(720); +var NonRecordingSpan_1 = __webpack_require__(437); +var spancontext_utils_1 = __webpack_require__(629); +var context = context_1.ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. + */ +var NoopTracer = /** @class */ (function () { + function NoopTracer() { + } + // startSpan starts a noop span. + NoopTracer.prototype.startSpan = function (name, options, context) { + var root = Boolean(options === null || options === void 0 ? 
void 0 : options.root); + if (root) { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + var parentFromContext = context && context_utils_1.getSpanContext(context); + if (isSpanContext(parentFromContext) && + spancontext_utils_1.isSpanContextValid(parentFromContext)) { + return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + }; + NoopTracer.prototype.startActiveSpan = function (name, arg2, arg3, arg4) { + var opts; + var ctx; + var fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + var parentContext = ctx !== null && ctx !== void 0 ? ctx : context.active(); + var span = this.startSpan(name, opts, parentContext); + var contextWithSpanSet = context_utils_1.setSpan(parentContext, span); + return context.with(contextWithSpanSet, fn, undefined, span); + }; + return NoopTracer; +}()); +exports.NoopTracer = NoopTracer; +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map + +/***/ }), +/* 152 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var Stream = __webpack_require__(794).Stream; +var util = __webpack_require__(669); + +module.exports = DelayedStream; +function DelayedStream() { + this.source = null; + this.dataSize = 0; + this.maxDataSize = 1024 * 1024; + this.pauseStream = true; + + this._maxDataSizeExceeded = false; + this._released = false; + this._bufferedEvents = []; +} +util.inherits(DelayedStream, Stream); + +DelayedStream.create = function(source, options) { + var delayedStream = new this(); + + options = options || {}; + for (var option in options) { + delayedStream[option] = options[option]; + } + + delayedStream.source = source; + + var realEmit = source.emit; + source.emit = function() { + delayedStream._handleEmit(arguments); + return realEmit.apply(source, arguments); + }; + + source.on('error', function() {}); + if (delayedStream.pauseStream) { + source.pause(); + } + + return delayedStream; +}; + +Object.defineProperty(DelayedStream.prototype, 'readable', { + configurable: true, + enumerable: true, + get: function() { + return this.source.readable; + } +}); + +DelayedStream.prototype.setEncoding = function() { + return this.source.setEncoding.apply(this.source, arguments); +}; + +DelayedStream.prototype.resume = function() { + if (!this._released) { + this.release(); + } + + this.source.resume(); +}; + +DelayedStream.prototype.pause = function() { + this.source.pause(); +}; + +DelayedStream.prototype.release = function() { + this._released = true; + + this._bufferedEvents.forEach(function(args) { + this.emit.apply(this, args); + }.bind(this)); + this._bufferedEvents = []; +}; + +DelayedStream.prototype.pipe = function() { + var r = Stream.prototype.pipe.apply(this, arguments); + this.resume(); + return r; +}; + +DelayedStream.prototype._handleEmit = function(args) { + if (this._released) { + this.emit.apply(this, args); + return; + } + + if (args[0] === 'data') { + this.dataSize += args[1].length; + this._checkIfMaxDataSizeExceeded(); + } + + this._bufferedEvents.push(args); +}; + +DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { + if 
(this._maxDataSizeExceeded) { + return; + } + + if (this.dataSize <= this.maxDataSize) { + return; + } + + this._maxDataSizeExceeded = true; + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' + this.emit('error', new Error(message)); +}; + + +/***/ }), +/* 153 */, +/* 154 */, +/* 155 */, +/* 156 */, +/* 157 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var async = __webpack_require__(751) + , abort = __webpack_require__(566) + ; + +// API +module.exports = iterate; + +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } + + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); +} + +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } + + return aborter; +} + + +/***/ }), +/* 158 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Time.js.map + +/***/ }), +/* 159 */, +/* 160 */, +/* 161 */, +/* 162 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTracerProvider = void 0; +var NoopTracer_1 = __webpack_require__(151); +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. 
+ */ +var NoopTracerProvider = /** @class */ (function () { + function NoopTracerProvider() { + } + NoopTracerProvider.prototype.getTracer = function (_name, _version) { + return new NoopTracer_1.NoopTracer(); + }; + return NoopTracerProvider; +}()); +exports.NoopTracerProvider = NoopTracerProvider; +//# sourceMappingURL=NoopTracerProvider.js.map + +/***/ }), +/* 163 */, +/* 164 */, +/* 165 */, +/* 166 */, +/* 167 */, +/* 168 */, +/* 169 */, +/* 170 */, +/* 171 */, +/* 172 */, +/* 173 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(__webpack_require__(733)); + +var _stringify = _interopRequireDefault(__webpack_require__(855)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports.default = _default; + +/***/ }), +/* 174 */, +/* 175 */, +/* 176 */, +/* 177 */, +/* 178 */, +/* 179 */, +/* 180 */, +/* 181 */, +/* 182 */, +/* 183 */, +/* 184 */, +/* 185 */, +/* 186 */, +/* 187 */, +/* 188 */, +/* 189 */, +/* 190 */, +/* 191 */, +/* 192 */, +/* 193 */, +/* 194 */, +/* 195 */, +/* 196 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RefKey = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0; +var Inputs; +(function (Inputs) { + Inputs["Key"] = "key"; + Inputs["Path"] = "path"; + Inputs["RestoreKeys"] = "restore-keys"; + Inputs["UploadChunkSize"] = "upload-chunk-size"; +})(Inputs = exports.Inputs || (exports.Inputs = {})); +var Outputs; +(function (Outputs) { + Outputs["CacheHit"] = "cache-hit"; +})(Outputs = exports.Outputs || (exports.Outputs = {})); +var State; +(function (State) { + State["CachePrimaryKey"] = "CACHE_KEY"; + State["CacheMatchedKey"] = "CACHE_RESULT"; +})(State = exports.State || (exports.State = {})); +var Events; +(function (Events) { + Events["Key"] = "GITHUB_EVENT_NAME"; + Events["Push"] = "push"; + Events["PullRequest"] = "pull_request"; +})(Events = exports.Events || (exports.Events = {})); +exports.RefKey = "GITHUB_REF"; + + +/***/ }), +/* 197 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(__webpack_require__(676)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ 
+ + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; + +/***/ }), +/* 198 */, +/* 199 */, +/* 200 */, +/* 201 */, +/* 202 */, +/* 203 */, +/* 204 */, +/* 205 */, +/* 206 */, +/* 207 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=trace_state.js.map + +/***/ }), +/* 208 */, +/* 209 */, +/* 210 */ +/***/ (function(__unusedmodule, exports) { + +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + exports.stripBOM = function(str) { + if (str[0] === '\uFEFF') { + return str.substring(1); + } else { + return str; + } + }; + +}).call(this); + + +/***/ }), +/* 211 */ +/***/ (function(module) { + +module.exports = require("https"); + +/***/ }), +/* 212 */, +/* 213 */ +/***/ (function(module) { + +module.exports = require("timers"); + +/***/ }), +/* 214 */, +/* 215 */, +/* 216 */, +/* 217 */, +/* 218 */, +/* 219 */, +/* 220 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=SpanOptions.js.map + +/***/ }), +/* 221 */, +/* 222 */, +/* 223 */, +/* 224 */, +/* 225 */, +/* 226 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + options.headers['Authorization'] = + 'Basic ' + + Buffer.from(this.username + ':' + this.password).toString('base64'); + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = 'Bearer ' + this.token; + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = + 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; + + +/***/ }), +/* 227 */, +/* 228 */, +/* 229 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +class AzureKeyCredential { + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + * @internal + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * Note: The properties may be inherited. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. 
+ * @internal + */ +function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * Note: The property may be inherited. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + * @internal + */ +function objectHasProperty(thing, property) { + return typeof thing === "object" && property in thing; +} + +// Copyright (c) Microsoft Corporation. +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +class AzureNamedKeyCredential { + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +function isNamedKeyCredential(credential) { + return (isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} + +// Copyright (c) Microsoft Corporation. +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +class AzureSASCredential { + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. 
+ */ +function isSASCredential(credential) { + return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. + const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} + +exports.AzureKeyCredential = AzureKeyCredential; +exports.AzureNamedKeyCredential = AzureNamedKeyCredential; +exports.AzureSASCredential = AzureSASCredential; +exports.isNamedKeyCredential = isNamedKeyCredential; +exports.isSASCredential = isSASCredential; +exports.isTokenCredential = isTokenCredential; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 230 */, +/* 231 */, +/* 232 */, +/* 233 */, +/* 234 */, +/* 235 */, +/* 236 */, +/* 237 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagConsoleLogger = void 0; +var consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
+ * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +var DiagConsoleLogger = /** @class */ (function () { + function DiagConsoleLogger() { + function _consoleFunc(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + var theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (var i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } + return DiagConsoleLogger; +}()); +exports.DiagConsoleLogger = DiagConsoleLogger; +//# sourceMappingURL=consoleLogger.js.map + +/***/ }), +/* 238 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; + +/***/ }), +/* 239 */, +/* 240 */, +/* 241 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__webpack_require__(855)); + +var _parse = _interopRequireDefault(__webpack_require__(197)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), +/* 242 */, +/* 243 */, +/* 244 */, +/* 245 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(__webpack_require__(417)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; + +/***/ }), +/* 246 */, +/* 247 */, +/* 248 */, +/* 249 */, +/* 250 */, +/* 251 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const http_client_1 = __webpack_require__(539); +const storage_blob_1 = __webpack_require__(373); +const buffer = __importStar(__webpack_require__(293)); +const fs = __importStar(__webpack_require__(747)); +const stream = __importStar(__webpack_require__(794)); +const util = __importStar(__webpack_require__(669)); +const utils = __importStar(__webpack_require__(15)); +const constants_1 = __webpack_require__(931); +const requestUtils_1 = __webpack_require__(899); +/** + * Pipes the body of a HTTP response to a stream + * + * @param response the HTTP response + * @param output the writable stream + */ +function pipeResponseToStream(response, output) { + return __awaiter(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); + }); +} +/** + * Class for tracking the download state and displaying stats. 
+ */ +class DownloadProgress { + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); + } + /** + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + } + /** + * Sets the number of bytes received for the current segment. + * + * @param receivedBytes the number of bytes received + */ + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; + } + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; + } + /** + * Returns true if the download is complete. + */ + isDone() { + return this.getTransferredBytes() === this.contentLength; + } + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. + */ + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / + (1024 * 1024) / + (elapsedTime / 1000)).toFixed(1); + core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; + } + } + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; + } + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs = 1000) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + }; + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = undefined; + } + this.display(); + } +} +exports.DownloadProgress = DownloadProgress; +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const writeStream = fs.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient('actions/cache'); + const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); + // Abort download if no traffic received over the socket. 
+ downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers['content-length']; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeInBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug('Unable to validate download, no Content-Length header'); + } + }); +} +exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; + if (contentLength < 0) { + // We should never hit this condition, but just in case fall back to downloading the + // file as one large stream + core.debug('Unable to determine content length, downloading file with http-client...'); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } + else { + // Use downloadToBuffer for faster downloads, since internally it splits the + // file into 4 MB chunks which can then be parallelized and retried independently + // + // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB + // on 64-bit systems), split the download into multiple segments + // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly. 
+ const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH); + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs.openSync(archivePath, 'w'); + try { + downloadProgress.startDisplayTimer(); + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield client.downloadToBuffer(segmentStart, segmentSize, { + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + }); + fs.writeFileSync(fd, result); + } + } + finally { + downloadProgress.stopDisplayTimer(); + fs.closeSync(fd); + } + } + }); +} +exports.downloadCacheStorageSDK = downloadCacheStorageSDK; +//# sourceMappingURL=downloadUtils.js.map + +/***/ }), +/* 252 */, +/* 253 */, +/* 254 */, +/* 255 */, +/* 256 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +const usm = __webpack_require__(936); + +exports.implementation = class URLImpl { + constructor(constructorArgs) { + const url = constructorArgs[0]; + const base = constructorArgs[1]; + + let parsedBase = null; + if (base !== undefined) { + parsedBase = usm.basicURLParse(base); + if (parsedBase === "failure") { + throw new TypeError("Invalid base URL"); + } + } + + const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + + // TODO: query stuff + } + + get href() { + return usm.serializeURL(this._url); + } + + set href(v) { + const parsedURL = usm.basicURLParse(v); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + } + + get origin() { + return usm.serializeURLOrigin(this._url); + } + + get protocol() { + return this._url.scheme + ":"; + } + + set protocol(v) { + usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); + } + + get username() { + return this._url.username; + } + + set username(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setTheUsername(this._url, v); + } + + get password() { + return this._url.password; + } + + set password(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setThePassword(this._url, v); + } + + get host() { + const url = this._url; + + if (url.host === null) { + return ""; + } + + if (url.port === null) { + return usm.serializeHost(url.host); + } + + return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); + } + + set host(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); + } + + get hostname() { + if (this._url.host === null) { + return ""; + } + + return usm.serializeHost(this._url.host); + } + + set hostname(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); + } + + get port() { + if (this._url.port === null) { + return ""; + } + + return usm.serializeInteger(this._url.port); + } + + set port(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + if (v === "") { + this._url.port = null; + } else { + usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); + } + } + + get pathname() { + if (this._url.cannotBeABaseURL) { + return this._url.path[0]; + } + + if (this._url.path.length === 0) 
{ + return ""; + } + + return "/" + this._url.path.join("/"); + } + + set pathname(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + this._url.path = []; + usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); + } + + get search() { + if (this._url.query === null || this._url.query === "") { + return ""; + } + + return "?" + this._url.query; + } + + set search(v) { + // TODO: query stuff + + const url = this._url; + + if (v === "") { + url.query = null; + return; + } + + const input = v[0] === "?" ? v.substring(1) : v; + url.query = ""; + usm.basicURLParse(input, { url, stateOverride: "query" }); + } + + get hash() { + if (this._url.fragment === null || this._url.fragment === "") { + return ""; + } + + return "#" + this._url.fragment; + } + + set hash(v) { + if (v === "") { + this._url.fragment = null; + return; + } + + const input = v[0] === "#" ? v.substring(1) : v; + this._url.fragment = ""; + usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); + } + + toJSON() { + return this.href; + } +}; + + +/***/ }), +/* 257 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var DocumentPosition, NodeType, XMLCData, XMLComment, XMLDeclaration, XMLDocType, XMLDummy, XMLElement, XMLNamedNodeMap, XMLNode, XMLNodeList, XMLProcessingInstruction, XMLRaw, XMLText, getValue, isEmpty, isFunction, isObject, ref1, + hasProp = {}.hasOwnProperty; + + ref1 = __webpack_require__(582), isObject = ref1.isObject, isFunction = ref1.isFunction, isEmpty = ref1.isEmpty, getValue = ref1.getValue; + + XMLElement = null; + + XMLCData = null; + + XMLComment = null; + + XMLDeclaration = null; + + XMLDocType = null; + + XMLRaw = null; + + XMLText = null; + + XMLProcessingInstruction = null; + + XMLDummy = null; + + NodeType = null; + + XMLNodeList = null; + + XMLNamedNodeMap = null; + + DocumentPosition = null; + + module.exports = XMLNode = (function() { + function XMLNode(parent1) { + this.parent = parent1; + if (this.parent) { + this.options = this.parent.options; + this.stringify = this.parent.stringify; + } + this.value = null; + this.children = []; + this.baseURI = null; + if (!XMLElement) { + XMLElement = __webpack_require__(796); + XMLCData = __webpack_require__(657); + XMLComment = __webpack_require__(919); + XMLDeclaration = __webpack_require__(738); + XMLDocType = __webpack_require__(735); + XMLRaw = __webpack_require__(660); + XMLText = __webpack_require__(708); + XMLProcessingInstruction = __webpack_require__(491); + XMLDummy = __webpack_require__(956); + NodeType = __webpack_require__(683); + XMLNodeList = __webpack_require__(265); + XMLNamedNodeMap = __webpack_require__(451); + DocumentPosition = __webpack_require__(65); + } + } + + Object.defineProperty(XMLNode.prototype, 'nodeName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLNode.prototype, 'nodeType', { + get: function() { + return this.type; + } + }); + + Object.defineProperty(XMLNode.prototype, 'nodeValue', { + get: function() { + return this.value; + } + }); + + Object.defineProperty(XMLNode.prototype, 'parentNode', { + get: function() { + return this.parent; + } + }); + + Object.defineProperty(XMLNode.prototype, 'childNodes', { + get: function() { + if (!this.childNodeList || !this.childNodeList.nodes) { + this.childNodeList = new XMLNodeList(this.children); + } + return this.childNodeList; + } + }); + + Object.defineProperty(XMLNode.prototype, 'firstChild', { + get: function() { + return 
this.children[0] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'lastChild', { + get: function() { + return this.children[this.children.length - 1] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'previousSibling', { + get: function() { + var i; + i = this.parent.children.indexOf(this); + return this.parent.children[i - 1] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'nextSibling', { + get: function() { + var i; + i = this.parent.children.indexOf(this); + return this.parent.children[i + 1] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'ownerDocument', { + get: function() { + return this.document() || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'textContent', { + get: function() { + var child, j, len, ref2, str; + if (this.nodeType === NodeType.Element || this.nodeType === NodeType.DocumentFragment) { + str = ''; + ref2 = this.children; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + if (child.textContent) { + str += child.textContent; + } + } + return str; + } else { + return null; + } + }, + set: function(value) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + } + }); + + XMLNode.prototype.setParent = function(parent) { + var child, j, len, ref2, results; + this.parent = parent; + if (parent) { + this.options = parent.options; + this.stringify = parent.stringify; + } + ref2 = this.children; + results = []; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + results.push(child.setParent(this)); + } + return results; + }; + + XMLNode.prototype.element = function(name, attributes, text) { + var childNode, item, j, k, key, lastChild, len, len1, ref2, ref3, val; + lastChild = null; + if (attributes === null && (text == null)) { + ref2 = [{}, null], attributes = ref2[0], text = ref2[1]; + } + if (attributes == null) { + attributes = {}; + } + attributes = getValue(attributes); + if (!isObject(attributes)) { + ref3 = [attributes, text], text = ref3[0], attributes = ref3[1]; + } + if (name != null) { + name = getValue(name); + } + if (Array.isArray(name)) { + for (j = 0, len = name.length; j < len; j++) { + item = name[j]; + lastChild = this.element(item); + } + } else if (isFunction(name)) { + lastChild = this.element(name.apply()); + } else if (isObject(name)) { + for (key in name) { + if (!hasProp.call(name, key)) continue; + val = name[key]; + if (isFunction(val)) { + val = val.apply(); + } + if (!this.options.ignoreDecorators && this.stringify.convertAttKey && key.indexOf(this.stringify.convertAttKey) === 0) { + lastChild = this.attribute(key.substr(this.stringify.convertAttKey.length), val); + } else if (!this.options.separateArrayItems && Array.isArray(val) && isEmpty(val)) { + lastChild = this.dummy(); + } else if (isObject(val) && isEmpty(val)) { + lastChild = this.element(key); + } else if (!this.options.keepNullNodes && (val == null)) { + lastChild = this.dummy(); + } else if (!this.options.separateArrayItems && Array.isArray(val)) { + for (k = 0, len1 = val.length; k < len1; k++) { + item = val[k]; + childNode = {}; + childNode[key] = item; + lastChild = this.element(childNode); + } + } else if (isObject(val)) { + if (!this.options.ignoreDecorators && this.stringify.convertTextKey && key.indexOf(this.stringify.convertTextKey) === 0) { + lastChild = this.element(val); + } else { + lastChild = this.element(key); + lastChild.element(val); + } + } else { + lastChild = this.element(key, val); + } + } + } else if 
(!this.options.keepNullNodes && text === null) { + lastChild = this.dummy(); + } else { + if (!this.options.ignoreDecorators && this.stringify.convertTextKey && name.indexOf(this.stringify.convertTextKey) === 0) { + lastChild = this.text(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertCDataKey && name.indexOf(this.stringify.convertCDataKey) === 0) { + lastChild = this.cdata(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertCommentKey && name.indexOf(this.stringify.convertCommentKey) === 0) { + lastChild = this.comment(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertRawKey && name.indexOf(this.stringify.convertRawKey) === 0) { + lastChild = this.raw(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertPIKey && name.indexOf(this.stringify.convertPIKey) === 0) { + lastChild = this.instruction(name.substr(this.stringify.convertPIKey.length), text); + } else { + lastChild = this.node(name, attributes, text); + } + } + if (lastChild == null) { + throw new Error("Could not create any elements with: " + name + ". " + this.debugInfo()); + } + return lastChild; + }; + + XMLNode.prototype.insertBefore = function(name, attributes, text) { + var child, i, newChild, refChild, removed; + if (name != null ? name.type : void 0) { + newChild = name; + refChild = attributes; + newChild.setParent(this); + if (refChild) { + i = children.indexOf(refChild); + removed = children.splice(i); + children.push(newChild); + Array.prototype.push.apply(children, removed); + } else { + children.push(newChild); + } + return newChild; + } else { + if (this.isRoot) { + throw new Error("Cannot insert elements at root level. " + this.debugInfo(name)); + } + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i); + child = this.parent.element(name, attributes, text); + Array.prototype.push.apply(this.parent.children, removed); + return child; + } + }; + + XMLNode.prototype.insertAfter = function(name, attributes, text) { + var child, i, removed; + if (this.isRoot) { + throw new Error("Cannot insert elements at root level. " + this.debugInfo(name)); + } + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i + 1); + child = this.parent.element(name, attributes, text); + Array.prototype.push.apply(this.parent.children, removed); + return child; + }; + + XMLNode.prototype.remove = function() { + var i, ref2; + if (this.isRoot) { + throw new Error("Cannot remove the root element. 
" + this.debugInfo()); + } + i = this.parent.children.indexOf(this); + [].splice.apply(this.parent.children, [i, i - i + 1].concat(ref2 = [])), ref2; + return this.parent; + }; + + XMLNode.prototype.node = function(name, attributes, text) { + var child, ref2; + if (name != null) { + name = getValue(name); + } + attributes || (attributes = {}); + attributes = getValue(attributes); + if (!isObject(attributes)) { + ref2 = [attributes, text], text = ref2[0], attributes = ref2[1]; + } + child = new XMLElement(this, name, attributes); + if (text != null) { + child.text(text); + } + this.children.push(child); + return child; + }; + + XMLNode.prototype.text = function(value) { + var child; + if (isObject(value)) { + this.element(value); + } + child = new XMLText(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.cdata = function(value) { + var child; + child = new XMLCData(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.comment = function(value) { + var child; + child = new XMLComment(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.commentBefore = function(value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i); + child = this.parent.comment(value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.commentAfter = function(value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i + 1); + child = this.parent.comment(value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.raw = function(value) { + var child; + child = new XMLRaw(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.dummy = function() { + var child; + child = new XMLDummy(this); + return child; + }; + + XMLNode.prototype.instruction = function(target, value) { + var insTarget, insValue, instruction, j, len; + if (target != null) { + target = getValue(target); + } + if (value != null) { + value = getValue(value); + } + if (Array.isArray(target)) { + for (j = 0, len = target.length; j < len; j++) { + insTarget = target[j]; + this.instruction(insTarget); + } + } else if (isObject(target)) { + for (insTarget in target) { + if (!hasProp.call(target, insTarget)) continue; + insValue = target[insTarget]; + this.instruction(insTarget, insValue); + } + } else { + if (isFunction(value)) { + value = value.apply(); + } + instruction = new XMLProcessingInstruction(this, target, value); + this.children.push(instruction); + } + return this; + }; + + XMLNode.prototype.instructionBefore = function(target, value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i); + child = this.parent.instruction(target, value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.instructionAfter = function(target, value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i + 1); + child = this.parent.instruction(target, value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.declaration = function(version, encoding, standalone) { + var doc, xmldec; + doc = this.document(); + xmldec = new XMLDeclaration(doc, version, encoding, standalone); + if (doc.children.length === 0) { + 
doc.children.unshift(xmldec); + } else if (doc.children[0].type === NodeType.Declaration) { + doc.children[0] = xmldec; + } else { + doc.children.unshift(xmldec); + } + return doc.root() || doc; + }; + + XMLNode.prototype.dtd = function(pubID, sysID) { + var child, doc, doctype, i, j, k, len, len1, ref2, ref3; + doc = this.document(); + doctype = new XMLDocType(doc, pubID, sysID); + ref2 = doc.children; + for (i = j = 0, len = ref2.length; j < len; i = ++j) { + child = ref2[i]; + if (child.type === NodeType.DocType) { + doc.children[i] = doctype; + return doctype; + } + } + ref3 = doc.children; + for (i = k = 0, len1 = ref3.length; k < len1; i = ++k) { + child = ref3[i]; + if (child.isRoot) { + doc.children.splice(i, 0, doctype); + return doctype; + } + } + doc.children.push(doctype); + return doctype; + }; + + XMLNode.prototype.up = function() { + if (this.isRoot) { + throw new Error("The root node has no parent. Use doc() if you need to get the document object."); + } + return this.parent; + }; + + XMLNode.prototype.root = function() { + var node; + node = this; + while (node) { + if (node.type === NodeType.Document) { + return node.rootObject; + } else if (node.isRoot) { + return node; + } else { + node = node.parent; + } + } + }; + + XMLNode.prototype.document = function() { + var node; + node = this; + while (node) { + if (node.type === NodeType.Document) { + return node; + } else { + node = node.parent; + } + } + }; + + XMLNode.prototype.end = function(options) { + return this.document().end(options); + }; + + XMLNode.prototype.prev = function() { + var i; + i = this.parent.children.indexOf(this); + if (i < 1) { + throw new Error("Already at the first node. " + this.debugInfo()); + } + return this.parent.children[i - 1]; + }; + + XMLNode.prototype.next = function() { + var i; + i = this.parent.children.indexOf(this); + if (i === -1 || i === this.parent.children.length - 1) { + throw new Error("Already at the last node. " + this.debugInfo()); + } + return this.parent.children[i + 1]; + }; + + XMLNode.prototype.importDocument = function(doc) { + var clonedRoot; + clonedRoot = doc.root().clone(); + clonedRoot.parent = this; + clonedRoot.isRoot = false; + this.children.push(clonedRoot); + return this; + }; + + XMLNode.prototype.debugInfo = function(name) { + var ref2, ref3; + name = name || this.name; + if ((name == null) && !((ref2 = this.parent) != null ? ref2.name : void 0)) { + return ""; + } else if (name == null) { + return "parent: <" + this.parent.name + ">"; + } else if (!((ref3 = this.parent) != null ? 
ref3.name : void 0)) { + return "node: <" + name + ">"; + } else { + return "node: <" + name + ">, parent: <" + this.parent.name + ">"; + } + }; + + XMLNode.prototype.ele = function(name, attributes, text) { + return this.element(name, attributes, text); + }; + + XMLNode.prototype.nod = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLNode.prototype.txt = function(value) { + return this.text(value); + }; + + XMLNode.prototype.dat = function(value) { + return this.cdata(value); + }; + + XMLNode.prototype.com = function(value) { + return this.comment(value); + }; + + XMLNode.prototype.ins = function(target, value) { + return this.instruction(target, value); + }; + + XMLNode.prototype.doc = function() { + return this.document(); + }; + + XMLNode.prototype.dec = function(version, encoding, standalone) { + return this.declaration(version, encoding, standalone); + }; + + XMLNode.prototype.e = function(name, attributes, text) { + return this.element(name, attributes, text); + }; + + XMLNode.prototype.n = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLNode.prototype.t = function(value) { + return this.text(value); + }; + + XMLNode.prototype.d = function(value) { + return this.cdata(value); + }; + + XMLNode.prototype.c = function(value) { + return this.comment(value); + }; + + XMLNode.prototype.r = function(value) { + return this.raw(value); + }; + + XMLNode.prototype.i = function(target, value) { + return this.instruction(target, value); + }; + + XMLNode.prototype.u = function() { + return this.up(); + }; + + XMLNode.prototype.importXMLBuilder = function(doc) { + return this.importDocument(doc); + }; + + XMLNode.prototype.replaceChild = function(newChild, oldChild) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.removeChild = function(oldChild) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.appendChild = function(newChild) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.hasChildNodes = function() { + return this.children.length !== 0; + }; + + XMLNode.prototype.cloneNode = function(deep) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.normalize = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.isSupported = function(feature, version) { + return true; + }; + + XMLNode.prototype.hasAttributes = function() { + return this.attribs.length !== 0; + }; + + XMLNode.prototype.compareDocumentPosition = function(other) { + var ref, res; + ref = this; + if (ref === other) { + return 0; + } else if (this.document() !== other.document()) { + res = DocumentPosition.Disconnected | DocumentPosition.ImplementationSpecific; + if (Math.random() < 0.5) { + res |= DocumentPosition.Preceding; + } else { + res |= DocumentPosition.Following; + } + return res; + } else if (ref.isAncestor(other)) { + return DocumentPosition.Contains | DocumentPosition.Preceding; + } else if (ref.isDescendant(other)) { + return DocumentPosition.Contains | DocumentPosition.Following; + } else if (ref.isPreceding(other)) { + return DocumentPosition.Preceding; + } else { + return DocumentPosition.Following; + } + }; + + XMLNode.prototype.isSameNode = function(other) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLNode.prototype.lookupPrefix = function(namespaceURI) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.isDefaultNamespace = function(namespaceURI) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.lookupNamespaceURI = function(prefix) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.isEqualNode = function(node) { + var i, j, ref2; + if (node.nodeType !== this.nodeType) { + return false; + } + if (node.children.length !== this.children.length) { + return false; + } + for (i = j = 0, ref2 = this.children.length - 1; 0 <= ref2 ? j <= ref2 : j >= ref2; i = 0 <= ref2 ? ++j : --j) { + if (!this.children[i].isEqualNode(node.children[i])) { + return false; + } + } + return true; + }; + + XMLNode.prototype.getFeature = function(feature, version) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.setUserData = function(key, data, handler) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.getUserData = function(key) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.contains = function(other) { + if (!other) { + return false; + } + return other === this || this.isDescendant(other); + }; + + XMLNode.prototype.isDescendant = function(node) { + var child, isDescendantChild, j, len, ref2; + ref2 = this.children; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + if (node === child) { + return true; + } + isDescendantChild = child.isDescendant(node); + if (isDescendantChild) { + return true; + } + } + return false; + }; + + XMLNode.prototype.isAncestor = function(node) { + return node.isDescendant(this); + }; + + XMLNode.prototype.isPreceding = function(node) { + var nodePos, thisPos; + nodePos = this.treePosition(node); + thisPos = this.treePosition(this); + if (nodePos === -1 || thisPos === -1) { + return false; + } else { + return nodePos < thisPos; + } + }; + + XMLNode.prototype.isFollowing = function(node) { + var nodePos, thisPos; + nodePos = this.treePosition(node); + thisPos = this.treePosition(this); + if (nodePos === -1 || thisPos === -1) { + return false; + } else { + return nodePos > thisPos; + } + }; + + XMLNode.prototype.treePosition = function(node) { + var found, pos; + pos = 0; + found = false; + this.foreachTreeNode(this.document(), function(childNode) { + pos++; + if (!found && childNode === node) { + return found = true; + } + }); + if (found) { + return pos; + } else { + return -1; + } + }; + + XMLNode.prototype.foreachTreeNode = function(node, func) { + var child, j, len, ref2, res; + node || (node = this.document()); + ref2 = node.children; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + if (res = func(child)) { + return res; + } else { + res = this.foreachTreeNode(child, func); + if (res) { + return res; + } + } + } + }; + + return XMLNode; + + })(); + +}).call(this); + + +/***/ }), +/* 258 */, +/* 259 */, +/* 260 */, +/* 261 */, +/* 262 */, +/* 263 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +var api = __webpack_require__(440); + +// Copyright (c) Microsoft Corporation. +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. 
*/ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(exports.SpanKind || (exports.SpanKind = {})); +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +function getSpan(context) { + return api.trace.getSpan(context); +} +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +function setSpan(context, span) { + return api.trace.setSpan(context, span); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return api.trace.setSpanContext(context, spanContext); +} +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +function getSpanContext(context) { + return api.trace.getSpanContext(context); +} +/** + * Returns true of the given {@link SpanContext} is valid. + * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. + * + * @param context - the {@link SpanContext} to validate. + * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +function isSpanContextValid(context) { + return api.trace.isSpanContextValid(context); +} +function getTracer(name, version) { + return api.trace.getTracer(name || "azure/core-tracing", version); +} +/** Entrypoint for context API */ +const context = api.context; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(exports.SpanStatusCode || (exports.SpanStatusCode = {})); + +// Copyright (c) Microsoft Corporation. +function isTracingDisabled() { + var _a; + if (typeof process === "undefined") { + // not supported in browser for now without polyfills + return false; + } + const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { + return false; + } + return Boolean(azureTracingDisabledValue); +} +/** + * Creates a function that can be used to create spans using the global tracer. 
+ * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +function createSpanFunction(args) { + return function (operationName, operationOptions) { + const tracer = getTracer(); + const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; + const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); + const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; + let span; + if (isTracingDisabled()) { + span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); + } + else { + span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); + } + if (args.namespace) { + span.setAttribute("az.namespace", args.namespace); + } + let newSpanOptions = tracingOptions.spanOptions || {}; + if (span.isRecording() && args.namespace) { + newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + } + const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); + const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); + return { + span, + updatedOptions: newOperationOptions + }; + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const VERSION = "00"; +/** + * Generates a `SpanContext` given a `traceparent` header value. + * @param traceParent - Serialized span context data as a `traceparent` header value. + * @returns The `SpanContext` generated from the `traceparent` value. + */ +function extractSpanContextFromTraceParentHeader(traceParentHeader) { + const parts = traceParentHeader.split("-"); + if (parts.length !== 4) { + return; + } + const [version, traceId, spanId, traceOptions] = parts; + if (version !== VERSION) { + return; + } + const traceFlags = parseInt(traceOptions, 16); + const spanContext = { + spanId, + traceId, + traceFlags + }; + return spanContext; +} +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +function getTraceParentHeader(spanContext) { + const missingFields = []; + if (!spanContext.traceId) { + missingFields.push("traceId"); + } + if (!spanContext.spanId) { + missingFields.push("spanId"); + } + if (missingFields.length) { + return; + } + const flags = spanContext.traceFlags || 0 /* NONE */; + const hexFlags = flags.toString(16); + const traceFlags = hexFlags.length === 1 ? 
`0${hexFlags}` : hexFlags; + // https://www.w3.org/TR/trace-context/#traceparent-header-field-values + return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; +} + +exports.context = context; +exports.createSpanFunction = createSpanFunction; +exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; +exports.getSpan = getSpan; +exports.getSpanContext = getSpanContext; +exports.getTraceParentHeader = getTraceParentHeader; +exports.getTracer = getTracer; +exports.isSpanContextValid = isSpanContextValid; +exports.setSpan = setSpan; +exports.setSpanContext = setSpanContext; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 264 */, +/* 265 */ +/***/ (function(module) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLNodeList; + + module.exports = XMLNodeList = (function() { + function XMLNodeList(nodes) { + this.nodes = nodes; + } + + Object.defineProperty(XMLNodeList.prototype, 'length', { + get: function() { + return this.nodes.length || 0; + } + }); + + XMLNodeList.prototype.clone = function() { + return this.nodes = null; + }; + + XMLNodeList.prototype.item = function(index) { + return this.nodes[index] || null; + }; + + return XMLNodeList; + + })(); + +}).call(this); + + +/***/ }), +/* 266 */, +/* 267 */, +/* 268 */, +/* 269 */, +/* 270 */, +/* 271 */, +/* 272 */, +/* 273 */, +/* 274 */, +/* 275 */, +/* 276 */, +/* 277 */, +/* 278 */, +/* 279 */, +/* 280 */ +/***/ (function(module, exports) { + +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 + +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 + +function tok (n) { + t[n] = R++ +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' + +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. 
+ +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' + +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' + +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' + +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') + +// Tilde ranges. 
+// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' + +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +var tildeTrimReplace = '$1~' + +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' + +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +var caretTrimReplace = '$1^' + +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? re[t.LOOSE] : re[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' + this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === 
undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + if (this.value === '') { + return true + } + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + 
+Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[t.COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) +} + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[t.STAR], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. 
+exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + var match = null + if (!options.rtl) { + match = version.match(re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = re[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + re[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(match[2] + + '.' + (match[3] || '0') + + '.' 
+ (match[4] || '0'), options) +} + + +/***/ }), +/* 281 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.create = void 0; +const internal_globber_1 = __webpack_require__(297); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +//# sourceMappingURL=glob.js.map + +/***/ }), +/* 282 */, +/* 283 */, +/* 284 */, +/* 285 */, +/* 286 */, +/* 287 */, +/* 288 */, +/* 289 */, +/* 290 */, +/* 291 */, +/* 292 */, +/* 293 */ +/***/ (function(module) { + +module.exports = require("buffer"); + +/***/ }), +/* 294 */, +/* 295 */, +/* 296 */, +/* 297 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultGlobber = void 0; +const core = __importStar(__webpack_require__(470)); +const fs = __importStar(__webpack_require__(747)); +const globOptionsHelper = __importStar(__webpack_require__(601)); +const path = __importStar(__webpack_require__(622)); +const patternHelper = __importStar(__webpack_require__(597)); +const internal_match_kind_1 = __webpack_require__(327); +const internal_pattern_1 = __webpack_require__(923); +const internal_search_state_1 = __webpack_require__(728); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); + } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); + } + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? 
+ try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory) { + yield yield __await(item.path); + } + // Descend? + else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); + } + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); + } +} +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map + +/***/ }), +/* 298 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(__webpack_require__(241)); + +var _md = _interopRequireDefault(__webpack_require__(245)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; + +/***/ }), +/* 299 */, +/* 300 */, +/* 301 */, +/* 302 */, +/* 303 */, +/* 304 */ +/***/ (function(module) { + +module.exports = require("string_decoder"); + +/***/ }), +/* 305 */, +/* 306 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var concatMap = __webpack_require__(896); +var balanced = __webpack_require__(621); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + + + +/***/ }), +/* 307 */, +/* 308 */, +/* 309 */, +/* 310 */, +/* 311 */, +/* 312 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, WriterState, XMLDOMImplementation, XMLDocument, XMLDocumentCB, XMLStreamWriter, XMLStringWriter, assign, isFunction, ref; + + ref = __webpack_require__(582), assign = ref.assign, isFunction = ref.isFunction; + + XMLDOMImplementation = __webpack_require__(515); + + XMLDocument = __webpack_require__(559); + + XMLDocumentCB = __webpack_require__(768); + + XMLStringWriter = __webpack_require__(750); + + XMLStreamWriter = __webpack_require__(458); + + NodeType = __webpack_require__(683); + + WriterState = __webpack_require__(541); + + module.exports.create = function(name, xmldec, doctype, options) { + var doc, root; + if (name == null) { + throw new Error("Root element needs a name."); + } + options = assign({}, xmldec, doctype, options); + doc = new XMLDocument(options); + root = doc.element(name); + if (!options.headless) { + doc.declaration(options); + if ((options.pubID != null) || (options.sysID != null)) { + doc.dtd(options); + } + } + return root; + }; + + module.exports.begin = function(options, onData, onEnd) { + var ref1; + if (isFunction(options)) { + ref1 = [options, onData], onData = ref1[0], onEnd = ref1[1]; + options = {}; + } + if (onData) { + return new XMLDocumentCB(options, onData, onEnd); + } else { + return new XMLDocument(options); + } + }; + + module.exports.stringWriter = function(options) { + return new XMLStringWriter(options); + }; + + module.exports.streamWriter = function(stream, options) { + return new XMLStreamWriter(stream, options); + }; + + module.exports.implementation = new XMLDOMImplementation(); + + module.exports.nodeType = NodeType; + + module.exports.writerState = WriterState; + +}).call(this); + + +/***/ }), +/* 313 */, +/* 314 */, +/* 315 */, +/* 316 */, +/* 317 */, +/* 318 */, +/* 319 */, +/* 320 */, +/* 321 */, +/* 322 */, +/* 323 */, +/* 324 */, +/* 325 */, +/* 326 */, +/* 327 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MatchKind = void 0; +/** + * Indicates whether a pattern matches a path + */ +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# 
sourceMappingURL=internal-match-kind.js.map + +/***/ }), +/* 328 */, +/* 329 */, +/* 330 */, +/* 331 */, +/* 332 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +const { fromCallback } = __webpack_require__(147); +const Store = __webpack_require__(338).Store; +const permuteDomain = __webpack_require__(89).permuteDomain; +const pathMatch = __webpack_require__(348).pathMatch; +const util = __webpack_require__(669); + +class MemoryCookieStore extends Store { + constructor() { + super(); + this.synchronous = true; + this.idx = {}; + if (util.inspect.custom) { + this[util.inspect.custom] = this.inspect; + } + } + + inspect() { + return `{ idx: ${util.inspect(this.idx, false, 2)} }`; + } + + findCookie(domain, path, key, cb) { + if (!this.idx[domain]) { + return cb(null, undefined); + } + if (!this.idx[domain][path]) { + return cb(null, undefined); + } + return cb(null, this.idx[domain][path][key] || null); + } + findCookies(domain, path, allowSpecialUseDomain, cb) { + const results = []; + if (typeof allowSpecialUseDomain === "function") { + cb = allowSpecialUseDomain; + allowSpecialUseDomain = false; + } + if (!domain) { + return cb(null, []); + } + + let pathMatcher; + if (!path) { + // null means "all paths" + pathMatcher = function matchAll(domainIndex) { + for (const curPath in domainIndex) { + const pathIndex = domainIndex[curPath]; + for (const key in pathIndex) { + results.push(pathIndex[key]); + } + } + }; + } else { + pathMatcher = function matchRFC(domainIndex) { + //NOTE: we should use path-match algorithm from S5.1.4 here + //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299) + Object.keys(domainIndex).forEach(cookiePath => { + if (pathMatch(path, cookiePath)) { + const pathIndex = domainIndex[cookiePath]; + for (const key in pathIndex) { + results.push(pathIndex[key]); + } + } + }); + }; + } + + const domains = permuteDomain(domain, allowSpecialUseDomain) || [domain]; + const idx = this.idx; + domains.forEach(curDomain => { + const domainIndex = idx[curDomain]; + if (!domainIndex) { + return; + } + pathMatcher(domainIndex); + }); + + cb(null, results); + } + + putCookie(cookie, cb) { + if (!this.idx[cookie.domain]) { + this.idx[cookie.domain] = {}; + } + if (!this.idx[cookie.domain][cookie.path]) { + this.idx[cookie.domain][cookie.path] = {}; + } + this.idx[cookie.domain][cookie.path][cookie.key] = cookie; + cb(null); + } + updateCookie(oldCookie, newCookie, cb) { + // updateCookie() may avoid updating cookies that are identical. For example, + // lastAccessed may not be important to some stores and an equality + // comparison could exclude that field. + this.putCookie(newCookie, cb); + } + removeCookie(domain, path, key, cb) { + if ( + this.idx[domain] && + this.idx[domain][path] && + this.idx[domain][path][key] + ) { + delete this.idx[domain][path][key]; + } + cb(null); + } + removeCookies(domain, path, cb) { + if (this.idx[domain]) { + if (path) { + delete this.idx[domain][path]; + } else { + delete this.idx[domain]; + } + } + return cb(null); + } + removeAllCookies(cb) { + this.idx = {}; + return cb(null); + } + getAllCookies(cb) { + const cookies = []; + const idx = this.idx; + + const domains = Object.keys(idx); + domains.forEach(domain => { + const paths = Object.keys(idx[domain]); + paths.forEach(path => { + const keys = Object.keys(idx[domain][path]); + keys.forEach(key => { + if (key !== null) { + cookies.push(idx[domain][path][key]); + } + }); + }); + }); + + // Sort by creationIndex so deserializing retains the creation order. 
+ // When implementing your own store, this SHOULD retain the order too + cookies.sort((a, b) => { + return (a.creationIndex || 0) - (b.creationIndex || 0); + }); + + cb(null, cookies); + } +} + +[ + "findCookie", + "findCookies", + "putCookie", + "updateCookie", + "removeCookie", + "removeCookies", + "removeAllCookies", + "getAllCookies" +].forEach(name => { + MemoryCookieStore[name] = fromCallback(MemoryCookieStore.prototype[name]); +}); + +exports.MemoryCookieStore = MemoryCookieStore; + + +/***/ }), +/* 333 */, +/* 334 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = +{ + parallel : __webpack_require__(424), + serial : __webpack_require__(91), + serialOrdered : __webpack_require__(892) +}; + + +/***/ }), +/* 335 */, +/* 336 */, +/* 337 */, +/* 338 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +/*jshint unused:false */ + +class Store { + constructor() { + this.synchronous = false; + } + + findCookie(domain, path, key, cb) { + throw new Error("findCookie is not implemented"); + } + + findCookies(domain, path, allowSpecialUseDomain, cb) { + throw new Error("findCookies is not implemented"); + } + + putCookie(cookie, cb) { + throw new Error("putCookie is not implemented"); + } + + updateCookie(oldCookie, newCookie, cb) { + // recommended default implementation: + // return this.putCookie(newCookie, cb); + throw new Error("updateCookie is not implemented"); + } + + removeCookie(domain, path, key, cb) { + throw new Error("removeCookie is not implemented"); + } + + removeCookies(domain, path, cb) { + throw new Error("removeCookies is not implemented"); + } + + removeAllCookies(cb) { + throw new Error("removeAllCookies is not implemented"); + } + + getAllCookies(cb) { + throw new Error( + "getAllCookies is not implemented (therefore jar cannot be serialized)" + ); + } +} + +exports.Store = Store; + + +/***/ }), +/* 339 */, +/* 340 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SamplingDecision = void 0; +/** + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. + */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map + +/***/ }), +/* 341 */, +/* 342 */, +/* 343 */, +/* 344 */, +/* 345 */, +/* 346 */, +/* 347 */, +/* 348 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. 
Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * "A request-path path-matches a given cookie-path if at least one of the + * following conditions holds:" + */ +function pathMatch(reqPath, cookiePath) { + // "o The cookie-path and the request-path are identical." + if (cookiePath === reqPath) { + return true; + } + + const idx = reqPath.indexOf(cookiePath); + if (idx === 0) { + // "o The cookie-path is a prefix of the request-path, and the last + // character of the cookie-path is %x2F ("/")." + if (cookiePath.substr(-1) === "/") { + return true; + } + + // " o The cookie-path is a prefix of the request-path, and the first + // character of the request-path that is not included in the cookie- path + // is a %x2F ("/") character." + if (reqPath.substr(cookiePath.length, 1) === "/") { + return true; + } + } + + return false; +} + +exports.pathMatch = pathMatch; + + +/***/ }), +/* 349 */, +/* 350 */ +/***/ (function(__unusedmodule, exports) { + +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var prefixMatch; + + prefixMatch = new RegExp(/(?!xmlns)^.*:/); + + exports.normalize = function(str) { + return str.toLowerCase(); + }; + + exports.firstCharLowerCase = function(str) { + return str.charAt(0).toLowerCase() + str.slice(1); + }; + + exports.stripPrefix = function(str) { + return str.replace(prefixMatch, ''); + }; + + exports.parseNumbers = function(str) { + if (!isNaN(str)) { + str = str % 1 === 0 ? parseInt(str, 10) : parseFloat(str); + } + return str; + }; + + exports.parseBooleans = function(str) { + if (/^(?:true|false)$/i.test(str)) { + str = str.toLowerCase() === 'true'; + } + return str; + }; + +}).call(this); + + +/***/ }), +/* 351 */, +/* 352 */, +/* 353 */, +/* 354 */, +/* 355 */, +/* 356 */, +/* 357 */ +/***/ (function(module) { + +module.exports = require("assert"); + +/***/ }), +/* 358 */, +/* 359 */, +/* 360 */, +/* 361 */, +/* 362 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagComponentLogger = void 0; +var global_utils_1 = __webpack_require__(525); +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. + * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +var DiagComponentLogger = /** @class */ (function () { + function DiagComponentLogger(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + DiagComponentLogger.prototype.debug = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('debug', this._namespace, args); + }; + DiagComponentLogger.prototype.error = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('error', this._namespace, args); + }; + DiagComponentLogger.prototype.info = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('info', this._namespace, args); + }; + DiagComponentLogger.prototype.warn = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('warn', this._namespace, args); + }; + DiagComponentLogger.prototype.verbose = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('verbose', this._namespace, args); + }; + return DiagComponentLogger; +}()); +exports.DiagComponentLogger = DiagComponentLogger; +function logProxy(funcName, namespace, args) { + var logger = global_utils_1.getGlobal('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName].apply(logger, args); +} +//# sourceMappingURL=ComponentLogger.js.map + +/***/ }), +/* 363 */, +/* 364 */, +/* 365 */, +/* 366 */, +/* 367 */, +/* 368 */, +/* 369 */, +/* 370 */, +/* 371 */, +/* 372 */, +/* 373 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +var coreHttp = __webpack_require__(999); +var tslib = __webpack_require__(608); +var coreTracing = __webpack_require__(263); +var logger$1 = __webpack_require__(928); +var abortController = __webpack_require__(106); +var os = __webpack_require__(87); +var crypto = __webpack_require__(417); +var stream = __webpack_require__(794); +__webpack_require__(510); +var coreLro = __webpack_require__(889); +var events = __webpack_require__(614); +var fs = __webpack_require__(747); +var util = __webpack_require__(669); + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? 
[… vendored dependency content elided: this portion of the hunk adds the bundled @azure/storage-blob distribution under node_modules/, pulled in transitively by @actions/cache during `npm install`. The flattened text above/below is AutoRest-generated XML mapper definitions for the Blob service REST API: composite model mappers (BlobServiceProperties, Logging, RetentionPolicy, Metrics, CorsRule, StaticWebsite, StorageError, GeoReplication, ListContainersSegmentResponse, ContainerItem, ContainerProperties, UserDelegationKey, BlobTags/BlobTag, SignedIdentifier, AccessPolicy, ListBlobsFlatSegmentResponse, BlobItemInternal, BlobPropertiesInternal, ListBlobsHierarchySegmentResponse, BlockLookupList, BlockList, PageList, QueryRequest, QueryFormat, ArrowConfiguration, …) and the matching response-header mappers (Service*/Container*/Blob* …Headers and …ExceptionHeaders, e.g. ServiceGetPropertiesHeaders, ContainerCreateHeaders, ContainerAcquireLeaseHeaders, BlobDownloadHeaders, BlobGetPropertiesHeaders, …). No hand-written code is changed in this part of the patch.]
name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", + type: { + name: "String" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + 
serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String" + } + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteHeaders = { + serializedName: 
"Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + 
serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + } + } +}; +const BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + 
serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + } + } +}; +const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + 
name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: 
{ + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: 
"x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + 
xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: 
"x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + 
allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +const BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + 
serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: 
"x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + 
xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + 
serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + 
} + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: 
"x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + 
name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + } + } + } +}; +const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { 
+ serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockHeaders = { + serializedName: 
"BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { 
+ name: "String" + } + } + } + } +}; +const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobGetBlockListExceptionHeaders = { + serializedName: 
"BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; + +var Mappers = /*#__PURE__*/Object.freeze({ + __proto__: null, + BlobServiceProperties: BlobServiceProperties, + Logging: Logging, + RetentionPolicy: RetentionPolicy, + Metrics: Metrics, + CorsRule: CorsRule, + StaticWebsite: StaticWebsite, + StorageError: StorageError, + BlobServiceStatistics: BlobServiceStatistics, + GeoReplication: GeoReplication, + ListContainersSegmentResponse: ListContainersSegmentResponse, + ContainerItem: ContainerItem, + ContainerProperties: ContainerProperties, + KeyInfo: KeyInfo, + UserDelegationKey: UserDelegationKey, + FilterBlobSegment: FilterBlobSegment, + FilterBlobItem: FilterBlobItem, + BlobTags: BlobTags, + BlobTag: BlobTag, + SignedIdentifier: SignedIdentifier, + AccessPolicy: AccessPolicy, + ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, + BlobFlatListSegment: BlobFlatListSegment, + BlobItemInternal: BlobItemInternal, + BlobName: BlobName, + BlobPropertiesInternal: BlobPropertiesInternal, + ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, + BlobHierarchyListSegment: BlobHierarchyListSegment, + BlobPrefix: BlobPrefix, + BlockLookupList: BlockLookupList, + BlockList: BlockList, + Block: Block, + PageList: PageList, + PageRange: PageRange, + ClearRange: ClearRange, + QueryRequest: QueryRequest, + QuerySerialization: QuerySerialization, + QueryFormat: QueryFormat, + DelimitedTextConfiguration: DelimitedTextConfiguration, + JsonTextConfiguration: JsonTextConfiguration, + ArrowConfiguration: ArrowConfiguration, + ArrowField: ArrowField, + ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, + ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, + ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, + ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, + ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, + ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, + ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, + ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, + ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, + ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, + ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, + ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, + ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, + ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, + ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, + ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, + ContainerCreateHeaders: ContainerCreateHeaders, + ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, + ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, + ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, + ContainerDeleteHeaders: ContainerDeleteHeaders, + ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, + ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, + ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, + ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, + 
ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, + ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, + ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, + ContainerRestoreHeaders: ContainerRestoreHeaders, + ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, + ContainerRenameHeaders: ContainerRenameHeaders, + ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, + ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, + ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, + ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, + ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, + ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, + ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, + ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, + ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, + ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, + ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, + ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, + ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, + ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, + ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, + ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, + ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, + ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, + ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, + BlobDownloadHeaders: BlobDownloadHeaders, + BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, + BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, + BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, + BlobDeleteHeaders: BlobDeleteHeaders, + BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, + BlobUndeleteHeaders: BlobUndeleteHeaders, + BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, + BlobSetExpiryHeaders: BlobSetExpiryHeaders, + BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, + BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, + BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, + BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, + BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, + BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, + BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, + BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, + BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, + BlobSetMetadataHeaders: BlobSetMetadataHeaders, + BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, + BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, + BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, + BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, + BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, + BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, + BlobRenewLeaseExceptionHeaders: 
BlobRenewLeaseExceptionHeaders, + BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, + BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, + BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, + BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, + BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, + BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, + BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders, + BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, + BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, + BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, + BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, + BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, + BlobSetTierHeaders: BlobSetTierHeaders, + BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, + BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, + BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, + BlobQueryHeaders: BlobQueryHeaders, + BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, + BlobGetTagsHeaders: BlobGetTagsHeaders, + BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, + BlobSetTagsHeaders: BlobSetTagsHeaders, + BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, + PageBlobCreateHeaders: PageBlobCreateHeaders, + PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, + PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, + PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, + PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, + PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, + PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, + PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, + PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, + PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, + PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, + PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, + PageBlobResizeHeaders: PageBlobResizeHeaders, + PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, + PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, + PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, + PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, + PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, + AppendBlobCreateHeaders: AppendBlobCreateHeaders, + AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, + AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, + AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, + AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, + AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, + AppendBlobSealHeaders: AppendBlobSealHeaders, + AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, + BlockBlobUploadHeaders: BlockBlobUploadHeaders, + BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, + BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, + BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, + BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, + BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, + 
BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, + BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, + BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, + BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, + BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, + BlockBlobGetBlockListExceptionHeaders: BlockBlobGetBlockListExceptionHeaders +}); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +const contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServiceProperties +}; +const accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true +}; +const restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } +}; +const version = { + parameterPath: "version", + mapper: { + defaultValue: "2021-04-10", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } +}; +const requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; +const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } +}; +const marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } +}; +const maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } + } +}; +const include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: 
"Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: coreHttp.QueryCollectionFormat.Csv +}; +const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfo +}; +const comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +const comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" + } + } +}; +const multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" + } + } +}; +const comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" + } + } +}; +const restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + } +}; +const access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + } +}; +const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } +}; +const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } +}; +const leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const 
comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } + } +}; +const comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } + } +}; +const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } + } +}; +const comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } +}; +const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } +}; +const comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" + } + } +}; +const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } + } +}; 
+const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +const include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } + } + }, + collectionFormat: coreHttp.QueryCollectionFormat.Csv +}; +const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String" + } + } +}; +const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" + } + } +}; +const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" + } + } +}; +const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" + } + } +}; +const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } +}; +const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" + } + } +}; +const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + } +}; +const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" + } + } +}; +const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" + } + } +}; +const ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } +}; +const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } +}; +const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] + } + } +}; +const 
blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" + } + } +}; +const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" + } + } +}; +const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" + } + } +}; +const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" + } + } +}; +const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" + } + } +}; +const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + } +}; +const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" + } + } +}; +const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" + } + } +}; +const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } + } +}; +const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + } +}; +const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } +}; +const comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + } +}; +const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + 
serializedName: "comp", + type: { + name: "String" + } + } +}; +const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +const rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + } +}; +const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String" + } + } +}; +const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String" + } + } +}; +const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String" + } + } +}; +const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String" + } + } +}; +const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean" + } + } +}; +const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String" + } + } +}; +const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray" + } + } +}; +const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String" + } + } +}; +const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const copyActionAbortConstant = { + parameterPath: 
"copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String" + } + } +}; +const copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String" + } + } +}; +const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequest +}; +const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTags +}; +const transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + } +}; +const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } +}; +const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + } +}; +const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + } +}; +const contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: 
"Number" + } + } +}; +const ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number" + } + } +}; +const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number" + } + } +}; +const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray" + } + } +}; +const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String" + } + } +}; +const prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String" + } + } +}; +const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"] + } + } +}; +const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number" + } + } +}; +const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" + } + } +}; +const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: 
"x-ms-source-range", + type: { + name: "String" + } + } +}; +const comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean" + } + } +}; +const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String" + } + } +}; +const blocks = { + parameterPath: "blocks", + mapper: BlockLookupList +}; +const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] + } + } +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Service. */ +class Service { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + const operationArguments = { + blobServiceProperties, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. 
+ */ + getStatistics(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + const operationArguments = { + keyInfo, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. 
+ */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSetPropertiesExceptionHeaders + } + }, + requestBody: blobServiceProperties, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const getPropertiesOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceProperties, + headersMapper: ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceStatistics, + headersMapper: ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetStatisticsExceptionHeaders + } + }, + queryParameters: [ + restype, + timeoutInSeconds, + comp1 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListContainersSegmentResponse, + headersMapper: ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceListContainersSegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + include + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: UserDelegationKey, + headersMapper: ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders + } + }, + requestBody: keyInfo, + queryParameters: [ + restype, + timeoutInSeconds, + comp3 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const getAccountInfoOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$5 +}; +const submitBatchOperationSpec$1 = { + path: "/", + httpMethod: "POST", + 
responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSubmitBatchExceptionHeaders + } + }, + requestBody: body, + queryParameters: [timeoutInSeconds, comp4], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + contentLength, + multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const filterBlobsOperationSpec$1 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Container. */ +class Container { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. 
+ */ + getAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + const operationArguments = { + sourceContainerName, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter, options) { + const operationArguments = { + delimiter, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. 
+ */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + access, + defaultEncryptionScope, + preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getPropertiesOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetPropertiesExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const deleteOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: ContainerDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerDeleteExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const setMetadataOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetMetadataExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp6 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + headersMapper: ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccessPolicyExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetAccessPolicyExceptionHeaders + } + }, + requestBody: containerAcl, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + 
requestId, + access, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerRestoreHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRestoreExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp8 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + deletedContainerName, + deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenameHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenameExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp9 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + sourceContainerName, + sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSubmitBatchExceptionHeaders + } + }, + requestBody: body, + queryParameters: [ + timeoutInSeconds, + comp4, + restype2 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + contentLength, + multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const acquireLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerAcquireLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const releaseLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerReleaseLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const renewLeaseOperationSpec$1 = { + path: "/{containerName}", 
+ httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenewLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const breakLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerBreakLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const changeLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerChangeLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsFlatSegmentResponse, + headersMapper: ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsHierarchySegmentResponse, + headersMapper: ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + delimiter + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getAccountInfoOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$4 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Blob. */ +class Blob$1 { + /** + * Initialize a new instance of the class Blob class. 
+ * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. + */ + setExpiry(expiryOptions, options) { + const operationArguments = { + expiryOptions, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. 
+ */ + setHttpHeaders(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + const operationArguments = { + legalHold, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. 
+ * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + const operationArguments = { + copyId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. 
The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + const operationArguments = { + tier, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
+ */ + setTags(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDownloadExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + rangeGetContentMD5, + rangeGetContentCRC64, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: BlobGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: BlobDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + blobDeleteType + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobUndeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobUndeleteExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp8], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetExpiryHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetExpiryExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp11], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + expiryOptions, + expiresOn + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: 
BlobSetHttpHeadersExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifUnmodifiedSince, + immutabilityPolicyExpiry, + immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetLegalHoldExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp13], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + legalHold + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetMetadataExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp6], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAcquireLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobReleaseLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + 
leaseId1, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobRenewLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobChangeLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobChangeLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobBreakLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCreateSnapshotExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp14], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobStartCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + tier, + rehydratePriority, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sealBlob, + legalHold1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + 
leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + copySource, + blobTagsString, + legalHold1, + xMsRequiresSync, + sourceContentMD5, + copySourceAuthorization + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAbortCopyFromURLExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp15, + copyId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetTierHeaders + }, + 202: { + headersMapper: BlobSetTierHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTierExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp16 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + rehydratePriority, + tier1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$3 +}; +const queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobQueryHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobQueryExceptionHeaders + } + }, + requestBody: queryRequest, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp17 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3 +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobTags, + headersMapper: BlobGetTagsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetTagsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp18 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobSetTagsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: 
BlobSetTagsExceptionHeaders + } + }, + requestBody: tags, + queryParameters: [ + timeoutInSeconds, + versionId, + comp18 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifTags, + transactionalContentMD5, + transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a PageBlob. */ +class PageBlob { + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + const operationArguments = { + contentLength, + blobContentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + uploadPages(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. + */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + const operationArguments = { + sourceUrl, + sourceRange, + contentLength, + range, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. 
+ */ + getPageRanges(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. + */ + getPageRangesDiff(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + } + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + const operationArguments = { + blobContentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. + */ + updateSequenceNumber(sequenceNumberAction, options) { + const operationArguments = { + sequenceNumberAction, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. 
+ */ + copyIncremental(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec$1 = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + blobType, + blobContentLength, + blobSequenceNumber + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo + ], + mediaType: "binary", + serializer: serializer$2 +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobClearPagesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobClearPagesExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + pageWrite1 + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, 
+ ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + sourceUrl, + sourceRange, + sourceContentCrc64, + range1 + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp20 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesDiffHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp20, + prevsnapshot + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + prevSnapshotUrl + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobResizeHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobResizeExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + blobContentLength + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobSequenceNumber, + sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCopyIncrementalExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp21], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + copySource + ], + isXML: true, + serializer: xmlSerializer$2 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a AppendBlob. */ +class AppendBlob { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + const operationArguments = { + sourceUrl, + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. 
+ */ + seal(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + blobTagsString, + legalHold1, + blobType1 + ], + isXML: true, + serializer: xmlSerializer$1 +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + maxSize, + appendPosition + ], + mediaType: "binary", + serializer: serializer$1 +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + transactionalContentMD5, + sourceUrl, + sourceContentCrc64, + maxSize, + appendPosition, + sourceRange1 + ], + isXML: true, + serializer: xmlSerializer$1 +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: AppendBlobSealHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobSealExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp23], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + appendPosition + ], + isXML: true, + serializer: xmlSerializer$1 +}; + +/* + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a BlockBlob. */ +class BlockBlob { + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + const operationArguments = { + contentLength, + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + const operationArguments = { + blockId, + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. + */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + const operationArguments = { + blockId, + contentLength, + sourceUrl, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + const operationArguments = { + blocks, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. 
+ */ + getBlockList(listType, options) { + const operationArguments = { + listType, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobUploadHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobUploadExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + contentType1, + accept2, + blobType2 + ], + mediaType: "binary", + serializer +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobPutBlobFromUrlHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sourceContentMD5, + copySourceAuthorization, + transactionalContentMD5, + blobType2, + copySourceBlobProperties + ], + isXML: true, + serializer: xmlSerializer +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2 + ], + mediaType: "binary", + serializer +}; +const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + 
encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + sourceUrl, + sourceContentCrc64, + sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobCommitBlockListHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobCommitBlockListExceptionHeaders + } + }, + requestBody: blocks, + queryParameters: [timeoutInSeconds, comp25], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlockList, + headersMapper: BlockBlobGetBlockListHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobGetBlockListExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp25, + listType + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags + ], + isXML: true, + serializer: xmlSerializer +}; + +// Copyright (c) Microsoft Corporation. +/** + * The `@azure/logger` configuration for this package. + */ +const logger = logger$1.createClientLogger("storage-blob"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const SDK_VERSION = "12.9.0"; +const SERVICE_VERSION = "2021-04-10"; +const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +const BLOCK_BLOB_MAX_BLOCKS = 50000; +const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +/** + * The OAuth scope to use with Azure Storage. 
+ */ +const StorageOAuthScopes = "https://storage.azure.com/.default"; +const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", +}; +const ETagNone = ""; +const ETagAny = "*"; +const SIZE_1_MB = 1 * 1024 * 1024; +const BATCH_MAX_REQUEST = 256; +const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +const HTTP_LINE_ENDING = "\r\n"; +const HTTP_VERSION_1_1 = "HTTP/1.1"; +const EncryptionAlgorithmAES25 = "AES256"; +const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + 
"x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; + +// Copyright (c) Microsoft Corporation. +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. 
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +function escapeURLPath(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path || "/"; + path = escape(path); + urlParsed.setPath(path); + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; +} +function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; +} +/** + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. 
+ */ +function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + const accountName = getAccountNameFromUrl(blobEndpoint); + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } +} +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" +} +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". + * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +function appendToURLPath(url, name) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path ? (path.endsWith("/") ? 
`${path}${name}` : `${path}/${name}`) : name; + urlParsed.setPath(path); + return urlParsed.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +function setURLParameter(url, name, value) { + const urlParsed = coreHttp.URLBuilder.parse(url); + urlParsed.setQueryParameter(name, value); + return urlParsed.toString(); +} +/** + * Get URL parameter by name. + * + * @param url - + * @param name - + */ +function getURLParameter(url, name) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getQueryParameterValue(name); +} +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +function setURLHost(url, host) { + const urlParsed = coreHttp.URLBuilder.parse(url); + urlParsed.setHost(host); + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +function getURLPath(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getPath(); +} +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +function getURLScheme(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getScheme(); +} +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +function getURLPathAndQuery(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + const pathString = urlParsed.getPath(); + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.getQuery() || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; +} +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +function getURLQueries(url) { + let queryString = coreHttp.URLBuilder.parse(url).getQuery(); + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; +} +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. + */ +function appendToURLQuery(url, queryParts) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let query = urlParsed.getQuery(); + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.setQuery(query); + return urlParsed.toString(); +} +/** + * Rounds a date off to seconds. 
+ * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. + * + * @param content - + */ +function base64encode(content) { + return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Generate a 64 bytes base64 block ID string. + * + * @param blockIndex - + */ +function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); +} +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } +} +/** + * If two strings are equal when compared case insensitive. 
+ * + * @param str1 - + * @param str2 - + */ +function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +function getAccountNameFromUrl(url) { + const parsedUrl = coreHttp.URLBuilder.parse(url); + let accountName; + try { + if (parsedUrl.getHost().split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.getHost().split(".")[0]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.getPath().split("/")[1]; + } + else { + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; + } + return accountName; + } + catch (error) { + throw new Error("Unable to extract accountName with provided information."); + } +} +function isIpEndpointStyle(parsedUrl) { + if (parsedUrl.getHost() === undefined) { + return false; + } + const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port), use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); +} +/** + * Convert Tags type to BlobTags. + * + * @param tags - + */ +function toBlobTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. 
+ * + * @param textConfiguration - + */ +function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } +} +function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. + return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } + } + return orProperties; +} +/** + * Attach a TokenCredential to an object. + * + * @param thing - + * @param credential - + */ +function attachCredential(thing, credential) { + thing.credential = credential; + return thing; +} +function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function decodeBase64String(value) { + if (coreHttp.isNode) { + return Buffer.from(value, "base64"); + } + else { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } +} +function ParseBoolean(content) { + if (content === undefined) + return undefined; + if (content === "true") + return true; + if (content === "false") + return false; + return undefined; +} +function ParseBlobName(blobNameInXML) { + if (blobNameInXML["$"] !== undefined && blobNameInXML["#"] !== undefined) { + return { + encoded: ParseBoolean(blobNameInXML["$"]["Encoded"]), + content: blobNameInXML["#"], + }; + } + else { + return { + encoded: false, + content: blobNameInXML, + }; + } +} +function ParseBlobItem(blobInXML) { + const blobPropertiesInXML = blobInXML["Properties"]; + const blobProperties = { + createdOn: new Date(blobPropertiesInXML["Creation-Time"]), + lastModified: new Date(blobPropertiesInXML["Last-Modified"]), + etag: blobPropertiesInXML["Etag"], + contentLength: blobPropertiesInXML["Content-Length"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["Content-Length"]), + contentType: blobPropertiesInXML["Content-Type"], + contentEncoding: blobPropertiesInXML["Content-Encoding"], + contentLanguage: blobPropertiesInXML["Content-Language"], + contentMD5: decodeBase64String(blobPropertiesInXML["Content-MD5"]), + contentDisposition: blobPropertiesInXML["Content-Disposition"], + cacheControl: blobPropertiesInXML["Cache-Control"], + blobSequenceNumber: blobPropertiesInXML["x-ms-blob-sequence-number"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["x-ms-blob-sequence-number"]), + blobType: blobPropertiesInXML["BlobType"], + leaseStatus: blobPropertiesInXML["LeaseStatus"], + leaseState: blobPropertiesInXML["LeaseState"], + leaseDuration: blobPropertiesInXML["LeaseDuration"], + copyId: blobPropertiesInXML["CopyId"], + copyStatus: blobPropertiesInXML["CopyStatus"], + copySource: blobPropertiesInXML["CopySource"], + copyProgress: blobPropertiesInXML["CopyProgress"], + copyCompletedOn: blobPropertiesInXML["CopyCompletionTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["CopyCompletionTime"]), + copyStatusDescription: blobPropertiesInXML["CopyStatusDescription"], + serverEncrypted: ParseBoolean(blobPropertiesInXML["ServerEncrypted"]), + incrementalCopy: ParseBoolean(blobPropertiesInXML["IncrementalCopy"]), + destinationSnapshot: blobPropertiesInXML["DestinationSnapshot"], + deletedOn: blobPropertiesInXML["DeletedTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["DeletedTime"]), + remainingRetentionDays: blobPropertiesInXML["RemainingRetentionDays"] === undefined + ? 
undefined + : parseFloat(blobPropertiesInXML["RemainingRetentionDays"]), + accessTier: blobPropertiesInXML["AccessTier"], + accessTierInferred: ParseBoolean(blobPropertiesInXML["AccessTierInferred"]), + archiveStatus: blobPropertiesInXML["ArchiveStatus"], + customerProvidedKeySha256: blobPropertiesInXML["CustomerProvidedKeySha256"], + encryptionScope: blobPropertiesInXML["EncryptionScope"], + accessTierChangedOn: blobPropertiesInXML["AccessTierChangeTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["AccessTierChangeTime"]), + tagCount: blobPropertiesInXML["TagCount"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["TagCount"]), + expiresOn: blobPropertiesInXML["Expiry-Time"] === undefined + ? undefined + : new Date(blobPropertiesInXML["Expiry-Time"]), + isSealed: ParseBoolean(blobPropertiesInXML["Sealed"]), + rehydratePriority: blobPropertiesInXML["RehydratePriority"], + lastAccessedOn: blobPropertiesInXML["LastAccessTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["LastAccessTime"]), + immutabilityPolicyExpiresOn: blobPropertiesInXML["ImmutabilityPolicyUntilDate"] === undefined + ? undefined + : new Date(blobPropertiesInXML["ImmutabilityPolicyUntilDate"]), + immutabilityPolicyMode: blobPropertiesInXML["ImmutabilityPolicyMode"], + legalHold: ParseBoolean(blobPropertiesInXML["LegalHold"]), + }; + return { + name: ParseBlobName(blobInXML["Name"]), + deleted: ParseBoolean(blobInXML["Deleted"]), + snapshot: blobInXML["Snapshot"], + versionId: blobInXML["VersionId"], + isCurrentVersion: ParseBoolean(blobInXML["IsCurrentVersion"]), + properties: blobProperties, + metadata: blobInXML["Metadata"], + blobTags: ParseBlobTags(blobInXML["Tags"]), + objectReplicationMetadata: blobInXML["OrMetadata"], + hasVersionsOnly: ParseBoolean(blobInXML["HasVersionsOnly"]), + }; +} +function ParseBlobPrefix(blobPrefixInXML) { + return { + name: ParseBlobName(blobPrefixInXML["Name"]), + }; +} +function ParseBlobTag(blobTagInXML) { + return { + key: blobTagInXML["Key"], + value: blobTagInXML["Value"], + }; +} +function ParseBlobTags(blobTagsInXML) { + if (blobTagsInXML === undefined || + blobTagsInXML["TagSet"] === undefined || + blobTagsInXML["TagSet"]["Tag"] === undefined) { + return undefined; + } + const blobTagSet = []; + if (blobTagsInXML["TagSet"]["Tag"] instanceof Array) { + blobTagsInXML["TagSet"]["Tag"].forEach((blobTagInXML) => { + blobTagSet.push(ParseBlobTag(blobTagInXML)); + }); + } + else { + blobTagSet.push(ParseBlobTag(blobTagsInXML["TagSet"]["Tag"])); + } + return { blobTagSet: blobTagSet }; +} +function ProcessBlobItems(blobArrayInXML) { + const blobItems = []; + if (blobArrayInXML instanceof Array) { + blobArrayInXML.forEach((blobInXML) => { + blobItems.push(ParseBlobItem(blobInXML)); + }); + } + else { + blobItems.push(ParseBlobItem(blobArrayInXML)); + } + return blobItems; +} +function ProcessBlobPrefixes(blobPrefixesInXML) { + const blobPrefixes = []; + if (blobPrefixesInXML instanceof Array) { + blobPrefixesInXML.forEach((blobPrefixInXML) => { + blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML)); + }); + } + else { + blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML)); + } + return blobPrefixes; +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. 
+ * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreHttp.isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? 
retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. 
+ */ +class CredentialPolicy extends coreHttp.BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * TelemetryPolicy is a policy used to tag user-agent header for every requests. + */ +class TelemetryPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of TelemetryPolicy. + * @param nextPolicy - + * @param options - + * @param telemetry - + */ + constructor(nextPolicy, options, telemetry) { + super(nextPolicy, options); + this.telemetry = telemetry; + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreHttp.isNode) { + if (!request.headers) { + request.headers = new coreHttp.HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. + */ +class TelemetryPolicyFactory { + /** + * Creates an instance of TelemetryPolicyFactory. 
+ * @param telemetry - + */ + constructor(telemetry) { + const userAgentInfo = []; + if (coreHttp.isNode) { + if (telemetry) { + const telemetryString = telemetry.userAgentPrefix || ""; + if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { + userAgentInfo.push(telemetryString); + } + } + // e.g. azsdk-js-storageblob/10.0.0 + const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + if (userAgentInfo.indexOf(libInfo) === -1) { + userAgentInfo.push(libInfo); + } + // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) + const runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; + if (userAgentInfo.indexOf(runtimeInfo) === -1) { + userAgentInfo.push(runtimeInfo); + } + } + this.telemetryString = userAgentInfo.join(" "); + } + /** + * Creates a TelemetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + } +} + +// Copyright (c) Microsoft Corporation. +const _defaultHttpClient = new coreHttp.DefaultHttpClient(); +function getCachedDefaultHttpClient() { + return _defaultHttpClient; +} + +// Copyright (c) Microsoft Corporation. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await coreHttp.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. 
+ * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. 
+ * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. 
+ * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +class Pipeline { + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + // when options.httpClient is not specified, passing in a DefaultHttpClient instance to + // avoid each client creating its own http client. + this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; + } +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +function newPipeline(credential, pipelineOptions = {}) { + var _a; + if (credential === undefined) { + credential = new AnonymousCredential(); + } + // Order is important. Closer to the API at the top & closer to the network at the bottom. + // The credential's policy factory must appear close to the wire so it can sign any + // changes made by other factories (like UniqueRequestIDPolicyFactory) + const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + const factories = [ + coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), + coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), + telemetryPolicy, + coreHttp.generateClientRequestIdPolicy(), + new StorageBrowserPolicyFactory(), + new StorageRetryPolicyFactory(pipelineOptions.retryOptions), + // Default deserializationPolicy is provided by protocol layer + // Use customized XML char key of "#" so we could deserialize metadata + // with "_" key + coreHttp.deserializationPolicy(undefined, { xmlCharKey: "#" }), + coreHttp.logPolicy({ + logger: logger.info, + allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), + ]; + if (coreHttp.isNode) { + // policies only available in Node.js runtime, not in browsers + factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); + factories.push(coreHttp.disableResponseDecompressionPolicy()); + } + factories.push(coreHttp.isTokenCredential(credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) + : credential); + return new Pipeline(factories, pipelineOptions); +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. 
+ * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. + * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } +} + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +const packageName = "azure-storage-blob"; +const packageVersion = "12.9.0"; +class StorageClientContext extends coreHttp__namespace.ServiceClient { + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. 
+ * @param options The parameter options + */ + constructor(url, options) { + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + if (!options.userAgent) { + const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + super(undefined, options); + this.requestContentType = "application/json; charset=utf-8"; + this.baseUri = options.endpoint || "{url}"; + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2021-04-10"; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = new AnonymousCredential(); + for (const factory of this.pipeline.factories) { + if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) || + factory instanceof AnonymousCredential) { + this.credential = factory; + } + else if (coreHttp.isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + this.credential = factory.credential; + } + } + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a span using the global tracer. + * @internal + */ +const createSpan = coreTracing.createSpanFunction({ + packagePrefix: "Azure.Storage.Blob", + namespace: "Microsoft.Storage", +}); +/** + * @internal + * + * Adapt the tracing options from OperationOptions to what they need to be for + * RequestOptionsBase (when we update to later OpenTelemetry versions this is now + * two separate fields, not just one). + */ +function convertTracingToRequestOptionsBase(options) { + var _a, _b; + return { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; + } + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
+ * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate SasIPRange format string. For example: + * + * "8.8.8.8" or "1.1.1.1-255.255.255.255" + * + * @param ipRange - + */ +function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; +} + +// Copyright (c) Microsoft Corporation. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(exports.SASProtocol || (exports.SASProtocol = {})); +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. 
+ */ +class SASQueryParameters { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } + } + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + "sktid", + "skt", + "ske", + "sks", + "skv", + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } +} + +// Copyright (c) Microsoft Corporation. 
+function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. 
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. 
For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); + } + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; +} + +// Copyright (c) Microsoft Corporation. +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +class BlobLeaseClient { + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId) { + const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = new Container(clientContext); + } + else { + this._isContainer = false; + this._containerOrBlobOperation = new Blob$1(clientContext); + } + if (!leaseId) { + leaseId = coreHttp.generateUuid(); + } + this._leaseId = leaseId; + } + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. 
+ * + * @readonly + */ + get url() { + return this._url; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + this._leaseId = proposedLeaseId; + return response; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return await this._containerOrBlobOperation.breakLease(operationOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. + */ +class RetriableReadableStream extends stream.Readable { + /** + * Creates an instance of RetriableReadableStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.source.removeAllListeners("data"); + this.source.emit("end"); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? undefined : error); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) 
+ * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. + */ +class BlobDownloadResponse { + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. 
+ * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. 
This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. 
+ * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const AVRO_SYNC_MARKER_SIZE = 16; +const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +const AVRO_CODEC_KEY = "avro.codec"; +const AVRO_SCHEMA_KEY = "avro.schema"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function arraysEqual(a, b) { + if (a === b) + return true; + if (a == null || b == null) + return false; + if (a.length != b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length != length) { + throw new Error("Hit stream end."); + } + return bytes; + } + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b == 1) { + return true; + } + else if (b == 0) { + return false; + } + else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return await stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + // polyfill TextDecoder to be backward compatible with older + // nodejs that doesn't expose TextDecoder as a global variable + if (typeof TextDecoder === "undefined" && "function" !== "undefined") { + global.TextDecoder = __webpack_require__(669).TextDecoder; + } + // FUTURE: need TextDecoder polyfill for IE + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. + const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = async (stream, options = {}) => { + return await AvroParser.readMapPair(stream, readItemMethod, options); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count != 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +class AvroType { + /** + * Determines the AvroType from the Avro Schema. 
+ */ + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (err) { } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } + } +} +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; + } + async read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return await AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return await AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return await AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return await AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return await AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return await AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return await AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return await AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } + } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + async read(stream, options = {}) 
{ + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; + } +} +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return await this._types[typeIndex].read(stream, options); + } +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + async read(stream, options = {}) { + const readItemMethod = async (s, options) => { + return await this._itemType.read(s, options); + }; + return await AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (this._fields.hasOwnProperty(key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; + } +} + +// Copyright (c) Microsoft Corporation. +class AvroReader { + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec == undefined || codec == "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. 
+ this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset == 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } + } + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* parseObjects_1() { + if (!this._initialized) { + yield tslib.__await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock == 0) { + const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (err) { + // We hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield tslib.__await(result); + } + }); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class AvroReadable { +} + +// Copyright (c) Microsoft Corporation. +const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +class AvroReadableFromStream extends AvroReadable { + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. 
+ return this.toUint8Array(chunk); + } + else { + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + cleanUp(); + // chunk.length maybe less than desired size if the stream ends. + resolve(this.toUint8Array(chunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + }); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +class BlobQuickQueryStream extends stream.Readable { + /** + * Creates an instance of BlobQuickQueryStream. + * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw 
Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. 
+ * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). 
+ * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return undefined; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; +(function (BlockBlobTier) { + /** + * Optimized for storing data that is accessed frequently. + */ + BlockBlobTier["Hot"] = "Hot"; + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + BlockBlobTier["Cool"] = "Cool"; + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). 
+ */ + BlockBlobTier["Archive"] = "Archive"; +})(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; +(function (PremiumPageBlobTier) { + /** + * P4 Tier. + */ + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. + */ + PremiumPageBlobTier["P80"] = "P80"; +})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); +function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). +} +function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } +} +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. + * @param response - Model PageBlob Range response + */ +function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} + +// Copyright (c) Microsoft Corporation. +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. 
+ * + * @hidden + */ +class BlobBeginCopyFromUrlPoller extends coreLro.Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return coreHttp.delay(this.intervalInMs); + } +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? + options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } + catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. 
+ * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count + ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} + +// Copyright (c) Microsoft Corporation. +/** + * States for Batch. + */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. + */ +class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new events.EventEmitter(); + } + /** + * Add a operation into queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); + } + /** + * Start execute operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); + } + /** + * Get next operation to be executed. Return null when reaching ends. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. + * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } + else { + return; + } + } + } +} + +// Copyright (c) Microsoft Corporation. 
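+/**
+ * A minimal usage sketch for the Batch helper defined above (illustrative only;
+ * `work` is a hypothetical async function standing in for a real queued operation):
+ *
+ * ```js
+ * const batch = new Batch(3); // execute at most 3 operations concurrently
+ * for (let i = 0; i < 10; i++) {
+ *   batch.addOperation(async () => {
+ *     await work(i); // hypothetical async task
+ *   });
+ * }
+ * await batch.do(); // resolves when all operations finish; rejects on the first error
+ * ```
+ */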
+/** + * This class generates a readable stream from the data in an array of buffers. + */ +class BuffersStream extends stream.Readable { + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. + * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } + else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * maxBufferLength is max size of each buffer in the pooled buffers. + */ +// Can't use import as Typescript doesn't recognize "buffer". +const maxBufferLength = __webpack_require__(293).constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +class PooledBuffer { + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. 
+ */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. + * + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. + * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. + */ +class BufferScheduler { + /** + * Creates an instance of BufferScheduler. 
+ * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. + */ + this.emitter = new events.EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? 
Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. + * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. 
+ * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Reads a readable stream into buffer. Fill the buffer from offset to end. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream + */ +async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + stream.on("readable", () => { + if (pos >= count) { + resolve(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", reject); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. + */ +async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; + +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. 
+ */ +class BlobClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = new Blob$1(this.storageClientContext); + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * The name of the blob. 
+ */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlobClient-download", options); + try { + const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!coreHttp.isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-exists", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg) { + // Expected exception when checking blob existence + return true; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + async getProperties(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); + try { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-delete", options); + options.conditions = options.conditions || {}; + try { + return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. 
+ */ + async undelete(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-undelete", options); + try { + return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. 
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setTags", options); + try { + return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getTags", options); + try { + const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + async createSnapshot(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. 
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args), + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options, + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + await poller.poll(); + return poller; + } + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. 
+ */ + async abortCopyFromURL(copyId, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + try { + return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. 
+ */ + async setAccessTier(tier, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + try { + return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + try { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + // Customer doesn't specify length, get it + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + } + } + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + options.blockSize) { + batch.addOperation(async () => { + // Exclusive chunk end position + let chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + }); + const stream = response.readableStreamBody; + await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + try { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined. 
+ response.blobDownloadStream = undefined; + return response; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
+ */ + async startCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options) { + const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + try { + return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set immutablility policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. 
+ */ + async setImmutabilityPolicy(immutabilityPolicy, options) { + const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + try { + return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options) { + const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + try { + return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential. 
+ pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = new AppendBlob(this.storageClientContext); + } + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); + const conditions = { ifNoneMatch: ETagAny }; + try { + const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + async seal(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + options.conditions = options.conditions || {}; + try { + return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. 
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = new BlockBlob(this.storageClientContext); + this._blobContext = new Blob$1(this.storageClientContext); + } + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + async query(query, options = {}) { + var _a; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + try { + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + async upload(body, contentLength, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + async syncUploadFromURL(sourceURL, options = {}) { + var _a, _b, _c, _d, _e; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. 
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. + */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + async commitBlockList(blocks, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. + */ + async getBlockList(listType, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); + try { + const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); + try { + if (coreHttp.isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. + */ + async uploadBrowserData(browserData, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); + try { + const browserBlob = new Blob([browserData]); + return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. 
+ * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || + options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); + try { + if (size <= options.maxSingleShotSize) { + return await this.upload(bodyFactory(0, size), size, updatedOptions); + } + const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = options.blockSize * i; + const end = i === numBlocks - 1 ? 
size : start + options.blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case the block needs to be retried + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadFile(filePath, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); + try { + const size = (await fsStat(filePath)).size; + return await this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Set the input stream's highWaterMark to the same value as the bufferSize + * parameter; this avoids Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * which correlates positively with the maximum upload concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation.
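+ *
+ * Example usage (a minimal sketch, assuming Node.js, an existing
+ * `blockBlobClient`, and a hypothetical local file name):
+ *
+ * ```js
+ * const fs = require("fs");
+ * const readStream = fs.createReadStream("data.bin");
+ * await blockBlobClient.uploadStream(readStream, 8 * 1024 * 1024, 5, {
+ *   onProgress: (ev) => console.log(`uploaded ${ev.loadedBytes} bytes`),
+ * });
+ * ```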
+ */ + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + try { + let blockNum = 0; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case the block needs to be retried + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should be set to a value smaller than maxConcurrency; this helps to + // reduce the chance that an outgoing handler has to wait for stream data, a + // situation in which outgoing handlers are blocked. + // The outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential.
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.pageBlobContext = new PageBlob(this.storageClientContext); + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + async uploadPages(body, offset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. 
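+ *
+ * Example usage (a minimal sketch, assuming an existing `pageBlobClient`;
+ * offset and count must be aligned to 512-byte page boundaries):
+ *
+ * ```js
+ * await pageBlobClient.clearPages(0, 512); // clears the first page
+ * ```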
+ */ + async clearPages(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + try { + return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + async getPageRanges(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + try { + return await this.pageBlobContext + .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + async resize(size, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); + try { + return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. 
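+ *
+ * Example usage (a minimal sketch, assuming an existing `pageBlobClient`),
+ * setting the sequence number to an explicit value:
+ *
+ * ```js
+ * await pageBlobClient.updateSequenceNumber("update", 10);
+ * ```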
+ */ + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); + try { + return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + async startCopyIncremental(copySource, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); + try { + return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +// Copyright (c) Microsoft Corporation. +async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return buffer.toString(); +} +function utf8ByteLength(str) { + return Buffer.byteLength(str); +} + +// Copyright (c) Microsoft Corporation. +const HTTP_HEADER_DELIMITER = ": "; +const SPACE_DELIMITER = " "; +const NOT_FOUND = -1; +/** + * Util class for parsing batch response. + */ +class BatchResponseParser { + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + // In special case(reported), server may return invalid content-type which could not be parsed. + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + // This should be prevent during coding. 
+ throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. + if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); // string before first response boundary is useless + const subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + // Parse sub subResponses. + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. + // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. 
+ if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + // Parse headers of sub response. + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. + if (contentId !== NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount, + }; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var MutexLockStatus; +(function (MutexLockStatus) { + MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; +})(MutexLockStatus || (MutexLockStatus = {})); +/** + * An async mutex lock. + */ +class Mutex { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); + } + /** + * Unlock a key. 
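+ *
+ * Example usage (a minimal sketch of the lock/unlock pairing, mirroring how
+ * {@link BlobBatch} serializes sub-request assembly):
+ *
+ * ```js
+ * await Mutex.lock("batch");
+ * try {
+ *   // ...critical section...
+ * } finally {
+ *   await Mutex.unlock("batch");
+ * }
+ * ```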
+ * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; + } + else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } +} +Mutex.keys = {}; +Mutex.listeners = {}; + +// Copyright (c) Microsoft Corporation. +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } + finally { + await Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); + try { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); + try { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * Inner batch request class which is responsible for assembling and serializing sub requests. + * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. + */ +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = coreHttp.generateUuid(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
+ */ + createPipeline(credential) { + const isAnonymousCreds = credential instanceof AnonymousCredential; + const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] + const factories = new Array(policyFactoryLength); + factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer + factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers + if (!isAnonymousCreds) { + factories[2] = coreHttp.isTokenCredential(credential) + ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + : credential; + } + factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire + return new Pipeline(factories, {}); + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + "", + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const header of request.headers.headersArray()) { + this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + // Fast fail if url for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. + getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { + constructor(batchRequest, nextPolicy, options) { + super(nextPolicy, options); + this.dummyResponse = { + request: new coreHttp.WebResource(), + status: 200, + headers: new coreHttp.HttpHeaders(), + }; + this.batchRequest = batchRequest; + } + async sendRequest(request) { + await this.batchRequest.appendSubRequestToBody(request); + return this.dummyResponse; // Intercept request from going to wire + } +} +class BatchRequestAssemblePolicyFactory { + constructor(batchRequest) { + this.batchRequest = batchRequest; + } + create(nextPolicy, options) { + return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + } +} +class BatchHeaderFilterPolicy extends coreHttp.BaseRequestPolicy { + // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
+ /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(request) { + let xMsHeaderName = ""; + for (const header of request.headers.headersArray()) { + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return this._nextPolicy.sendRequest(request); + } +} +class BatchHeaderFilterPolicyFactory { + create(nextPolicy, options) { + return new BatchHeaderFilterPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. + this.serviceOrContainerContext = new Container(storageClientContext); + } + else { + this.serviceOrContainerContext = new Service(storageClientContext); + } + } + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); + } + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } + else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); + } + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. 
+ * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); + try { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). + const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +class ContainerClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = new Container(this.storageClientContext); + } + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - Options to Container Create operation. 
+ * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + async create(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-create", options); + try { + // Spread operator in destructuring assignments, + // this will filter out unwanted properties from the response object into result object + return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-exists", options); + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking container existence", + }); + return false; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. 
+ */ + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); + try { + return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-delete", options); + try { + return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. 
+ */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); + try { + return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. 
+ */ + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); + try { + const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); + try { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "", + }, + id: identifier.id, + }); + } + return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. 
+ * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); + try { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response, + }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + async deleteBlob(blobName, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); + try { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return await blobClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. 
+ * @param options - Options to Container List Blob Flat Segment operation. + */ + async listBlobFlatSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); + try { + const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. + */ + async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); + try { + const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + response.segment.blobPrefixes = []; + if (response.segment["BlobPrefix"] !== undefined) { + response.segment.blobPrefixes = ProcessBlobPrefixes(response.segment["BlobPrefix"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listSegments(marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. + */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const listBlobsFlatSegmentResponse = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blobs + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listHierarchySegments(delimiter, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1() { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listItemsByHierarchy(delimiter, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const listBlobsHierarchySegmentResponse = _c.value; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); + } + } + for (const blob of segment.blobItems) { + yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); + } + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blob prefixes and blobs + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + getContainerNameFromUrl() { + let containerName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". 
+ // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.getPath().split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } + catch (error) { + throw new Error("Unable to extract containerName with provided information."); + } + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to create blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. 
+ */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } + } + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. 
Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; + } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +class BlobServiceClient extends StorageClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = new Service(this.storageClientContext); + } + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" 
+ extractedCreds.accountSas, pipeline); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + return await containerClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); + try { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, + deletedContainerVersion }, updatedOptions)); + return { containerClient, containerUndeleteResponse }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. 
+ * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); + try { + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); + try { + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); + try { + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. 
+ */ + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); + try { + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); + try { + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); + try { + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. 
+ * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); + try { + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. 
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); + try { + const response = await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
+ */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } +} + +Object.defineProperty(exports, 'BaseRequestPolicy', { + enumerable: true, + get: function () { return coreHttp.BaseRequestPolicy; } +}); +Object.defineProperty(exports, 'HttpHeaders', { + enumerable: true, + get: function () { return coreHttp.HttpHeaders; } +}); +Object.defineProperty(exports, 'RequestPolicyOptions', { + enumerable: true, + get: function () { return coreHttp.RequestPolicyOptions; } +}); +Object.defineProperty(exports, 'RestError', { + enumerable: true, + get: function () { return coreHttp.RestError; } +}); +Object.defineProperty(exports, 'WebResource', { + enumerable: true, + get: function () { return coreHttp.WebResource; } +}); +Object.defineProperty(exports, 'deserializationPolicy', { + enumerable: true, + get: function () { return coreHttp.deserializationPolicy; } +}); +exports.AccountSASPermissions = AccountSASPermissions; +exports.AccountSASResourceTypes = AccountSASResourceTypes; +exports.AccountSASServices = AccountSASServices; +exports.AnonymousCredential = AnonymousCredential; +exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; +exports.AppendBlobClient = AppendBlobClient; +exports.BlobBatch = BlobBatch; +exports.BlobBatchClient = BlobBatchClient; +exports.BlobClient = BlobClient; +exports.BlobLeaseClient = BlobLeaseClient; +exports.BlobSASPermissions = BlobSASPermissions; +exports.BlobServiceClient = BlobServiceClient; +exports.BlockBlobClient = BlockBlobClient; +exports.ContainerClient = ContainerClient; +exports.ContainerSASPermissions = ContainerSASPermissions; +exports.Credential = Credential; +exports.CredentialPolicy = CredentialPolicy; +exports.PageBlobClient = PageBlobClient; +exports.Pipeline = Pipeline; +exports.SASQueryParameters = SASQueryParameters; +exports.StorageBrowserPolicy = StorageBrowserPolicy; +exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; +exports.StorageOAuthScopes = StorageOAuthScopes; +exports.StorageRetryPolicy = StorageRetryPolicy; +exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; +exports.StorageSharedKeyCredential = StorageSharedKeyCredential; +exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; +exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; +exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; +exports.isPipelineLike = isPipelineLike; +exports.logger = logger; +exports.newPipeline = newPipeline; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 374 */, +/* 375 */, +/* 376 */, +/* 377 */, +/* 378 */, +/* 379 */, +/* 380 */, +/* 381 */, +/* 382 */, +/* 383 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Path = void 0; +const path = __importStar(__webpack_require__(622)); +const pathHelper = __importStar(__webpack_require__(972)); +const assert_1 = __importDefault(__webpack_require__(357)); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } +} 
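+/**
+ * Minimal usage sketch of the Path helper above (exported as `Path` below).
+ * The inputs are hypothetical; the printed results assume a POSIX `path.sep`
+ * of "/", while on Windows the joined output uses "\\" instead.
+ *
+ *   const fromString = new Path('foo/bar/baz.txt');
+ *   fromString.segments;     // ['foo', 'bar', 'baz.txt']
+ *   fromString.toString();   // 'foo/bar/baz.txt'
+ *
+ *   const fromSegments = new Path(['foo', 'bar']);
+ *   fromSegments.toString(); // 'foo/bar' (or 'foo\\bar' on Windows)
+ */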
+exports.Path = Path; +//# sourceMappingURL=internal-path.js.map + +/***/ }), +/* 384 */, +/* 385 */, +/* 386 */, +/* 387 */, +/* 388 */, +/* 389 */, +/* 390 */, +/* 391 */, +/* 392 */, +/* 393 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +const punycode = __webpack_require__(815); +const urlParse = __webpack_require__(835).parse; +const util = __webpack_require__(669); +const pubsuffix = __webpack_require__(562); +const Store = __webpack_require__(338).Store; +const MemoryCookieStore = __webpack_require__(332).MemoryCookieStore; +const pathMatch = __webpack_require__(348).pathMatch; +const VERSION = __webpack_require__(460); +const { fromCallback } = __webpack_require__(147); + +// From RFC6265 S4.1.1 +// note that it excludes \x3B ";" +const COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/; + +const CONTROL_CHARS = /[\x00-\x1F]/; + +// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in +// the "relaxed" mode, see: +// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60 +const TERMINATORS = ["\n", "\r", "\0"]; + +// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' +// Note ';' is \x3B +const PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; + +// date-time parsing constants (RFC6265 S5.1.1) + +const DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; + +const MONTH_TO_NUM = { + jan: 0, + feb: 1, + mar: 2, + apr: 3, + may: 4, + jun: 5, + jul: 6, + aug: 7, + sep: 8, + oct: 9, + nov: 10, + dec: 11 +}; + +const MAX_TIME = 2147483647000; // 31-bit max +const MIN_TIME = 0; // 31-bit min +const SAME_SITE_CONTEXT_VAL_ERR = + 'Invalid sameSiteContext option for getCookies(); expected one of "strict", "lax", or "none"'; + +function checkSameSiteContext(value) { + const context = String(value).toLowerCase(); + if (context === "none" || context === "lax" || context === "strict") { + return context; + } else { + return null; + } +} + +const PrefixSecurityEnum = Object.freeze({ + SILENT: "silent", + STRICT: "strict", + DISABLED: "unsafe-disabled" +}); + +// Dumped from ip-regex@4.0.0, with the following changes: +// * all capturing groups converted to non-capturing -- "(?:)" +// * support for IPv6 Scoped Literal ("%eth1") removed +// * lowercase hexadecimal only +var IP_REGEX_LOWERCASE =/(?:^(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}$)|(?:^(?:(?:[a-f\d]{1,4}:){7}(?:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){6}(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){5}(?::(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,2}|:)|(?:[a-f\d]{1,4}:){4}(?:(?::[a-f\d]{1,4}){0,1}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,3}|:)|(?:[a-f\d]{1,4}:){3}(?:(?::[a-f\d]{1,4}){0,2}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,4}|:)|(?:[a-f\d]{1,4}:){2}(?:(?::[a-f\d]{1,4}){0,3}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,5}|:)|(?:[a-f\d]{1,4}:){1}(?:(?::[a-f\d]{1,4}){0,4}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,6}|:)|(?::(?:(?::[a-f\d]{1,4}){0,5}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,7}|:)))$)/; + +/* + * Parses a Natural number (i.e., non-negative integer) with either the + * *DIGIT ( non-digit *OCTET ) + * or + * *DIGIT + * grammar (RFC6265 S5.1.1). + * + * The "trailingOK" boolean controls if the grammar accepts a + * "( non-digit *OCTET )" trailer. 
+ */ +function parseDigits(token, minDigits, maxDigits, trailingOK) { + let count = 0; + while (count < token.length) { + const c = token.charCodeAt(count); + // "non-digit = %x00-2F / %x3A-FF" + if (c <= 0x2f || c >= 0x3a) { + break; + } + count++; + } + + // constrain to a minimum and maximum number of digits. + if (count < minDigits || count > maxDigits) { + return null; + } + + if (!trailingOK && count != token.length) { + return null; + } + + return parseInt(token.substr(0, count), 10); +} + +function parseTime(token) { + const parts = token.split(":"); + const result = [0, 0, 0]; + + /* RF6256 S5.1.1: + * time = hms-time ( non-digit *OCTET ) + * hms-time = time-field ":" time-field ":" time-field + * time-field = 1*2DIGIT + */ + + if (parts.length !== 3) { + return null; + } + + for (let i = 0; i < 3; i++) { + // "time-field" must be strictly "1*2DIGIT", HOWEVER, "hms-time" can be + // followed by "( non-digit *OCTET )" so therefore the last time-field can + // have a trailer + const trailingOK = i == 2; + const num = parseDigits(parts[i], 1, 2, trailingOK); + if (num === null) { + return null; + } + result[i] = num; + } + + return result; +} + +function parseMonth(token) { + token = String(token) + .substr(0, 3) + .toLowerCase(); + const num = MONTH_TO_NUM[token]; + return num >= 0 ? num : null; +} + +/* + * RFC6265 S5.1.1 date parser (see RFC for full grammar) + */ +function parseDate(str) { + if (!str) { + return; + } + + /* RFC6265 S5.1.1: + * 2. Process each date-token sequentially in the order the date-tokens + * appear in the cookie-date + */ + const tokens = str.split(DATE_DELIM); + if (!tokens) { + return; + } + + let hour = null; + let minute = null; + let second = null; + let dayOfMonth = null; + let month = null; + let year = null; + + for (let i = 0; i < tokens.length; i++) { + const token = tokens[i].trim(); + if (!token.length) { + continue; + } + + let result; + + /* 2.1. If the found-time flag is not set and the token matches the time + * production, set the found-time flag and set the hour- value, + * minute-value, and second-value to the numbers denoted by the digits in + * the date-token, respectively. Skip the remaining sub-steps and continue + * to the next date-token. + */ + if (second === null) { + result = parseTime(token); + if (result) { + hour = result[0]; + minute = result[1]; + second = result[2]; + continue; + } + } + + /* 2.2. If the found-day-of-month flag is not set and the date-token matches + * the day-of-month production, set the found-day-of- month flag and set + * the day-of-month-value to the number denoted by the date-token. Skip + * the remaining sub-steps and continue to the next date-token. + */ + if (dayOfMonth === null) { + // "day-of-month = 1*2DIGIT ( non-digit *OCTET )" + result = parseDigits(token, 1, 2, true); + if (result !== null) { + dayOfMonth = result; + continue; + } + } + + /* 2.3. If the found-month flag is not set and the date-token matches the + * month production, set the found-month flag and set the month-value to + * the month denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. + */ + if (month === null) { + result = parseMonth(token); + if (result !== null) { + month = result; + continue; + } + } + + /* 2.4. If the found-year flag is not set and the date-token matches the + * year production, set the found-year flag and set the year-value to the + * number denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. 
+ */ + if (year === null) { + // "year = 2*4DIGIT ( non-digit *OCTET )" + result = parseDigits(token, 2, 4, true); + if (result !== null) { + year = result; + /* From S5.1.1: + * 3. If the year-value is greater than or equal to 70 and less + * than or equal to 99, increment the year-value by 1900. + * 4. If the year-value is greater than or equal to 0 and less + * than or equal to 69, increment the year-value by 2000. + */ + if (year >= 70 && year <= 99) { + year += 1900; + } else if (year >= 0 && year <= 69) { + year += 2000; + } + } + } + } + + /* RFC 6265 S5.1.1 + * "5. Abort these steps and fail to parse the cookie-date if: + * * at least one of the found-day-of-month, found-month, found- + * year, or found-time flags is not set, + * * the day-of-month-value is less than 1 or greater than 31, + * * the year-value is less than 1601, + * * the hour-value is greater than 23, + * * the minute-value is greater than 59, or + * * the second-value is greater than 59. + * (Note that leap seconds cannot be represented in this syntax.)" + * + * So, in order as above: + */ + if ( + dayOfMonth === null || + month === null || + year === null || + second === null || + dayOfMonth < 1 || + dayOfMonth > 31 || + year < 1601 || + hour > 23 || + minute > 59 || + second > 59 + ) { + return; + } + + return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second)); +} + +function formatDate(date) { + return date.toUTCString(); +} + +// S5.1.2 Canonicalized Host Names +function canonicalDomain(str) { + if (str == null) { + return null; + } + str = str.trim().replace(/^\./, ""); // S4.1.2.3 & S5.2.3: ignore leading . + + // convert to IDN if any non-ASCII characters + if (punycode && /[^\u0001-\u007f]/.test(str)) { + str = punycode.toASCII(str); + } + + return str.toLowerCase(); +} + +// S5.1.3 Domain Matching +function domainMatch(str, domStr, canonicalize) { + if (str == null || domStr == null) { + return null; + } + if (canonicalize !== false) { + str = canonicalDomain(str); + domStr = canonicalDomain(domStr); + } + + /* + * S5.1.3: + * "A string domain-matches a given domain string if at least one of the + * following conditions hold:" + * + * " o The domain string and the string are identical. (Note that both the + * domain string and the string will have been canonicalized to lower case at + * this point)" + */ + if (str == domStr) { + return true; + } + + /* " o All of the following [three] conditions hold:" */ + + /* "* The domain string is a suffix of the string" */ + const idx = str.indexOf(domStr); + if (idx <= 0) { + return false; // it's a non-match (-1) or prefix (0) + } + + // next, check it's a proper suffix + // e.g., "a.b.c".indexOf("b.c") === 2 + // 5 === 3+2 + if (str.length !== domStr.length + idx) { + return false; // it's not a suffix + } + + /* " * The last character of the string that is not included in the + * domain string is a %x2E (".") character." */ + if (str.substr(idx-1,1) !== '.') { + return false; // doesn't align on "." + } + + /* " * The string is a host name (i.e., not an IP address)." */ + if (IP_REGEX_LOWERCASE.test(str)) { + return false; // it's an IP address + } + + return true; +} + +// RFC6265 S5.1.4 Paths and Path-Match + +/* + * "The user agent MUST use an algorithm equivalent to the following algorithm + * to compute the default-path of a cookie:" + * + * Assumption: the path (and not query part or absolute uri) is passed in. + */ +function defaultPath(path) { + // "2. 
If the uri-path is empty or if the first character of the uri-path is not + // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. + if (!path || path.substr(0, 1) !== "/") { + return "/"; + } + + // "3. If the uri-path contains no more than one %x2F ("/") character, output + // %x2F ("/") and skip the remaining step." + if (path === "/") { + return path; + } + + const rightSlash = path.lastIndexOf("/"); + if (rightSlash === 0) { + return "/"; + } + + // "4. Output the characters of the uri-path from the first character up to, + // but not including, the right-most %x2F ("/")." + return path.slice(0, rightSlash); +} + +function trimTerminator(str) { + for (let t = 0; t < TERMINATORS.length; t++) { + const terminatorIdx = str.indexOf(TERMINATORS[t]); + if (terminatorIdx !== -1) { + str = str.substr(0, terminatorIdx); + } + } + + return str; +} + +function parseCookiePair(cookiePair, looseMode) { + cookiePair = trimTerminator(cookiePair); + + let firstEq = cookiePair.indexOf("="); + if (looseMode) { + if (firstEq === 0) { + // '=' is immediately at start + cookiePair = cookiePair.substr(1); + firstEq = cookiePair.indexOf("="); // might still need to split on '=' + } + } else { + // non-loose mode + if (firstEq <= 0) { + // no '=' or is at start + return; // needs to have non-empty "cookie-name" + } + } + + let cookieName, cookieValue; + if (firstEq <= 0) { + cookieName = ""; + cookieValue = cookiePair.trim(); + } else { + cookieName = cookiePair.substr(0, firstEq).trim(); + cookieValue = cookiePair.substr(firstEq + 1).trim(); + } + + if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) { + return; + } + + const c = new Cookie(); + c.key = cookieName; + c.value = cookieValue; + return c; +} + +function parse(str, options) { + if (!options || typeof options !== "object") { + options = {}; + } + str = str.trim(); + + // We use a regex to parse the "name-value-pair" part of S5.2 + const firstSemi = str.indexOf(";"); // S5.2 step 1 + const cookiePair = firstSemi === -1 ? str : str.substr(0, firstSemi); + const c = parseCookiePair(cookiePair, !!options.loose); + if (!c) { + return; + } + + if (firstSemi === -1) { + return c; + } + + // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question)." plus later on in the same section + // "discard the first ";" and trim". + const unparsed = str.slice(firstSemi + 1).trim(); + + // "If the unparsed-attributes string is empty, skip the rest of these + // steps." + if (unparsed.length === 0) { + return c; + } + + /* + * S5.2 says that when looping over the items "[p]rocess the attribute-name + * and attribute-value according to the requirements in the following + * subsections" for every item. Plus, for many of the individual attributes + * in S5.3 it says to use the "attribute-value of the last attribute in the + * cookie-attribute-list". Therefore, in this implementation, we overwrite + * the previous value. 
+ */ + const cookie_avs = unparsed.split(";"); + while (cookie_avs.length) { + const av = cookie_avs.shift().trim(); + if (av.length === 0) { + // happens if ";;" appears + continue; + } + const av_sep = av.indexOf("="); + let av_key, av_value; + + if (av_sep === -1) { + av_key = av; + av_value = null; + } else { + av_key = av.substr(0, av_sep); + av_value = av.substr(av_sep + 1); + } + + av_key = av_key.trim().toLowerCase(); + + if (av_value) { + av_value = av_value.trim(); + } + + switch (av_key) { + case "expires": // S5.2.1 + if (av_value) { + const exp = parseDate(av_value); + // "If the attribute-value failed to parse as a cookie date, ignore the + // cookie-av." + if (exp) { + // over and underflow not realistically a concern: V8's getTime() seems to + // store something larger than a 32-bit time_t (even with 32-bit node) + c.expires = exp; + } + } + break; + + case "max-age": // S5.2.2 + if (av_value) { + // "If the first character of the attribute-value is not a DIGIT or a "-" + // character ...[or]... If the remainder of attribute-value contains a + // non-DIGIT character, ignore the cookie-av." + if (/^-?[0-9]+$/.test(av_value)) { + const delta = parseInt(av_value, 10); + // "If delta-seconds is less than or equal to zero (0), let expiry-time + // be the earliest representable date and time." + c.setMaxAge(delta); + } + } + break; + + case "domain": // S5.2.3 + // "If the attribute-value is empty, the behavior is undefined. However, + // the user agent SHOULD ignore the cookie-av entirely." + if (av_value) { + // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E + // (".") character." + const domain = av_value.trim().replace(/^\./, ""); + if (domain) { + // "Convert the cookie-domain to lower case." + c.domain = domain.toLowerCase(); + } + } + break; + + case "path": // S5.2.4 + /* + * "If the attribute-value is empty or if the first character of the + * attribute-value is not %x2F ("/"): + * Let cookie-path be the default-path. + * Otherwise: + * Let cookie-path be the attribute-value." + * + * We'll represent the default-path as null since it depends on the + * context of the parsing. + */ + c.path = av_value && av_value[0] === "/" ? av_value : null; + break; + + case "secure": // S5.2.5 + /* + * "If the attribute-name case-insensitively matches the string "Secure", + * the user agent MUST append an attribute to the cookie-attribute-list + * with an attribute-name of Secure and an empty attribute-value." + */ + c.secure = true; + break; + + case "httponly": // S5.2.6 -- effectively the same as 'secure' + c.httpOnly = true; + break; + + case "samesite": // RFC6265bis-02 S5.3.7 + const enforcement = av_value ? av_value.toLowerCase() : ""; + switch (enforcement) { + case "strict": + c.sameSite = "strict"; + break; + case "lax": + c.sameSite = "lax"; + break; + default: + // RFC6265bis-02 S5.3.7 step 1: + // "If cookie-av's attribute-value is not a case-insensitive match + // for "Strict" or "Lax", ignore the "cookie-av"." + // This effectively sets it to 'none' from the prototype. + break; + } + break; + + default: + c.extensions = c.extensions || []; + c.extensions.push(av); + break; + } + } + + return c; +} + +/** + * If the cookie-name begins with a case-sensitive match for the + * string "__Secure-", abort these steps and ignore the cookie + * entirely unless the cookie's secure-only-flag is true. 
+ * @param cookie + * @returns boolean + */ +function isSecurePrefixConditionMet(cookie) { + return !cookie.key.startsWith("__Secure-") || cookie.secure; +} + +/** + * If the cookie-name begins with a case-sensitive match for the + * string "__Host-", abort these steps and ignore the cookie + * entirely unless the cookie meets all the following criteria: + * 1. The cookie's secure-only-flag is true. + * 2. The cookie's host-only-flag is true. + * 3. The cookie-attribute-list contains an attribute with an + * attribute-name of "Path", and the cookie's path is "/". + * @param cookie + * @returns boolean + */ +function isHostPrefixConditionMet(cookie) { + return ( + !cookie.key.startsWith("__Host-") || + (cookie.secure && + cookie.hostOnly && + cookie.path != null && + cookie.path === "/") + ); +} + +// avoid the V8 deoptimization monster! +function jsonParse(str) { + let obj; + try { + obj = JSON.parse(str); + } catch (e) { + return e; + } + return obj; +} + +function fromJSON(str) { + if (!str) { + return null; + } + + let obj; + if (typeof str === "string") { + obj = jsonParse(str); + if (obj instanceof Error) { + return null; + } + } else { + // assume it's an Object + obj = str; + } + + const c = new Cookie(); + for (let i = 0; i < Cookie.serializableProperties.length; i++) { + const prop = Cookie.serializableProperties[i]; + if (obj[prop] === undefined || obj[prop] === cookieDefaults[prop]) { + continue; // leave as prototype default + } + + if (prop === "expires" || prop === "creation" || prop === "lastAccessed") { + if (obj[prop] === null) { + c[prop] = null; + } else { + c[prop] = obj[prop] == "Infinity" ? "Infinity" : new Date(obj[prop]); + } + } else { + c[prop] = obj[prop]; + } + } + + return c; +} + +/* Section 5.4 part 2: + * "* Cookies with longer paths are listed before cookies with + * shorter paths. + * + * * Among cookies that have equal-length path fields, cookies with + * earlier creation-times are listed before cookies with later + * creation-times." + */ + +function cookieCompare(a, b) { + let cmp = 0; + + // descending for length: b CMP a + const aPathLen = a.path ? a.path.length : 0; + const bPathLen = b.path ? b.path.length : 0; + cmp = bPathLen - aPathLen; + if (cmp !== 0) { + return cmp; + } + + // ascending for time: a CMP b + const aTime = a.creation ? a.creation.getTime() : MAX_TIME; + const bTime = b.creation ? b.creation.getTime() : MAX_TIME; + cmp = aTime - bTime; + if (cmp !== 0) { + return cmp; + } + + // break ties for the same millisecond (precision of JavaScript's clock) + cmp = a.creationIndex - b.creationIndex; + + return cmp; +} + +// Gives the permutation of all possible pathMatch()es of a given path. The +// array is in longest-to-shortest order. Handy for indexing. +function permutePath(path) { + if (path === "/") { + return ["/"]; + } + const permutations = [path]; + while (path.length > 1) { + const lindex = path.lastIndexOf("/"); + if (lindex === 0) { + break; + } + path = path.substr(0, lindex); + permutations.push(path); + } + permutations.push("/"); + return permutations; +} + +function getCookieContext(url) { + if (url instanceof Object) { + return url; + } + // NOTE: decodeURI will throw on malformed URIs (see GH-32). + // Therefore, we will just skip decoding for such URIs. 
+ try { + url = decodeURI(url); + } catch (err) { + // Silently swallow error + } + + return urlParse(url); +} + +const cookieDefaults = { + // the order in which the RFC has them: + key: "", + value: "", + expires: "Infinity", + maxAge: null, + domain: null, + path: null, + secure: false, + httpOnly: false, + extensions: null, + // set by the CookieJar: + hostOnly: null, + pathIsDefault: null, + creation: null, + lastAccessed: null, + sameSite: "none" +}; + +class Cookie { + constructor(options = {}) { + if (util.inspect.custom) { + this[util.inspect.custom] = this.inspect; + } + + Object.assign(this, cookieDefaults, options); + this.creation = this.creation || new Date(); + + // used to break creation ties in cookieCompare(): + Object.defineProperty(this, "creationIndex", { + configurable: false, + enumerable: false, // important for assert.deepEqual checks + writable: true, + value: ++Cookie.cookiesCreated + }); + } + + inspect() { + const now = Date.now(); + const hostOnly = this.hostOnly != null ? this.hostOnly : "?"; + const createAge = this.creation + ? `${now - this.creation.getTime()}ms` + : "?"; + const accessAge = this.lastAccessed + ? `${now - this.lastAccessed.getTime()}ms` + : "?"; + return `Cookie="${this.toString()}; hostOnly=${hostOnly}; aAge=${accessAge}; cAge=${createAge}"`; + } + + toJSON() { + const obj = {}; + + for (const prop of Cookie.serializableProperties) { + if (this[prop] === cookieDefaults[prop]) { + continue; // leave as prototype default + } + + if ( + prop === "expires" || + prop === "creation" || + prop === "lastAccessed" + ) { + if (this[prop] === null) { + obj[prop] = null; + } else { + obj[prop] = + this[prop] == "Infinity" // intentionally not === + ? "Infinity" + : this[prop].toISOString(); + } + } else if (prop === "maxAge") { + if (this[prop] !== null) { + // again, intentionally not === + obj[prop] = + this[prop] == Infinity || this[prop] == -Infinity + ? this[prop].toString() + : this[prop]; + } + } else { + if (this[prop] !== cookieDefaults[prop]) { + obj[prop] = this[prop]; + } + } + } + + return obj; + } + + clone() { + return fromJSON(this.toJSON()); + } + + validate() { + if (!COOKIE_OCTETS.test(this.value)) { + return false; + } + if ( + this.expires != Infinity && + !(this.expires instanceof Date) && + !parseDate(this.expires) + ) { + return false; + } + if (this.maxAge != null && this.maxAge <= 0) { + return false; // "Max-Age=" non-zero-digit *DIGIT + } + if (this.path != null && !PATH_VALUE.test(this.path)) { + return false; + } + + const cdomain = this.cdomain(); + if (cdomain) { + if (cdomain.match(/\.$/)) { + return false; // S4.1.2.3 suggests that this is bad. 
domainMatch() tests confirm this + } + const suffix = pubsuffix.getPublicSuffix(cdomain); + if (suffix == null) { + // it's a public suffix + return false; + } + } + return true; + } + + setExpires(exp) { + if (exp instanceof Date) { + this.expires = exp; + } else { + this.expires = parseDate(exp) || "Infinity"; + } + } + + setMaxAge(age) { + if (age === Infinity || age === -Infinity) { + this.maxAge = age.toString(); // so JSON.stringify() works + } else { + this.maxAge = age; + } + } + + cookieString() { + let val = this.value; + if (val == null) { + val = ""; + } + if (this.key === "") { + return val; + } + return `${this.key}=${val}`; + } + + // gives Set-Cookie header format + toString() { + let str = this.cookieString(); + + if (this.expires != Infinity) { + if (this.expires instanceof Date) { + str += `; Expires=${formatDate(this.expires)}`; + } else { + str += `; Expires=${this.expires}`; + } + } + + if (this.maxAge != null && this.maxAge != Infinity) { + str += `; Max-Age=${this.maxAge}`; + } + + if (this.domain && !this.hostOnly) { + str += `; Domain=${this.domain}`; + } + if (this.path) { + str += `; Path=${this.path}`; + } + + if (this.secure) { + str += "; Secure"; + } + if (this.httpOnly) { + str += "; HttpOnly"; + } + if (this.sameSite && this.sameSite !== "none") { + const ssCanon = Cookie.sameSiteCanonical[this.sameSite.toLowerCase()]; + str += `; SameSite=${ssCanon ? ssCanon : this.sameSite}`; + } + if (this.extensions) { + this.extensions.forEach(ext => { + str += `; ${ext}`; + }); + } + + return str; + } + + // TTL() partially replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere) + // S5.3 says to give the "latest representable date" for which we use Infinity + // For "expired" we use 0 + TTL(now) { + /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires + * attribute, the Max-Age attribute has precedence and controls the + * expiration date of the cookie. + * (Concurs with S5.3 step 3) + */ + if (this.maxAge != null) { + return this.maxAge <= 0 ? 0 : this.maxAge * 1000; + } + + let expires = this.expires; + if (expires != Infinity) { + if (!(expires instanceof Date)) { + expires = parseDate(expires) || Infinity; + } + + if (expires == Infinity) { + return Infinity; + } + + return expires.getTime() - (now || Date.now()); + } + + return Infinity; + } + + // expiryTime() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere) + expiryTime(now) { + if (this.maxAge != null) { + const relativeTo = now || this.creation || new Date(); + const age = this.maxAge <= 0 ? 
-Infinity : this.maxAge * 1000; + return relativeTo.getTime() + age; + } + + if (this.expires == Infinity) { + return Infinity; + } + return this.expires.getTime(); + } + + // expiryDate() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere), except it returns a Date + expiryDate(now) { + const millisec = this.expiryTime(now); + if (millisec == Infinity) { + return new Date(MAX_TIME); + } else if (millisec == -Infinity) { + return new Date(MIN_TIME); + } else { + return new Date(millisec); + } + } + + // This replaces the "persistent-flag" parts of S5.3 step 3 + isPersistent() { + return this.maxAge != null || this.expires != Infinity; + } + + // Mostly S5.1.2 and S5.2.3: + canonicalizedDomain() { + if (this.domain == null) { + return null; + } + return canonicalDomain(this.domain); + } + + cdomain() { + return this.canonicalizedDomain(); + } +} + +Cookie.cookiesCreated = 0; +Cookie.parse = parse; +Cookie.fromJSON = fromJSON; +Cookie.serializableProperties = Object.keys(cookieDefaults); +Cookie.sameSiteLevel = { + strict: 3, + lax: 2, + none: 1 +}; + +Cookie.sameSiteCanonical = { + strict: "Strict", + lax: "Lax" +}; + +function getNormalizedPrefixSecurity(prefixSecurity) { + if (prefixSecurity != null) { + const normalizedPrefixSecurity = prefixSecurity.toLowerCase(); + /* The three supported options */ + switch (normalizedPrefixSecurity) { + case PrefixSecurityEnum.STRICT: + case PrefixSecurityEnum.SILENT: + case PrefixSecurityEnum.DISABLED: + return normalizedPrefixSecurity; + } + } + /* Default is SILENT */ + return PrefixSecurityEnum.SILENT; +} + +class CookieJar { + constructor(store, options = { rejectPublicSuffixes: true }) { + if (typeof options === "boolean") { + options = { rejectPublicSuffixes: options }; + } + this.rejectPublicSuffixes = options.rejectPublicSuffixes; + this.enableLooseMode = !!options.looseMode; + this.allowSpecialUseDomain = !!options.allowSpecialUseDomain; + this.store = store || new MemoryCookieStore(); + this.prefixSecurity = getNormalizedPrefixSecurity(options.prefixSecurity); + this._cloneSync = syncWrap("clone"); + this._importCookiesSync = syncWrap("_importCookies"); + this.getCookiesSync = syncWrap("getCookies"); + this.getCookieStringSync = syncWrap("getCookieString"); + this.getSetCookieStringsSync = syncWrap("getSetCookieStrings"); + this.removeAllCookiesSync = syncWrap("removeAllCookies"); + this.setCookieSync = syncWrap("setCookie"); + this.serializeSync = syncWrap("serialize"); + } + + setCookie(cookie, url, options, cb) { + let err; + const context = getCookieContext(url); + if (typeof options === "function") { + cb = options; + options = {}; + } + + const host = canonicalDomain(context.hostname); + const loose = options.loose || this.enableLooseMode; + + let sameSiteContext = null; + if (options.sameSiteContext) { + sameSiteContext = checkSameSiteContext(options.sameSiteContext); + if (!sameSiteContext) { + return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); + } + } + + // S5.3 step 1 + if (typeof cookie === "string" || cookie instanceof String) { + cookie = Cookie.parse(cookie, { loose: loose }); + if (!cookie) { + err = new Error("Cookie failed to parse"); + return cb(options.ignoreError ? null : err); + } + } else if (!(cookie instanceof Cookie)) { + // If you're seeing this error, and are passing in a Cookie object, + // it *might* be a Cookie object from another loaded version of tough-cookie. + err = new Error( + "First argument to setCookie must be a Cookie object or string" + ); + return cb(options.ignoreError ? 
null : err); + } + + // S5.3 step 2 + const now = options.now || new Date(); // will assign later to save effort in the face of errors + + // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie() + + // S5.3 step 4: NOOP; domain is null by default + + // S5.3 step 5: public suffixes + if (this.rejectPublicSuffixes && cookie.domain) { + const suffix = pubsuffix.getPublicSuffix(cookie.cdomain()); + if (suffix == null) { + // e.g. "com" + err = new Error("Cookie has domain set to a public suffix"); + return cb(options.ignoreError ? null : err); + } + } + + // S5.3 step 6: + if (cookie.domain) { + if (!domainMatch(host, cookie.cdomain(), false)) { + err = new Error( + `Cookie not in this host's domain. Cookie:${cookie.cdomain()} Request:${host}` + ); + return cb(options.ignoreError ? null : err); + } + + if (cookie.hostOnly == null) { + // don't reset if already set + cookie.hostOnly = false; + } + } else { + cookie.hostOnly = true; + cookie.domain = host; + } + + //S5.2.4 If the attribute-value is empty or if the first character of the + //attribute-value is not %x2F ("/"): + //Let cookie-path be the default-path. + if (!cookie.path || cookie.path[0] !== "/") { + cookie.path = defaultPath(context.pathname); + cookie.pathIsDefault = true; + } + + // S5.3 step 8: NOOP; secure attribute + // S5.3 step 9: NOOP; httpOnly attribute + + // S5.3 step 10 + if (options.http === false && cookie.httpOnly) { + err = new Error("Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + + // 6252bis-02 S5.4 Step 13 & 14: + if (cookie.sameSite !== "none" && sameSiteContext) { + // "If the cookie's "same-site-flag" is not "None", and the cookie + // is being set from a context whose "site for cookies" is not an + // exact match for request-uri's host's registered domain, then + // abort these steps and ignore the newly created cookie entirely." + if (sameSiteContext === "none") { + err = new Error( + "Cookie is SameSite but this is a cross-origin request" + ); + return cb(options.ignoreError ? null : err); + } + } + + /* 6265bis-02 S5.4 Steps 15 & 16 */ + const ignoreErrorForPrefixSecurity = + this.prefixSecurity === PrefixSecurityEnum.SILENT; + const prefixSecurityDisabled = + this.prefixSecurity === PrefixSecurityEnum.DISABLED; + /* If prefix checking is not disabled ...*/ + if (!prefixSecurityDisabled) { + let errorFound = false; + let errorMsg; + /* Check secure prefix condition */ + if (!isSecurePrefixConditionMet(cookie)) { + errorFound = true; + errorMsg = "Cookie has __Secure prefix but Secure attribute is not set"; + } else if (!isHostPrefixConditionMet(cookie)) { + /* Check host prefix condition */ + errorFound = true; + errorMsg = + "Cookie has __Host prefix but either Secure or HostOnly attribute is not set or Path is not '/'"; + } + if (errorFound) { + return cb( + options.ignoreError || ignoreErrorForPrefixSecurity + ? 
null + : new Error(errorMsg) + ); + } + } + + const store = this.store; + + if (!store.updateCookie) { + store.updateCookie = function(oldCookie, newCookie, cb) { + this.putCookie(newCookie, cb); + }; + } + + function withCookie(err, oldCookie) { + if (err) { + return cb(err); + } + + const next = function(err) { + if (err) { + return cb(err); + } else { + cb(null, cookie); + } + }; + + if (oldCookie) { + // S5.3 step 11 - "If the cookie store contains a cookie with the same name, + // domain, and path as the newly created cookie:" + if (options.http === false && oldCookie.httpOnly) { + // step 11.2 + err = new Error("old Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + cookie.creation = oldCookie.creation; // step 11.3 + cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker + cookie.lastAccessed = now; + // Step 11.4 (delete cookie) is implied by just setting the new one: + store.updateCookie(oldCookie, cookie, next); // step 12 + } else { + cookie.creation = cookie.lastAccessed = now; + store.putCookie(cookie, next); // step 12 + } + } + + store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie); + } + + // RFC6365 S5.4 + getCookies(url, options, cb) { + const context = getCookieContext(url); + if (typeof options === "function") { + cb = options; + options = {}; + } + + const host = canonicalDomain(context.hostname); + const path = context.pathname || "/"; + + let secure = options.secure; + if ( + secure == null && + context.protocol && + (context.protocol == "https:" || context.protocol == "wss:") + ) { + secure = true; + } + + let sameSiteLevel = 0; + if (options.sameSiteContext) { + const sameSiteContext = checkSameSiteContext(options.sameSiteContext); + sameSiteLevel = Cookie.sameSiteLevel[sameSiteContext]; + if (!sameSiteLevel) { + return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); + } + } + + let http = options.http; + if (http == null) { + http = true; + } + + const now = options.now || Date.now(); + const expireCheck = options.expire !== false; + const allPaths = !!options.allPaths; + const store = this.store; + + function matchingCookie(c) { + // "Either: + // The cookie's host-only-flag is true and the canonicalized + // request-host is identical to the cookie's domain. + // Or: + // The cookie's host-only-flag is false and the canonicalized + // request-host domain-matches the cookie's domain." + if (c.hostOnly) { + if (c.domain != host) { + return false; + } + } else { + if (!domainMatch(host, c.domain, false)) { + return false; + } + } + + // "The request-uri's path path-matches the cookie's path." 
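+      // Worked examples (added for illustration): a cookie path of "/api"
+      // path-matches request paths "/api" and "/api/v1" but not "/apix",
+      // while a default-path cookie stored with path "/" matches any path.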
+ if (!allPaths && !pathMatch(path, c.path)) { + return false; + } + + // "If the cookie's secure-only-flag is true, then the request-uri's + // scheme must denote a "secure" protocol" + if (c.secure && !secure) { + return false; + } + + // "If the cookie's http-only-flag is true, then exclude the cookie if the + // cookie-string is being generated for a "non-HTTP" API" + if (c.httpOnly && !http) { + return false; + } + + // RFC6265bis-02 S5.3.7 + if (sameSiteLevel) { + const cookieLevel = Cookie.sameSiteLevel[c.sameSite || "none"]; + if (cookieLevel > sameSiteLevel) { + // only allow cookies at or below the request level + return false; + } + } + + // deferred from S5.3 + // non-RFC: allow retention of expired cookies by choice + if (expireCheck && c.expiryTime() <= now) { + store.removeCookie(c.domain, c.path, c.key, () => {}); // result ignored + return false; + } + + return true; + } + + store.findCookies( + host, + allPaths ? null : path, + this.allowSpecialUseDomain, + (err, cookies) => { + if (err) { + return cb(err); + } + + cookies = cookies.filter(matchingCookie); + + // sorting of S5.4 part 2 + if (options.sort !== false) { + cookies = cookies.sort(cookieCompare); + } + + // S5.4 part 3 + const now = new Date(); + for (const cookie of cookies) { + cookie.lastAccessed = now; + } + // TODO persist lastAccessed + + cb(null, cookies); + } + ); + } + + getCookieString(...args) { + const cb = args.pop(); + const next = function(err, cookies) { + if (err) { + cb(err); + } else { + cb( + null, + cookies + .sort(cookieCompare) + .map(c => c.cookieString()) + .join("; ") + ); + } + }; + args.push(next); + this.getCookies.apply(this, args); + } + + getSetCookieStrings(...args) { + const cb = args.pop(); + const next = function(err, cookies) { + if (err) { + cb(err); + } else { + cb( + null, + cookies.map(c => { + return c.toString(); + }) + ); + } + }; + args.push(next); + this.getCookies.apply(this, args); + } + + serialize(cb) { + let type = this.store.constructor.name; + if (type === "Object") { + type = null; + } + + // update README.md "Serialization Format" if you change this, please! + const serialized = { + // The version of tough-cookie that serialized this jar. Generally a good + // practice since future versions can make data import decisions based on + // known past behavior. When/if this matters, use `semver`. + version: `tough-cookie@${VERSION}`, + + // add the store type, to make humans happy: + storeType: type, + + // CookieJar configuration: + rejectPublicSuffixes: !!this.rejectPublicSuffixes, + + // this gets filled from getAllCookies: + cookies: [] + }; + + if ( + !( + this.store.getAllCookies && + typeof this.store.getAllCookies === "function" + ) + ) { + return cb( + new Error( + "store does not support getAllCookies and cannot be serialized" + ) + ); + } + + this.store.getAllCookies((err, cookies) => { + if (err) { + return cb(err); + } + + serialized.cookies = cookies.map(cookie => { + // convert to serialized 'raw' cookies + cookie = cookie instanceof Cookie ? 
cookie.toJSON() : cookie; + + // Remove the index so new ones get assigned during deserialization + delete cookie.creationIndex; + + return cookie; + }); + + return cb(null, serialized); + }); + } + + toJSON() { + return this.serializeSync(); + } + + // use the class method CookieJar.deserialize instead of calling this directly + _importCookies(serialized, cb) { + let cookies = serialized.cookies; + if (!cookies || !Array.isArray(cookies)) { + return cb(new Error("serialized jar has no cookies array")); + } + cookies = cookies.slice(); // do not modify the original + + const putNext = err => { + if (err) { + return cb(err); + } + + if (!cookies.length) { + return cb(err, this); + } + + let cookie; + try { + cookie = fromJSON(cookies.shift()); + } catch (e) { + return cb(e); + } + + if (cookie === null) { + return putNext(null); // skip this cookie + } + + this.store.putCookie(cookie, putNext); + }; + + putNext(); + } + + clone(newStore, cb) { + if (arguments.length === 1) { + cb = newStore; + newStore = null; + } + + this.serialize((err, serialized) => { + if (err) { + return cb(err); + } + CookieJar.deserialize(serialized, newStore, cb); + }); + } + + cloneSync(newStore) { + if (arguments.length === 0) { + return this._cloneSync(); + } + if (!newStore.synchronous) { + throw new Error( + "CookieJar clone destination store is not synchronous; use async API instead." + ); + } + return this._cloneSync(newStore); + } + + removeAllCookies(cb) { + const store = this.store; + + // Check that the store implements its own removeAllCookies(). The default + // implementation in Store will immediately call the callback with a "not + // implemented" Error. + if ( + typeof store.removeAllCookies === "function" && + store.removeAllCookies !== Store.prototype.removeAllCookies + ) { + return store.removeAllCookies(cb); + } + + store.getAllCookies((err, cookies) => { + if (err) { + return cb(err); + } + + if (cookies.length === 0) { + return cb(null); + } + + let completedCount = 0; + const removeErrors = []; + + function removeCookieCb(removeErr) { + if (removeErr) { + removeErrors.push(removeErr); + } + + completedCount++; + + if (completedCount === cookies.length) { + return cb(removeErrors.length ? removeErrors[0] : null); + } + } + + cookies.forEach(cookie => { + store.removeCookie( + cookie.domain, + cookie.path, + cookie.key, + removeCookieCb + ); + }); + }); + } + + static deserialize(strOrObj, store, cb) { + if (arguments.length !== 3) { + // store is optional + cb = store; + store = null; + } + + let serialized; + if (typeof strOrObj === "string") { + serialized = jsonParse(strOrObj); + if (serialized instanceof Error) { + return cb(serialized); + } + } else { + serialized = strOrObj; + } + + const jar = new CookieJar(store, serialized.rejectPublicSuffixes); + jar._importCookies(serialized, err => { + if (err) { + return cb(err); + } + cb(null, jar); + }); + } + + static deserializeSync(strOrObj, store) { + const serialized = + typeof strOrObj === "string" ? JSON.parse(strOrObj) : strOrObj; + const jar = new CookieJar(store, serialized.rejectPublicSuffixes); + + // catch this mistake early: + if (!jar.store.synchronous) { + throw new Error( + "CookieJar store is not synchronous; use async API instead." 
+ ); + } + + jar._importCookiesSync(serialized); + return jar; + } +} +CookieJar.fromJSON = CookieJar.deserializeSync; + +[ + "_importCookies", + "clone", + "getCookies", + "getCookieString", + "getSetCookieStrings", + "removeAllCookies", + "serialize", + "setCookie" +].forEach(name => { + CookieJar.prototype[name] = fromCallback(CookieJar.prototype[name]); +}); +CookieJar.deserialize = fromCallback(CookieJar.deserialize); + +// Use a closure to provide a true imperative API for synchronous stores. +function syncWrap(method) { + return function(...args) { + if (!this.store.synchronous) { + throw new Error( + "CookieJar store is not synchronous; use async API instead." + ); + } + + let syncErr, syncResult; + this[method](...args, (err, result) => { + syncErr = err; + syncResult = result; + }); + + if (syncErr) { + throw syncErr; + } + return syncResult; + }; +} + +exports.version = VERSION; +exports.CookieJar = CookieJar; +exports.Cookie = Cookie; +exports.Store = Store; +exports.MemoryCookieStore = MemoryCookieStore; +exports.parseDate = parseDate; +exports.formatDate = formatDate; +exports.parse = parse; +exports.fromJSON = fromJSON; +exports.domainMatch = domainMatch; +exports.defaultPath = defaultPath; +exports.pathMatch = pathMatch; +exports.getPublicSuffix = pubsuffix.getPublicSuffix; +exports.cookieCompare = cookieCompare; +exports.permuteDomain = __webpack_require__(89).permuteDomain; +exports.permutePath = permutePath; +exports.canonicalDomain = canonicalDomain; +exports.PrefixSecurityEnum = PrefixSecurityEnum; + + +/***/ }), +/* 394 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ProxyTracerProvider = void 0; +var ProxyTracer_1 = __webpack_require__(398); +var NoopTracerProvider_1 = __webpack_require__(162); +var NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +var ProxyTracerProvider = /** @class */ (function () { + function ProxyTracerProvider() { + } + /** + * Get a {@link ProxyTracer} + */ + ProxyTracerProvider.prototype.getTracer = function (name, version) { + var _a; + return ((_a = this.getDelegateTracer(name, version)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version)); + }; + ProxyTracerProvider.prototype.getDelegate = function () { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? 
_a : NOOP_TRACER_PROVIDER; + }; + /** + * Set the delegate tracer provider + */ + ProxyTracerProvider.prototype.setDelegate = function (delegate) { + this._delegate = delegate; + }; + ProxyTracerProvider.prototype.getDelegateTracer = function (name, version) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version); + }; + return ProxyTracerProvider; +}()); +exports.ProxyTracerProvider = ProxyTracerProvider; +//# sourceMappingURL=ProxyTracerProvider.js.map + +/***/ }), +/* 395 */, +/* 396 */, +/* 397 */, +/* 398 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ProxyTracer = void 0; +var NoopTracer_1 = __webpack_require__(151); +var NOOP_TRACER = new NoopTracer_1.NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +var ProxyTracer = /** @class */ (function () { + function ProxyTracer(_provider, name, version) { + this._provider = _provider; + this.name = name; + this.version = version; + } + ProxyTracer.prototype.startSpan = function (name, options, context) { + return this._getTracer().startSpan(name, options, context); + }; + ProxyTracer.prototype.startActiveSpan = function (_name, _options, _context, _fn) { + var tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + }; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + ProxyTracer.prototype._getTracer = function () { + if (this._delegate) { + return this._delegate; + } + var tracer = this._provider.getDelegateTracer(this.name, this.version); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + }; + return ProxyTracer; +}()); +exports.ProxyTracer = ProxyTracer; +//# sourceMappingURL=ProxyTracer.js.map + +/***/ }), +/* 399 */, +/* 400 */, +/* 401 */, +/* 402 */, +/* 403 */, +/* 404 */, +/* 405 */, +/* 406 */, +/* 407 */, +/* 408 */, +/* 409 */, +/* 410 */, +/* 411 */, +/* 412 */, +/* 413 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = __webpack_require__(141); + + +/***/ }), +/* 414 */, +/* 415 */, +/* 416 */, +/* 417 */ +/***/ (function(module) { + +module.exports = require("crypto"); + +/***/ }), +/* 418 */, +/* 419 */, +/* 420 */, +/* 421 */, +/* 422 */, +/* 423 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, WriterState, XMLCData, XMLComment, XMLDTDAttList, XMLDTDElement, XMLDTDEntity, XMLDTDNotation, XMLDeclaration, XMLDocType, XMLDummy, XMLElement, XMLProcessingInstruction, XMLRaw, XMLText, XMLWriterBase, assign, + hasProp = {}.hasOwnProperty; + + assign = __webpack_require__(582).assign; + + NodeType = __webpack_require__(683); + + XMLDeclaration = __webpack_require__(738); + + XMLDocType = __webpack_require__(735); + + XMLCData = __webpack_require__(657); + + XMLComment = __webpack_require__(919); + + XMLElement = __webpack_require__(796); + + XMLRaw = __webpack_require__(660); + + XMLText = __webpack_require__(708); + + XMLProcessingInstruction = __webpack_require__(491); + + XMLDummy = __webpack_require__(956); + + XMLDTDAttList = __webpack_require__(801); + + XMLDTDElement = __webpack_require__(463); + + XMLDTDEntity = __webpack_require__(661); + + XMLDTDNotation = __webpack_require__(19); + + WriterState = __webpack_require__(541); + + module.exports = XMLWriterBase = (function() { + function XMLWriterBase(options) { + var key, ref, value; + options || (options = {}); + this.options = options; + ref = options.writer || {}; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + value = ref[key]; + this["_" + key] = this[key]; + this[key] = value; + } + } + + XMLWriterBase.prototype.filterOptions = function(options) { + var filteredOptions, ref, ref1, ref2, ref3, ref4, ref5, ref6; + options || (options = {}); + options = assign({}, this.options, options); + filteredOptions = { + writer: this + }; + filteredOptions.pretty = options.pretty || false; + filteredOptions.allowEmpty = options.allowEmpty || false; + filteredOptions.indent = (ref = options.indent) != null ? ref : ' '; + filteredOptions.newline = (ref1 = options.newline) != null ? ref1 : '\n'; + filteredOptions.offset = (ref2 = options.offset) != null ? ref2 : 0; + filteredOptions.dontPrettyTextNodes = (ref3 = (ref4 = options.dontPrettyTextNodes) != null ? ref4 : options.dontprettytextnodes) != null ? ref3 : 0; + filteredOptions.spaceBeforeSlash = (ref5 = (ref6 = options.spaceBeforeSlash) != null ? ref6 : options.spacebeforeslash) != null ? 
ref5 : ''; + if (filteredOptions.spaceBeforeSlash === true) { + filteredOptions.spaceBeforeSlash = ' '; + } + filteredOptions.suppressPrettyCount = 0; + filteredOptions.user = {}; + filteredOptions.state = WriterState.None; + return filteredOptions; + }; + + XMLWriterBase.prototype.indent = function(node, options, level) { + var indentLevel; + if (!options.pretty || options.suppressPrettyCount) { + return ''; + } else if (options.pretty) { + indentLevel = (level || 0) + options.offset + 1; + if (indentLevel > 0) { + return new Array(indentLevel).join(options.indent); + } + } + return ''; + }; + + XMLWriterBase.prototype.endline = function(node, options, level) { + if (!options.pretty || options.suppressPrettyCount) { + return ''; + } else { + return options.newline; + } + }; + + XMLWriterBase.prototype.attribute = function(att, options, level) { + var r; + this.openAttribute(att, options, level); + r = ' ' + att.name + '="' + att.value + '"'; + this.closeAttribute(att, options, level); + return r; + }; + + XMLWriterBase.prototype.cdata = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.comment = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.declaration = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + ''; + r += this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.docType = function(node, options, level) { + var child, i, len, r, ref; + level || (level = 0); + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level); + r += ' 0) { + r += ' ['; + r += this.endline(node, options, level); + options.state = WriterState.InsideTag; + ref = node.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + r += this.writeChildNode(child, options, level + 1); + } + options.state = WriterState.CloseTag; + r += ']'; + } + options.state = WriterState.CloseTag; + r += options.spaceBeforeSlash + '>'; + r += this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.element = function(node, options, level) { + var att, child, childNodeCount, firstChildNode, i, j, len, len1, name, prettySuppressed, r, ref, ref1, ref2; + level || (level = 0); + prettySuppressed = false; + r = ''; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r += this.indent(node, options, level) + '<' + node.name; + ref = node.attribs; + for (name in ref) { + if (!hasProp.call(ref, name)) continue; + att = ref[name]; + r += this.attribute(att, options, level); + } + childNodeCount = node.children.length; + firstChildNode = childNodeCount === 0 ? 
null : node.children[0]; + if (childNodeCount === 0 || node.children.every(function(e) { + return (e.type === NodeType.Text || e.type === NodeType.Raw) && e.value === ''; + })) { + if (options.allowEmpty) { + r += '>'; + options.state = WriterState.CloseTag; + r += '' + this.endline(node, options, level); + } else { + options.state = WriterState.CloseTag; + r += options.spaceBeforeSlash + '/>' + this.endline(node, options, level); + } + } else if (options.pretty && childNodeCount === 1 && (firstChildNode.type === NodeType.Text || firstChildNode.type === NodeType.Raw) && (firstChildNode.value != null)) { + r += '>'; + options.state = WriterState.InsideTag; + options.suppressPrettyCount++; + prettySuppressed = true; + r += this.writeChildNode(firstChildNode, options, level + 1); + options.suppressPrettyCount--; + prettySuppressed = false; + options.state = WriterState.CloseTag; + r += '' + this.endline(node, options, level); + } else { + if (options.dontPrettyTextNodes) { + ref1 = node.children; + for (i = 0, len = ref1.length; i < len; i++) { + child = ref1[i]; + if ((child.type === NodeType.Text || child.type === NodeType.Raw) && (child.value != null)) { + options.suppressPrettyCount++; + prettySuppressed = true; + break; + } + } + } + r += '>' + this.endline(node, options, level); + options.state = WriterState.InsideTag; + ref2 = node.children; + for (j = 0, len1 = ref2.length; j < len1; j++) { + child = ref2[j]; + r += this.writeChildNode(child, options, level + 1); + } + options.state = WriterState.CloseTag; + r += this.indent(node, options, level) + ''; + if (prettySuppressed) { + options.suppressPrettyCount--; + } + r += this.endline(node, options, level); + options.state = WriterState.None; + } + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.writeChildNode = function(node, options, level) { + switch (node.type) { + case NodeType.CData: + return this.cdata(node, options, level); + case NodeType.Comment: + return this.comment(node, options, level); + case NodeType.Element: + return this.element(node, options, level); + case NodeType.Raw: + return this.raw(node, options, level); + case NodeType.Text: + return this.text(node, options, level); + case NodeType.ProcessingInstruction: + return this.processingInstruction(node, options, level); + case NodeType.Dummy: + return ''; + case NodeType.Declaration: + return this.declaration(node, options, level); + case NodeType.DocType: + return this.docType(node, options, level); + case NodeType.AttributeDeclaration: + return this.dtdAttList(node, options, level); + case NodeType.ElementDeclaration: + return this.dtdElement(node, options, level); + case NodeType.EntityDeclaration: + return this.dtdEntity(node, options, level); + case NodeType.NotationDeclaration: + return this.dtdNotation(node, options, level); + default: + throw new Error("Unknown XML node type: " + node.constructor.name); + } + }; + + XMLWriterBase.prototype.processingInstruction = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + ''; + r += this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.raw = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level); + options.state = WriterState.InsideTag; + r += node.value; + options.state 
= WriterState.CloseTag; + r += this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.text = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level); + options.state = WriterState.InsideTag; + r += node.value; + options.state = WriterState.CloseTag; + r += this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.dtdAttList = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.dtdElement = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.dtdEntity = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.dtdNotation = function(node, options, level) { + var r; + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + r = this.indent(node, options, level) + '' + this.endline(node, options, level); + options.state = WriterState.None; + this.closeNode(node, options, level); + return r; + }; + + XMLWriterBase.prototype.openNode = function(node, options, level) {}; + + XMLWriterBase.prototype.closeNode = function(node, options, level) {}; + + XMLWriterBase.prototype.openAttribute = function(att, options, level) {}; + + XMLWriterBase.prototype.closeAttribute = function(att, options, level) {}; + + return XMLWriterBase; + + })(); + +}).call(this); + + +/***/ }), +/* 424 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var iterate = __webpack_require__(157) + , initState = __webpack_require__(903) + , terminator = __webpack_require__(939) + ; + +// Public API +module.exports = parallel; + +/** + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; + } + + return terminator.bind(state, callback); +} + + +/***/ }), +/* 425 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 
2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopContextManager = void 0; +var context_1 = __webpack_require__(132); +var NoopContextManager = /** @class */ (function () { + function NoopContextManager() { + } + NoopContextManager.prototype.active = function () { + return context_1.ROOT_CONTEXT; + }; + NoopContextManager.prototype.with = function (_context, fn, thisArg) { + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return fn.call.apply(fn, __spreadArray([thisArg], args)); + }; + NoopContextManager.prototype.bind = function (_context, target) { + return target; + }; + NoopContextManager.prototype.enable = function () { + return this; + }; + NoopContextManager.prototype.disable = function () { + return this; + }; + return NoopContextManager; +}()); +exports.NoopContextManager = NoopContextManager; +//# sourceMappingURL=NoopContextManager.js.map + +/***/ }), +/* 426 */, +/* 427 */, +/* 428 */, +/* 429 */, +/* 430 */, +/* 431 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__webpack_require__(87)); +const utils_1 = __webpack_require__(82); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), +/* 432 */, +/* 433 */, +/* 434 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const exec_1 = __webpack_require__(986); +const io = __importStar(__webpack_require__(1)); +const fs_1 = __webpack_require__(747); +const path = __importStar(__webpack_require__(622)); +const utils = __importStar(__webpack_require__(15)); +const constants_1 = __webpack_require__(931); +function getTarPath(args, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + switch (process.platform) { + case 'win32': { + const systemTar = `${process.env['windir']}\\System32\\tar.exe`; + if (compressionMethod !== constants_1.CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local'); + } + else if (fs_1.existsSync(systemTar)) { + return systemTar; + } + else if (yield utils.isGnuTarInstalled()) { + args.push('--force-local'); + } + break; + } + case 'darwin': { + const gnuTar = yield io.which('gtar', false); + if (gnuTar) { + // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 + args.push('--delay-directory-restore'); + return gnuTar; + } + break; + } + default: + break; + } + return yield io.which('tar', true); + }); +} +function execTar(args, compressionMethod, cwd) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } + }); +} +function getWorkingDirectory() { + var _a; + return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); +} +function extractTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + const workingDirectory = getWorkingDirectory(); + yield io.mkdirP(workingDirectory); + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. 
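+        // Rough sketch of the assembled command (assumed example, not part of
+        // the upstream source): for CompressionMethod.Zstd this becomes
+        //   tar --use-compress-program "zstd -d --long=30" -xf <archive> -P -C <workspace>
+        // after backslash path separators are rewritten to forward slashes.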
+ function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -d --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -d']; + default: + return ['-z']; + } + } + const args = [ + ...getCompressionProgram(), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ]; + yield execTar(args, compressionMethod); + }); +} +exports.extractTar = extractTar; +function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Write source directories to manifest.txt to avoid command length limits + const manifestFilename = 'manifest.txt'; + const cacheFileName = utils.getCacheFileName(compressionMethod); + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); + const workingDirectory = getWorkingDirectory(); + // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -T0 --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -T0']; + default: + return ['-z']; + } + } + const args = [ + '--posix', + ...getCompressionProgram(), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ]; + yield execTar(args, compressionMethod, archiveFolder); + }); +} +exports.createTar = createTar; +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. + // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -d --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -d']; + default: + return ['-z']; + } + } + const args = [ + ...getCompressionProgram(), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ]; + yield execTar(args, compressionMethod); + }); +} +exports.listTar = listTar; +//# sourceMappingURL=tar.js.map + +/***/ }), +/* 435 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; +var trace_flags_1 = __webpack_require__(975); +exports.INVALID_SPANID = '0000000000000000'; +exports.INVALID_TRACEID = '00000000000000000000000000000000'; +exports.INVALID_SPAN_CONTEXT = { + traceId: exports.INVALID_TRACEID, + spanId: exports.INVALID_SPANID, + traceFlags: trace_flags_1.TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map + +/***/ }), +/* 436 */, +/* 437 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NonRecordingSpan = void 0; +var invalid_span_constants_1 = __webpack_require__(435); +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +var NonRecordingSpan = /** @class */ (function () { + function NonRecordingSpan(_spanContext) { + if (_spanContext === void 0) { _spanContext = invalid_span_constants_1.INVALID_SPAN_CONTEXT; } + this._spanContext = _spanContext; + } + // Returns a SpanContext. + NonRecordingSpan.prototype.spanContext = function () { + return this._spanContext; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setStatus = function (_status) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.updateName = function (_name) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.end = function (_endTime) { }; + // isRecording always returns false for NonRecordingSpan. 
+ NonRecordingSpan.prototype.isRecording = function () { + return false; + }; + // By default does nothing + NonRecordingSpan.prototype.recordException = function (_exception, _time) { }; + return NonRecordingSpan; +}()); +exports.NonRecordingSpan = NonRecordingSpan; +//# sourceMappingURL=NonRecordingSpan.js.map + +/***/ }), +/* 438 */, +/* 439 */, +/* 440 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.diag = exports.propagation = exports.trace = exports.context = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.baggageEntryMetadataFromString = void 0; +__exportStar(__webpack_require__(880), exports); +var utils_1 = __webpack_require__(112); +Object.defineProperty(exports, "baggageEntryMetadataFromString", { enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } }); +__exportStar(__webpack_require__(452), exports); +__exportStar(__webpack_require__(158), exports); +__exportStar(__webpack_require__(893), exports); +__exportStar(__webpack_require__(881), exports); +__exportStar(__webpack_require__(906), exports); +__exportStar(__webpack_require__(95), exports); +__exportStar(__webpack_require__(398), exports); +__exportStar(__webpack_require__(394), exports); +__exportStar(__webpack_require__(781), exports); +__exportStar(__webpack_require__(340), exports); +__exportStar(__webpack_require__(607), exports); +__exportStar(__webpack_require__(670), exports); +__exportStar(__webpack_require__(586), exports); +__exportStar(__webpack_require__(220), exports); +__exportStar(__webpack_require__(932), exports); +__exportStar(__webpack_require__(975), exports); +__exportStar(__webpack_require__(207), exports); +__exportStar(__webpack_require__(694), exports); +__exportStar(__webpack_require__(695), exports); +var spancontext_utils_1 = __webpack_require__(629); +Object.defineProperty(exports, "isSpanContextValid", { enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } }); +Object.defineProperty(exports, "isValidTraceId", { enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } }); +Object.defineProperty(exports, "isValidSpanId", { enumerable: true, get: function () { return 
spancontext_utils_1.isValidSpanId; } }); +var invalid_span_constants_1 = __webpack_require__(435); +Object.defineProperty(exports, "INVALID_SPANID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPANID; } }); +Object.defineProperty(exports, "INVALID_TRACEID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } }); +Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } }); +__exportStar(__webpack_require__(132), exports); +__exportStar(__webpack_require__(845), exports); +var context_1 = __webpack_require__(492); +/** Entrypoint for context API */ +exports.context = context_1.ContextAPI.getInstance(); +var trace_1 = __webpack_require__(875); +/** Entrypoint for trace API */ +exports.trace = trace_1.TraceAPI.getInstance(); +var propagation_1 = __webpack_require__(22); +/** Entrypoint for propagation API */ +exports.propagation = propagation_1.PropagationAPI.getInstance(); +var diag_1 = __webpack_require__(118); +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +exports.diag = diag_1.DiagAPI.instance(); +exports.default = { + trace: exports.trace, + context: exports.context, + propagation: exports.propagation, + diag: exports.diag, +}; +//# sourceMappingURL=index.js.map + +/***/ }), +/* 441 */, +/* 442 */, +/* 443 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0; +const cache = __importStar(__webpack_require__(692)); +const core = __importStar(__webpack_require__(470)); +const constants_1 = __webpack_require__(196); +function isGhes() { + const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); + return ghUrl.hostname.toUpperCase() !== "GITHUB.COM"; +} +exports.isGhes = isGhes; +function isExactKeyMatch(key, cacheKey) { + return !!(cacheKey && + cacheKey.localeCompare(key, undefined, { + sensitivity: "accent" + }) === 0); +} +exports.isExactKeyMatch = isExactKeyMatch; +function setCacheState(state) { + core.saveState(constants_1.State.CacheMatchedKey, state); +} +exports.setCacheState = setCacheState; +function setCacheHitOutput(isCacheHit) { + core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); +} +exports.setCacheHitOutput = setCacheHitOutput; +function setOutputAndState(key, cacheKey) { + setCacheHitOutput(isExactKeyMatch(key, cacheKey)); + // Store the matched cache key if it exists + cacheKey && setCacheState(cacheKey); +} +exports.setOutputAndState = setOutputAndState; +function getCacheState() { + const cacheKey = core.getState(constants_1.State.CacheMatchedKey); + if (cacheKey) { + core.debug(`Cache state/key: ${cacheKey}`); + return cacheKey; + } + return undefined; +} +exports.getCacheState = getCacheState; +function logWarning(message) { + const warningPrefix = "[warning]"; + core.info(`${warningPrefix}${message}`); +} +exports.logWarning = logWarning; +// Cache token authorized for all events that are tied to a ref +// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context +function isValidEvent() { + return constants_1.RefKey in process.env && Boolean(process.env[constants_1.RefKey]); +} +exports.isValidEvent = isValidEvent; +function getInputAsArray(name, options) { + return core + .getInput(name, options) + .split("\n") + .map(s => s.trim()) + .filter(x => x !== ""); +} +exports.getInputAsArray = getInputAsArray; +function getInputAsInt(name, options) { + const value = parseInt(core.getInput(name, options)); + if (isNaN(value) || value < 0) { + return undefined; + } + return value; +} +exports.getInputAsInt = getInputAsInt; +function isCacheFeatureAvailable() { + if (!cache.isFeatureAvailable()) { + if (isGhes()) { + logWarning("Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not."); + } + else { + logWarning("An internal error has occurred in cache backend. 
Please check https://www.githubstatus.com/ for any ongoing issue in actions."); + } + return false; + } + return true; +} +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + + +/***/ }), +/* 444 */, +/* 445 */, +/* 446 */, +/* 447 */, +/* 448 */, +/* 449 */, +/* 450 */, +/* 451 */ +/***/ (function(module) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLNamedNodeMap; + + module.exports = XMLNamedNodeMap = (function() { + function XMLNamedNodeMap(nodes) { + this.nodes = nodes; + } + + Object.defineProperty(XMLNamedNodeMap.prototype, 'length', { + get: function() { + return Object.keys(this.nodes).length || 0; + } + }); + + XMLNamedNodeMap.prototype.clone = function() { + return this.nodes = null; + }; + + XMLNamedNodeMap.prototype.getNamedItem = function(name) { + return this.nodes[name]; + }; + + XMLNamedNodeMap.prototype.setNamedItem = function(node) { + var oldNode; + oldNode = this.nodes[node.nodeName]; + this.nodes[node.nodeName] = node; + return oldNode || null; + }; + + XMLNamedNodeMap.prototype.removeNamedItem = function(name) { + var oldNode; + oldNode = this.nodes[name]; + delete this.nodes[name]; + return oldNode || null; + }; + + XMLNamedNodeMap.prototype.item = function(index) { + return this.nodes[Object.keys(this.nodes)[index]] || null; + }; + + XMLNamedNodeMap.prototype.getNamedItemNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented."); + }; + + XMLNamedNodeMap.prototype.setNamedItemNS = function(node) { + throw new Error("This DOM method is not implemented."); + }; + + XMLNamedNodeMap.prototype.removeNamedItemNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented."); + }; + + return XMLNamedNodeMap; + + })(); + +}).call(this); + + +/***/ }), +/* 452 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Exception.js.map + +/***/ }), +/* 453 */, +/* 454 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var Stream = _interopDefault(__webpack_require__(794)); +var http = _interopDefault(__webpack_require__(605)); +var Url = _interopDefault(__webpack_require__(835)); +var whatwgUrl = _interopDefault(__webpack_require__(70)); +var https = _interopDefault(__webpack_require__(211)); +var zlib = _interopDefault(__webpack_require__(761)); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + 
Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = __webpack_require__(18).convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' 
&& orig.endsWith(dest); +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; + + +/***/ }), +/* 455 */, +/* 456 */, +/* 457 */, +/* 458 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, WriterState, XMLStreamWriter, XMLWriterBase, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = __webpack_require__(683); + + XMLWriterBase = __webpack_require__(423); + + WriterState = __webpack_require__(541); + + module.exports = XMLStreamWriter = (function(superClass) { + extend(XMLStreamWriter, superClass); + + function XMLStreamWriter(stream, options) { + this.stream = stream; + XMLStreamWriter.__super__.constructor.call(this, options); + } + + XMLStreamWriter.prototype.endline = function(node, options, level) { + if (node.isLastRootNode && options.state === WriterState.CloseTag) { + return ''; + } else { + return XMLStreamWriter.__super__.endline.call(this, node, options, level); + } + }; + + XMLStreamWriter.prototype.document = function(doc, options) { + var child, i, j, k, len, len1, ref, ref1, results; + ref = doc.children; + for (i = j = 0, len = ref.length; j < len; i = ++j) { + child = ref[i]; + child.isLastRootNode = i === doc.children.length - 1; + } + options = this.filterOptions(options); + ref1 = doc.children; + results = []; + for (k = 0, len1 = ref1.length; k < len1; k++) { + child = ref1[k]; + results.push(this.writeChildNode(child, options, 0)); + } + return results; + }; + + XMLStreamWriter.prototype.attribute = function(att, options, level) { + return 
this.stream.write(XMLStreamWriter.__super__.attribute.call(this, att, options, level));
+ };
+
+ XMLStreamWriter.prototype.cdata = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.cdata.call(this, node, options, level));
+ };
+
+ XMLStreamWriter.prototype.comment = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.comment.call(this, node, options, level));
+ };
+
+ XMLStreamWriter.prototype.declaration = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.declaration.call(this, node, options, level));
+ };
+
+ XMLStreamWriter.prototype.docType = function(node, options, level) {
+ var child, j, len, ref;
+ level || (level = 0);
+ this.openNode(node, options, level);
+ options.state = WriterState.OpenTag;
+ this.stream.write(this.indent(node, options, level));
+ this.stream.write('<!DOCTYPE ' + node.root().name);
+ if (node.pubID && node.sysID) {
+ this.stream.write(' PUBLIC "' + node.pubID + '" "' + node.sysID + '"');
+ } else if (node.sysID) {
+ this.stream.write(' SYSTEM "' + node.sysID + '"');
+ }
+ if (node.children.length > 0) {
+ this.stream.write(' [');
+ this.stream.write(this.endline(node, options, level));
+ options.state = WriterState.InsideTag;
+ ref = node.children;
+ for (j = 0, len = ref.length; j < len; j++) {
+ child = ref[j];
+ this.writeChildNode(child, options, level + 1);
+ }
+ options.state = WriterState.CloseTag;
+ this.stream.write(']');
+ }
+ options.state = WriterState.CloseTag;
+ this.stream.write(options.spaceBeforeSlash + '>');
+ this.stream.write(this.endline(node, options, level));
+ options.state = WriterState.None;
+ return this.closeNode(node, options, level);
+ };
+
+ XMLStreamWriter.prototype.element = function(node, options, level) {
+ var att, child, childNodeCount, firstChildNode, j, len, name, prettySuppressed, ref, ref1;
+ level || (level = 0);
+ this.openNode(node, options, level);
+ options.state = WriterState.OpenTag;
+ this.stream.write(this.indent(node, options, level) + '<' + node.name);
+ ref = node.attribs;
+ for (name in ref) {
+ if (!hasProp.call(ref, name)) continue;
+ att = ref[name];
+ this.attribute(att, options, level);
+ }
+ childNodeCount = node.children.length;
+ firstChildNode = childNodeCount === 0 ?
null : node.children[0];
+ if (childNodeCount === 0 || node.children.every(function(e) {
+ return (e.type === NodeType.Text || e.type === NodeType.Raw) && e.value === '';
+ })) {
+ if (options.allowEmpty) {
+ this.stream.write('>');
+ options.state = WriterState.CloseTag;
+ this.stream.write('</' + node.name + '>');
+ } else {
+ options.state = WriterState.CloseTag;
+ this.stream.write(options.spaceBeforeSlash + '/>');
+ }
+ } else if (options.pretty && childNodeCount === 1 && (firstChildNode.type === NodeType.Text || firstChildNode.type === NodeType.Raw) && (firstChildNode.value != null)) {
+ this.stream.write('>');
+ options.state = WriterState.InsideTag;
+ options.suppressPrettyCount++;
+ prettySuppressed = true;
+ this.writeChildNode(firstChildNode, options, level + 1);
+ options.suppressPrettyCount--;
+ prettySuppressed = false;
+ options.state = WriterState.CloseTag;
+ this.stream.write('</' + node.name + '>');
+ } else {
+ this.stream.write('>' + this.endline(node, options, level));
+ options.state = WriterState.InsideTag;
+ ref1 = node.children;
+ for (j = 0, len = ref1.length; j < len; j++) {
+ child = ref1[j];
+ this.writeChildNode(child, options, level + 1);
+ }
+ options.state = WriterState.CloseTag;
+ this.stream.write(this.indent(node, options, level) + '</' + node.name + '>');
+ }
+ this.stream.write(this.endline(node, options, level));
+ options.state = WriterState.None;
+ return this.closeNode(node, options, level);
+ };
+
+ XMLStreamWriter.prototype.processingInstruction = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.processingInstruction.call(this, node, options, level));
+ };
+
+ XMLStreamWriter.prototype.raw = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.raw.call(this, node, options, level));
+ };
+
+ XMLStreamWriter.prototype.text = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.text.call(this, node, options, level));
+ };
+
+ XMLStreamWriter.prototype.dtdAttList = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.dtdAttList.call(this, node, options, level));
+ };
+
+ XMLStreamWriter.prototype.dtdElement = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.dtdElement.call(this, node, options, level));
+ };
+
+ XMLStreamWriter.prototype.dtdEntity = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.dtdEntity.call(this, node, options, level));
+ };
+
+ XMLStreamWriter.prototype.dtdNotation = function(node, options, level) {
+ return this.stream.write(XMLStreamWriter.__super__.dtdNotation.call(this, node, options, level));
+ };
+
+ return XMLStreamWriter;
+
+ })(XMLWriterBase);
+
+}).call(this);
+
+
+/***/ }),
+/* 459 */,
+/* 460 */
+/***/ (function(module) {
+
+// generated by genversion
+module.exports = '4.0.0'
+
+
+/***/ }),
+/* 461 */,
+/* 462 */,
+/* 463 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Generated by CoffeeScript 1.12.7
+(function() {
+ var NodeType, XMLDTDElement, XMLNode,
+ extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
+ hasProp = {}.hasOwnProperty;
+
+ XMLNode = __webpack_require__(257);
+
+ NodeType = __webpack_require__(683);
+
+ module.exports = XMLDTDElement = (function(superClass) {
+ extend(XMLDTDElement, superClass);
+
+ function XMLDTDElement(parent, name, value) { + XMLDTDElement.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing DTD element name. " + this.debugInfo()); + } + if (!value) { + value = '(#PCDATA)'; + } + if (Array.isArray(value)) { + value = '(' + value.join(',') + ')'; + } + this.name = this.stringify.name(name); + this.type = NodeType.ElementDeclaration; + this.value = this.stringify.dtdElementValue(value); + } + + XMLDTDElement.prototype.toString = function(options) { + return this.options.writer.dtdElement(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDElement; + + })(XMLNode); + +}).call(this); + + +/***/ }), +/* 464 */, +/* 465 */, +/* 466 */, +/* 467 */, +/* 468 */, +/* 469 */, +/* 470 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = __webpack_require__(431); +const file_command_1 = __webpack_require__(102); +const utils_1 = __webpack_require__(82); +const os = __importStar(__webpack_require__(87)); +const path = __importStar(__webpack_require__(622)); +const oidc_utils_1 = __webpack_require__(742); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + const delimiter = '_GitHubActionsFileCommandDelimeter_'; + const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; + file_command_1.issueCommand('ENV', commandValue); + } + else { + command_1.issueCommand('set-env', { name }, convertedVal); + } +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. 
+ * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + return inputs; +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, value); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. 
+ */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. 
+ * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Markdown summary exports + */ +var markdown_summary_1 = __webpack_require__(548); +Object.defineProperty(exports, "markdownSummary", { enumerable: true, get: function () { return markdown_summary_1.markdownSummary; } }); +//# sourceMappingURL=core.js.map + +/***/ }), +/* 471 */, +/* 472 */, +/* 473 */, +/* 474 */, +/* 475 */, +/* 476 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var builder, defaults, escapeCDATA, requiresCDATA, wrapCDATA, + hasProp = {}.hasOwnProperty; + + builder = __webpack_require__(312); + + defaults = __webpack_require__(791).defaults; + + requiresCDATA = function(entry) { + return typeof entry === "string" && (entry.indexOf('&') >= 0 || entry.indexOf('>') >= 0 || entry.indexOf('<') >= 0); + }; + + wrapCDATA = function(entry) { + return "<![CDATA[" + escapeCDATA(entry) + "]]>"; + }; + + escapeCDATA = function(entry) { + return entry.replace(']]>', ']]]]><![CDATA['); + }; + + exports.Builder = (function() { + function Builder(opts) { + var key, ref, value; + this.options = {}; + ref = defaults["0.2"]; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + value = ref[key]; + this.options[key] = value; + } + for (key in opts) { + if (!hasProp.call(opts, key)) continue; + value = opts[key]; + this.options[key] = value; + } + } + + Builder.prototype.buildObject = function(rootObj) { + var attrkey, charkey, render, rootElement, rootName; + attrkey = this.options.attrkey; + charkey = this.options.charkey; + if ((Object.keys(rootObj).length === 1) && (this.options.rootName === defaults['0.2'].rootName)) { + rootName = Object.keys(rootObj)[0]; + rootObj = rootObj[rootName]; + } else { + rootName = this.options.rootName; + } + render = (function(_this) { + return function(element, obj) { + var attr, child, entry, index, key, value; + if (typeof obj !== 'object') { + if (_this.options.cdata && requiresCDATA(obj)) { + element.raw(wrapCDATA(obj)); + } else { + element.txt(obj); + } + } else if (Array.isArray(obj)) { + for (index in obj) { + if (!hasProp.call(obj, index)) continue; + child = obj[index]; + for (key in child) { + entry = child[key]; + element = render(element.ele(key), entry).up(); + } + } + } else { + for (key in obj) { + if (!hasProp.call(obj, key)) continue; + child = obj[key]; + if (key === attrkey) { + if (typeof child === "object") { + for (attr in child) { + value = child[attr]; + element = element.att(attr, value); + } + } + } else if (key === charkey) { + if (_this.options.cdata && requiresCDATA(child)) { + element = element.raw(wrapCDATA(child)); + } else { + element = element.txt(child); + } + } else if (Array.isArray(child)) { + for (index in child) { + if (!hasProp.call(child, index)) continue; + entry = child[index]; + if (typeof entry === 'string') { + if (_this.options.cdata && requiresCDATA(entry)) { + element = element.ele(key).raw(wrapCDATA(entry)).up(); + } else { + element = element.ele(key, entry).up(); + } + } else { + element = render(element.ele(key), entry).up(); + } + } + } else if (typeof child === "object") { + element = render(element.ele(key), child).up(); + } else { + if (typeof child === 'string' && 
_this.options.cdata && requiresCDATA(child)) { + element = element.ele(key).raw(wrapCDATA(child)).up(); + } else { + if (child == null) { + child = ''; + } + element = element.ele(key, child.toString()).up(); + } + } + } + } + return element; + }; + })(this); + rootElement = builder.create(rootName, this.options.xmldec, this.options.doctype, { + headless: this.options.headless, + allowSurrogateChars: this.options.allowSurrogateChars + }); + return render(rootElement, rootObj).end(this.options.renderOpts); + }; + + return Builder; + + })(); + +}).call(this); + + +/***/ }), +/* 477 */, +/* 478 */, +/* 479 */, +/* 480 */, +/* 481 */, +/* 482 */ +/***/ (function(module) { + +module.exports = [[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[2
67]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],
[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"val
id"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002
,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"mapped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,11
78],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]],[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[
[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"v
alid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[
2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[321
3,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,3331],"valid"],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"v
alid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"mapped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"
disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[5024,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[
[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[7418,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610
],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,7693],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,78
03],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]]
,[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[7996,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,
953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallow
ed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499
],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"mapped",[49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[92
53,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414
,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,9438],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"N
V8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247],"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"]
,[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,11430],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[117
44,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[12036,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[2
9255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[30707]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"m
apped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"mapped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[456
3]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapped",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,
12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844],"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,129
22],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[124
88]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[1
2504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,12483,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103
]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]
],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"valid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"
valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905
],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930],"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43
904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[5061]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",
[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],
"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"mapped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[6399
4,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"mapped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[3
3304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],
"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[1492]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mappe
d",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597
,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690
,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,16
09]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908
,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],
[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"mapped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],
[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274],"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"ma
pped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[10630]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464
],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490],"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[6657
0,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,66591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[6
8448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapped",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70
282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[7889
5,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119
836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857],"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"
mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disallowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[1
20056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapp
ed",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[113]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped"
,[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]
],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],
[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"ma
pped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mapped",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712
,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,120733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[12082
4,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]]
,[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]],[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127
200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[
],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,127503],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064
],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"valid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[19458
3,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped"
,[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]]
,[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped",[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798
]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[3
4396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[407
19]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowed"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]]; + +/***/ }), +/* 483 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.deleteBaggage = exports.setBaggage = exports.getBaggage = void 0; +var context_1 = __webpack_require__(132); +/** + * Baggage key + */ +var BAGGAGE_KEY = context_1.createContextKey('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +exports.getBaggage = getBaggage; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +exports.setBaggage = setBaggage; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +exports.deleteBaggage = deleteBaggage; +//# sourceMappingURL=context-helpers.js.map + +/***/ }), +/* 484 */, +/* 485 */, +/* 486 */, +/* 487 */, +/* 488 */, +/* 489 */, +/* 490 */, +/* 491 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLCharacterData, XMLProcessingInstruction, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = __webpack_require__(683); + + XMLCharacterData = __webpack_require__(639); + + module.exports = XMLProcessingInstruction = (function(superClass) { + extend(XMLProcessingInstruction, superClass); + + function XMLProcessingInstruction(parent, target, value) { + XMLProcessingInstruction.__super__.constructor.call(this, parent); + if (target == null) { + throw new Error("Missing instruction target. " + this.debugInfo()); + } + this.type = NodeType.ProcessingInstruction; + this.target = this.stringify.insTarget(target); + this.name = this.target; + if (value) { + this.value = this.stringify.insValue(value); + } + } + + XMLProcessingInstruction.prototype.clone = function() { + return Object.create(this); + }; + + XMLProcessingInstruction.prototype.toString = function(options) { + return this.options.writer.processingInstruction(this, this.options.writer.filterOptions(options)); + }; + + XMLProcessingInstruction.prototype.isEqualNode = function(node) { + if (!XMLProcessingInstruction.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.target !== this.target) { + return false; + } + return true; + }; + + return XMLProcessingInstruction; + + })(XMLCharacterData); + +}).call(this); + + +/***/ }), +/* 492 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ContextAPI = void 0; +var NoopContextManager_1 = __webpack_require__(425); +var global_utils_1 = __webpack_require__(525); +var diag_1 = __webpack_require__(118); +var API_NAME = 'context'; +var NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +var ContextAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function ContextAPI() { + } + /** Get the singleton instance of the Context API */ + ContextAPI.getInstance = function () { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + }; + /** + * Set the current context manager. + * + * @returns true if the context manager was successfully registered, else false + */ + ContextAPI.prototype.setGlobalContextManager = function (contextManager) { + return global_utils_1.registerGlobal(API_NAME, contextManager, diag_1.DiagAPI.instance()); + }; + /** + * Get the currently active context + */ + ContextAPI.prototype.active = function () { + return this._getContextManager().active(); + }; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + ContextAPI.prototype.with = function (context, fn, thisArg) { + var _a; + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return (_a = this._getContextManager()).with.apply(_a, __spreadArray([context, fn, thisArg], args)); + }; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. 
Defaults to the currently active context + * @param target function or event emitter to bind + */ + ContextAPI.prototype.bind = function (context, target) { + return this._getContextManager().bind(context, target); + }; + ContextAPI.prototype._getContextManager = function () { + return global_utils_1.getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; + }; + /** Disable and remove the global context manager */ + ContextAPI.prototype.disable = function () { + this._getContextManager().disable(); + global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + }; + return ContextAPI; +}()); +exports.ContextAPI = ContextAPI; +//# sourceMappingURL=context.js.map + +/***/ }), +/* 493 */, +/* 494 */, +/* 495 */, +/* 496 */, +/* 497 */, +/* 498 */, +/* 499 */, +/* 500 */ +/***/ (function(module) { + +module.exports = defer; + +/** + * Runs provided function on next iteration of the event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} + + +/***/ }), +/* 501 */, +/* 502 */, +/* 503 */, +/* 504 */, +/* 505 */, +/* 506 */, +/* 507 */, +/* 508 */, +/* 509 */, +/* 510 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +__webpack_require__(97); +var tslib = __webpack_require__(671); + +// Copyright (c) Microsoft Corporation. +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +function getPagedAsyncIterator(pagedResult) { + var _a; + const iter = getItemAsyncIterator(pagedResult); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { + return getPageAsyncIterator(pagedResult, settings === null || settings === void 0 ? void 0 : settings.maxPageSize); + }), + }; +} +function getItemAsyncIterator(pagedResult, maxPageSize) { + return tslib.__asyncGenerator(this, arguments, function* getItemAsyncIterator_1() { + var e_1, _a; + const pages = getPageAsyncIterator(pagedResult, maxPageSize); + const firstVal = yield tslib.__await(pages.next()); + // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + yield yield tslib.__await(firstVal.value); + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(pages))); + } + else { + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(firstVal.value))); + try { + for (var pages_1 = tslib.__asyncValues(pages), pages_1_1; pages_1_1 = yield tslib.__await(pages_1.next()), !pages_1_1.done;) { + const page = pages_1_1.value; + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. 
In this branch, + // it must be the case that `TPage = TElement[]` + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(page))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (pages_1_1 && !pages_1_1.done && (_a = pages_1.return)) yield tslib.__await(_a.call(pages_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); +} +function getPageAsyncIterator(pagedResult, maxPageSize) { + return tslib.__asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { + let response = yield tslib.__await(pagedResult.getPage(pagedResult.firstPageLink, maxPageSize)); + yield yield tslib.__await(response.page); + while (response.nextPageLink) { + response = yield tslib.__await(pagedResult.getPage(response.nextPageLink, maxPageSize)); + yield yield tslib.__await(response.page); + } + }); +} + +exports.getPagedAsyncIterator = getPagedAsyncIterator; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 511 */, +/* 512 */ +/***/ (function(module) { + +module.exports = {"application/1d-interleaved-parityfec":{"source":"iana"},"application/3gpdash-qoe-report+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/3gpp-ims+xml":{"source":"iana","compressible":true},"application/3gpphal+json":{"source":"iana","compressible":true},"application/3gpphalforms+json":{"source":"iana","compressible":true},"application/a2l":{"source":"iana"},"application/ace+cbor":{"source":"iana"},"application/activemessage":{"source":"iana"},"application/activity+json":{"source":"iana","compressible":true},"application/alto-costmap+json":{"source":"iana","compressible":true},"application/alto-costmapfilter+json":{"source":"iana","compressible":true},"application/alto-directory+json":{"source":"iana","compressible":true},"application/alto-endpointcost+json":{"source":"iana","compressible":true},"application/alto-endpointcostparams+json":{"source":"iana","compressible":true},"application/alto-endpointprop+json":{"source":"iana","compressible":true},"application/alto-endpointpropparams+json":{"source":"iana","compressible":true},"application/alto-error+json":{"source":"iana","compressible":true},"application/alto-networkmap+json":{"source":"iana","compressible":true},"application/alto-networkmapfilter+json":{"source":"iana","compressible":true},"application/alto-updatestreamcontrol+json":{"source":"iana","compressible":true},"application/alto-updatestreamparams+json":{"source":"iana","compressible":true},"application/aml":{"source":"iana"},"application/andrew-inset":{"source":"iana","extensions":["ez"]},"application/applefile":{"source":"iana"},"application/applixware":{"source":"apache","extensions":["aw"]},"application/at+jwt":{"source":"iana"},"application/atf":{"source":"iana"},"application/atfx":{"source":"iana"},"application/atom+xml":{"source":"iana","compressible":true,"extensions":["atom"]},"application/atomcat+xml":{"source":"iana","compressible":true,"extensions":["atomcat"]},"application/atomdeleted+xml":{"source":"iana","compressible":true,"extensions":["atomdeleted"]},"application/atomicmail":{"source":"iana"},"application/atomsvc+xml":{"source":"iana","compressible":true,"extensions":["atomsvc"]},"application/atsc-dwd+xml":{"source":"iana","compressible":true,"extensions":["dwd"]},"application/atsc-dynamic-event-message":{"source":"iana"},"application/atsc-held+xml":{"source":"iana","compressible":true,"extensions":["held"]},"application/atsc-rdt+json":{"source":"iana","compressible":true},"application/atsc-rsat+xml":{"source":"iana","compres
sible":true,"extensions":["rsat"]},"application/atxml":{"source":"iana"},"application/auth-policy+xml":{"source":"iana","compressible":true},"application/bacnet-xdd+zip":{"source":"iana","compressible":false},"application/batch-smtp":{"source":"iana"},"application/bdoc":{"compressible":false,"extensions":["bdoc"]},"application/beep+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/calendar+json":{"source":"iana","compressible":true},"application/calendar+xml":{"source":"iana","compressible":true,"extensions":["xcs"]},"application/call-completion":{"source":"iana"},"application/cals-1840":{"source":"iana"},"application/captive+json":{"source":"iana","compressible":true},"application/cbor":{"source":"iana"},"application/cbor-seq":{"source":"iana"},"application/cccex":{"source":"iana"},"application/ccmp+xml":{"source":"iana","compressible":true},"application/ccxml+xml":{"source":"iana","compressible":true,"extensions":["ccxml"]},"application/cdfx+xml":{"source":"iana","compressible":true,"extensions":["cdfx"]},"application/cdmi-capability":{"source":"iana","extensions":["cdmia"]},"application/cdmi-container":{"source":"iana","extensions":["cdmic"]},"application/cdmi-domain":{"source":"iana","extensions":["cdmid"]},"application/cdmi-object":{"source":"iana","extensions":["cdmio"]},"application/cdmi-queue":{"source":"iana","extensions":["cdmiq"]},"application/cdni":{"source":"iana"},"application/cea":{"source":"iana"},"application/cea-2018+xml":{"source":"iana","compressible":true},"application/cellml+xml":{"source":"iana","compressible":true},"application/cfw":{"source":"iana"},"application/clr":{"source":"iana"},"application/clue+xml":{"source":"iana","compressible":true},"application/clue_info+xml":{"source":"iana","compressible":true},"application/cms":{"source":"iana"},"application/cnrp+xml":{"source":"iana","compressible":true},"application/coap-group+json":{"source":"iana","compressible":true},"application/coap-payload":{"source":"iana"},"application/commonground":{"source":"iana"},"application/conference-info+xml":{"source":"iana","compressible":true},"application/cose":{"source":"iana"},"application/cose-key":{"source":"iana"},"application/cose-key-set":{"source":"iana"},"application/cpl+xml":{"source":"iana","compressible":true},"application/csrattrs":{"source":"iana"},"application/csta+xml":{"source":"iana","compressible":true},"application/cstadata+xml":{"source":"iana","compressible":true},"application/csvm+json":{"source":"iana","compressible":true},"application/cu-seeme":{"source":"apache","extensions":["cu"]},"application/cwt":{"source":"iana"},"application/cybercash":{"source":"iana"},"application/dart":{"compressible":true},"application/dash+xml":{"source":"iana","compressible":true,"extensions":["mpd"]},"application/dashdelta":{"source":"iana"},"application/davmount+xml":{"source":"iana","compressible":true,"extensions":["davmount"]},"application/dca-rft":{"source":"iana"},"application/dcd":{"source":"iana"},"application/dec-dx":{"source":"iana"},"application/dialog-info+xml":{"source":"iana","compressible":true},"application/dicom":{"source":"iana"},"application/dicom+json":{"source":"iana","compressible":true},"application/dicom+xml":{"source":"iana","compressible":true},"application/dii":{"source":"iana"},"application/dit":{"source":"iana"},"application/dns":{"source":"iana"},"application/dns+json":{"source":"iana","compressible":true},"application/dns-message":{"source":"iana"},"application/docbook+xml":{"source":"apache","compressible":true,"extens
ions":["dbk"]},"application/dots+cbor":{"source":"iana"},"application/dskpp+xml":{"source":"iana","compressible":true},"application/dssc+der":{"source":"iana","extensions":["dssc"]},"application/dssc+xml":{"source":"iana","compressible":true,"extensions":["xdssc"]},"application/dvcs":{"source":"iana"},"application/ecmascript":{"source":"iana","compressible":true,"extensions":["es","ecma"]},"application/edi-consent":{"source":"iana"},"application/edi-x12":{"source":"iana","compressible":false},"application/edifact":{"source":"iana","compressible":false},"application/efi":{"source":"iana"},"application/elm+json":{"source":"iana","charset":"UTF-8","compressible":true},"application/elm+xml":{"source":"iana","compressible":true},"application/emergencycalldata.cap+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/emergencycalldata.comment+xml":{"source":"iana","compressible":true},"application/emergencycalldata.control+xml":{"source":"iana","compressible":true},"application/emergencycalldata.deviceinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.ecall.msd":{"source":"iana"},"application/emergencycalldata.providerinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.serviceinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.subscriberinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.veds+xml":{"source":"iana","compressible":true},"application/emma+xml":{"source":"iana","compressible":true,"extensions":["emma"]},"application/emotionml+xml":{"source":"iana","compressible":true,"extensions":["emotionml"]},"application/encaprtp":{"source":"iana"},"application/epp+xml":{"source":"iana","compressible":true},"application/epub+zip":{"source":"iana","compressible":false,"extensions":["epub"]},"application/eshop":{"source":"iana"},"application/exi":{"source":"iana","extensions":["exi"]},"application/expect-ct-report+json":{"source":"iana","compressible":true},"application/express":{"source":"iana","extensions":["exp"]},"application/fastinfoset":{"source":"iana"},"application/fastsoap":{"source":"iana"},"application/fdt+xml":{"source":"iana","compressible":true,"extensions":["fdt"]},"application/fhir+json":{"source":"iana","charset":"UTF-8","compressible":true},"application/fhir+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/fido.trusted-apps+json":{"compressible":true},"application/fits":{"source":"iana"},"application/flexfec":{"source":"iana"},"application/font-sfnt":{"source":"iana"},"application/font-tdpfr":{"source":"iana","extensions":["pfr"]},"application/font-woff":{"source":"iana","compressible":false},"application/framework-attributes+xml":{"source":"iana","compressible":true},"application/geo+json":{"source":"iana","compressible":true,"extensions":["geojson"]},"application/geo+json-seq":{"source":"iana"},"application/geopackage+sqlite3":{"source":"iana"},"application/geoxacml+xml":{"source":"iana","compressible":true},"application/gltf-buffer":{"source":"iana"},"application/gml+xml":{"source":"iana","compressible":true,"extensions":["gml"]},"application/gpx+xml":{"source":"apache","compressible":true,"extensions":["gpx"]},"application/gxf":{"source":"apache","extensions":["gxf"]},"application/gzip":{"source":"iana","compressible":false,"extensions":["gz"]},"application/h224":{"source":"iana"},"application/held+xml":{"source":"iana","compressible":true},"application/hjson":{"extensions":["hjson"]},"application/http":{"source":"iana"},"appl
ication/hyperstudio":{"source":"iana","extensions":["stk"]},"application/ibe-key-request+xml":{"source":"iana","compressible":true},"application/ibe-pkg-reply+xml":{"source":"iana","compressible":true},"application/ibe-pp-data":{"source":"iana"},"application/iges":{"source":"iana"},"application/im-iscomposing+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/index":{"source":"iana"},"application/index.cmd":{"source":"iana"},"application/index.obj":{"source":"iana"},"application/index.response":{"source":"iana"},"application/index.vnd":{"source":"iana"},"application/inkml+xml":{"source":"iana","compressible":true,"extensions":["ink","inkml"]},"application/iotp":{"source":"iana"},"application/ipfix":{"source":"iana","extensions":["ipfix"]},"application/ipp":{"source":"iana"},"application/isup":{"source":"iana"},"application/its+xml":{"source":"iana","compressible":true,"extensions":["its"]},"application/java-archive":{"source":"apache","compressible":false,"extensions":["jar","war","ear"]},"application/java-serialized-object":{"source":"apache","compressible":false,"extensions":["ser"]},"application/java-vm":{"source":"apache","compressible":false,"extensions":["class"]},"application/javascript":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["js","mjs"]},"application/jf2feed+json":{"source":"iana","compressible":true},"application/jose":{"source":"iana"},"application/jose+json":{"source":"iana","compressible":true},"application/jrd+json":{"source":"iana","compressible":true},"application/jscalendar+json":{"source":"iana","compressible":true},"application/json":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["json","map"]},"application/json-patch+json":{"source":"iana","compressible":true},"application/json-seq":{"source":"iana"},"application/json5":{"extensions":["json5"]},"application/jsonml+json":{"source":"apache","compressible":true,"extensions":["jsonml"]},"application/jwk+json":{"source":"iana","compressible":true},"application/jwk-set+json":{"source":"iana","compressible":true},"application/jwt":{"source":"iana"},"application/kpml-request+xml":{"source":"iana","compressible":true},"application/kpml-response+xml":{"source":"iana","compressible":true},"application/ld+json":{"source":"iana","compressible":true,"extensions":["jsonld"]},"application/lgr+xml":{"source":"iana","compressible":true,"extensions":["lgr"]},"application/link-format":{"source":"iana"},"application/load-control+xml":{"source":"iana","compressible":true},"application/lost+xml":{"source":"iana","compressible":true,"extensions":["lostxml"]},"application/lostsync+xml":{"source":"iana","compressible":true},"application/lpf+zip":{"source":"iana","compressible":false},"application/lxf":{"source":"iana"},"application/mac-binhex40":{"source":"iana","extensions":["hqx"]},"application/mac-compactpro":{"source":"apache","extensions":["cpt"]},"application/macwriteii":{"source":"iana"},"application/mads+xml":{"source":"iana","compressible":true,"extensions":["mads"]},"application/manifest+json":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["webmanifest"]},"application/marc":{"source":"iana","extensions":["mrc"]},"application/marcxml+xml":{"source":"iana","compressible":true,"extensions":["mrcx"]},"application/mathematica":{"source":"iana","extensions":["ma","nb","mb"]},"application/mathml+xml":{"source":"iana","compressible":true,"extensions":["mathml"]},"application/mathml-content+xml":{"source":"iana","compressible":true},"application/mathm
l-presentation+xml":{"source":"iana","compressible":true},"application/mbms-associated-procedure-description+xml":{"source":"iana","compressible":true},"application/mbms-deregister+xml":{"source":"iana","compressible":true},"application/mbms-envelope+xml":{"source":"iana","compressible":true},"application/mbms-msk+xml":{"source":"iana","compressible":true},"application/mbms-msk-response+xml":{"source":"iana","compressible":true},"application/mbms-protection-description+xml":{"source":"iana","compressible":true},"application/mbms-reception-report+xml":{"source":"iana","compressible":true},"application/mbms-register+xml":{"source":"iana","compressible":true},"application/mbms-register-response+xml":{"source":"iana","compressible":true},"application/mbms-schedule+xml":{"source":"iana","compressible":true},"application/mbms-user-service-description+xml":{"source":"iana","compressible":true},"application/mbox":{"source":"iana","extensions":["mbox"]},"application/media-policy-dataset+xml":{"source":"iana","compressible":true},"application/media_control+xml":{"source":"iana","compressible":true},"application/mediaservercontrol+xml":{"source":"iana","compressible":true,"extensions":["mscml"]},"application/merge-patch+json":{"source":"iana","compressible":true},"application/metalink+xml":{"source":"apache","compressible":true,"extensions":["metalink"]},"application/metalink4+xml":{"source":"iana","compressible":true,"extensions":["meta4"]},"application/mets+xml":{"source":"iana","compressible":true,"extensions":["mets"]},"application/mf4":{"source":"iana"},"application/mikey":{"source":"iana"},"application/mipc":{"source":"iana"},"application/missing-blocks+cbor-seq":{"source":"iana"},"application/mmt-aei+xml":{"source":"iana","compressible":true,"extensions":["maei"]},"application/mmt-usd+xml":{"source":"iana","compressible":true,"extensions":["musd"]},"application/mods+xml":{"source":"iana","compressible":true,"extensions":["mods"]},"application/moss-keys":{"source":"iana"},"application/moss-signature":{"source":"iana"},"application/mosskey-data":{"source":"iana"},"application/mosskey-request":{"source":"iana"},"application/mp21":{"source":"iana","extensions":["m21","mp21"]},"application/mp4":{"source":"iana","extensions":["mp4s","m4p"]},"application/mpeg4-generic":{"source":"iana"},"application/mpeg4-iod":{"source":"iana"},"application/mpeg4-iod-xmt":{"source":"iana"},"application/mrb-consumer+xml":{"source":"iana","compressible":true},"application/mrb-publish+xml":{"source":"iana","compressible":true},"application/msc-ivr+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/msc-mixer+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/msword":{"source":"iana","compressible":false,"extensions":["doc","dot"]},"application/mud+json":{"source":"iana","compressible":true},"application/multipart-core":{"source":"iana"},"application/mxf":{"source":"iana","extensions":["mxf"]},"application/n-quads":{"source":"iana","extensions":["nq"]},"application/n-triples":{"source":"iana","extensions":["nt"]},"application/nasdata":{"source":"iana"},"application/news-checkgroups":{"source":"iana","charset":"US-ASCII"},"application/news-groupinfo":{"source":"iana","charset":"US-ASCII"},"application/news-transmission":{"source":"iana"},"application/nlsml+xml":{"source":"iana","compressible":true},"application/node":{"source":"iana","extensions":["cjs"]},"application/nss":{"source":"iana"},"application/oauth-authz-req+jwt":{"source":"iana"},"application/ocsp-request":{"source":
"iana"},"application/ocsp-response":{"source":"iana"},"application/octet-stream":{"source":"iana","compressible":false,"extensions":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"]},"application/oda":{"source":"iana","extensions":["oda"]},"application/odm+xml":{"source":"iana","compressible":true},"application/odx":{"source":"iana"},"application/oebps-package+xml":{"source":"iana","compressible":true,"extensions":["opf"]},"application/ogg":{"source":"iana","compressible":false,"extensions":["ogx"]},"application/omdoc+xml":{"source":"apache","compressible":true,"extensions":["omdoc"]},"application/onenote":{"source":"apache","extensions":["onetoc","onetoc2","onetmp","onepkg"]},"application/opc-nodeset+xml":{"source":"iana","compressible":true},"application/oscore":{"source":"iana"},"application/oxps":{"source":"iana","extensions":["oxps"]},"application/p21":{"source":"iana"},"application/p21+zip":{"source":"iana","compressible":false},"application/p2p-overlay+xml":{"source":"iana","compressible":true,"extensions":["relo"]},"application/parityfec":{"source":"iana"},"application/passport":{"source":"iana"},"application/patch-ops-error+xml":{"source":"iana","compressible":true,"extensions":["xer"]},"application/pdf":{"source":"iana","compressible":false,"extensions":["pdf"]},"application/pdx":{"source":"iana"},"application/pem-certificate-chain":{"source":"iana"},"application/pgp-encrypted":{"source":"iana","compressible":false,"extensions":["pgp"]},"application/pgp-keys":{"source":"iana"},"application/pgp-signature":{"source":"iana","extensions":["asc","sig"]},"application/pics-rules":{"source":"apache","extensions":["prf"]},"application/pidf+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/pidf-diff+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/pkcs10":{"source":"iana","extensions":["p10"]},"application/pkcs12":{"source":"iana"},"application/pkcs7-mime":{"source":"iana","extensions":["p7m","p7c"]},"application/pkcs7-signature":{"source":"iana","extensions":["p7s"]},"application/pkcs8":{"source":"iana","extensions":["p8"]},"application/pkcs8-encrypted":{"source":"iana"},"application/pkix-attr-cert":{"source":"iana","extensions":["ac"]},"application/pkix-cert":{"source":"iana","extensions":["cer"]},"application/pkix-crl":{"source":"iana","extensions":["crl"]},"application/pkix-pkipath":{"source":"iana","extensions":["pkipath"]},"application/pkixcmp":{"source":"iana","extensions":["pki"]},"application/pls+xml":{"source":"iana","compressible":true,"extensions":["pls"]},"application/poc-settings+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/postscript":{"source":"iana","compressible":true,"extensions":["ai","eps","ps"]},"application/ppsp-tracker+json":{"source":"iana","compressible":true},"application/problem+json":{"source":"iana","compressible":true},"application/problem+xml":{"source":"iana","compressible":true},"application/provenance+xml":{"source":"iana","compressible":true,"extensions":["provx"]},"application/prs.alvestrand.titrax-sheet":{"source":"iana"},"application/prs.cww":{"source":"iana","extensions":["cww"]},"application/prs.cyn":{"source":"iana","charset":"7-BIT"},"application/prs.hpub+zip":{"source":"iana","compressible":false},"application/prs.nprend":{"source":"iana"},"application/prs.plucker":{"source":"iana"},"application/prs.rdf-xml-crypt":{"source":"iana"},"application/prs.xsf+xml":{"source":"iana","comp
ressible":true},"application/pskc+xml":{"source":"iana","compressible":true,"extensions":["pskcxml"]},"application/pvd+json":{"source":"iana","compressible":true},"application/qsig":{"source":"iana"},"application/raml+yaml":{"compressible":true,"extensions":["raml"]},"application/raptorfec":{"source":"iana"},"application/rdap+json":{"source":"iana","compressible":true},"application/rdf+xml":{"source":"iana","compressible":true,"extensions":["rdf","owl"]},"application/reginfo+xml":{"source":"iana","compressible":true,"extensions":["rif"]},"application/relax-ng-compact-syntax":{"source":"iana","extensions":["rnc"]},"application/remote-printing":{"source":"iana"},"application/reputon+json":{"source":"iana","compressible":true},"application/resource-lists+xml":{"source":"iana","compressible":true,"extensions":["rl"]},"application/resource-lists-diff+xml":{"source":"iana","compressible":true,"extensions":["rld"]},"application/rfc+xml":{"source":"iana","compressible":true},"application/riscos":{"source":"iana"},"application/rlmi+xml":{"source":"iana","compressible":true},"application/rls-services+xml":{"source":"iana","compressible":true,"extensions":["rs"]},"application/route-apd+xml":{"source":"iana","compressible":true,"extensions":["rapd"]},"application/route-s-tsid+xml":{"source":"iana","compressible":true,"extensions":["sls"]},"application/route-usd+xml":{"source":"iana","compressible":true,"extensions":["rusd"]},"application/rpki-ghostbusters":{"source":"iana","extensions":["gbr"]},"application/rpki-manifest":{"source":"iana","extensions":["mft"]},"application/rpki-publication":{"source":"iana"},"application/rpki-roa":{"source":"iana","extensions":["roa"]},"application/rpki-updown":{"source":"iana"},"application/rsd+xml":{"source":"apache","compressible":true,"extensions":["rsd"]},"application/rss+xml":{"source":"apache","compressible":true,"extensions":["rss"]},"application/rtf":{"source":"iana","compressible":true,"extensions":["rtf"]},"application/rtploopback":{"source":"iana"},"application/rtx":{"source":"iana"},"application/samlassertion+xml":{"source":"iana","compressible":true},"application/samlmetadata+xml":{"source":"iana","compressible":true},"application/sarif+json":{"source":"iana","compressible":true},"application/sarif-external-properties+json":{"source":"iana","compressible":true},"application/sbe":{"source":"iana"},"application/sbml+xml":{"source":"iana","compressible":true,"extensions":["sbml"]},"application/scaip+xml":{"source":"iana","compressible":true},"application/scim+json":{"source":"iana","compressible":true},"application/scvp-cv-request":{"source":"iana","extensions":["scq"]},"application/scvp-cv-response":{"source":"iana","extensions":["scs"]},"application/scvp-vp-request":{"source":"iana","extensions":["spq"]},"application/scvp-vp-response":{"source":"iana","extensions":["spp"]},"application/sdp":{"source":"iana","extensions":["sdp"]},"application/secevent+jwt":{"source":"iana"},"application/senml+cbor":{"source":"iana"},"application/senml+json":{"source":"iana","compressible":true},"application/senml+xml":{"source":"iana","compressible":true,"extensions":["senmlx"]},"application/senml-etch+cbor":{"source":"iana"},"application/senml-etch+json":{"source":"iana","compressible":true},"application/senml-exi":{"source":"iana"},"application/sensml+cbor":{"source":"iana"},"application/sensml+json":{"source":"iana","compressible":true},"application/sensml+xml":{"source":"iana","compressible":true,"extensions":["sensmlx"]},"application/sensml-exi":{"source":"iana"},"appl
ication/sep+xml":{"source":"iana","compressible":true},"application/sep-exi":{"source":"iana"},"application/session-info":{"source":"iana"},"application/set-payment":{"source":"iana"},"application/set-payment-initiation":{"source":"iana","extensions":["setpay"]},"application/set-registration":{"source":"iana"},"application/set-registration-initiation":{"source":"iana","extensions":["setreg"]},"application/sgml":{"source":"iana"},"application/sgml-open-catalog":{"source":"iana"},"application/shf+xml":{"source":"iana","compressible":true,"extensions":["shf"]},"application/sieve":{"source":"iana","extensions":["siv","sieve"]},"application/simple-filter+xml":{"source":"iana","compressible":true},"application/simple-message-summary":{"source":"iana"},"application/simplesymbolcontainer":{"source":"iana"},"application/sipc":{"source":"iana"},"application/slate":{"source":"iana"},"application/smil":{"source":"iana"},"application/smil+xml":{"source":"iana","compressible":true,"extensions":["smi","smil"]},"application/smpte336m":{"source":"iana"},"application/soap+fastinfoset":{"source":"iana"},"application/soap+xml":{"source":"iana","compressible":true},"application/sparql-query":{"source":"iana","extensions":["rq"]},"application/sparql-results+xml":{"source":"iana","compressible":true,"extensions":["srx"]},"application/spdx+json":{"source":"iana","compressible":true},"application/spirits-event+xml":{"source":"iana","compressible":true},"application/sql":{"source":"iana"},"application/srgs":{"source":"iana","extensions":["gram"]},"application/srgs+xml":{"source":"iana","compressible":true,"extensions":["grxml"]},"application/sru+xml":{"source":"iana","compressible":true,"extensions":["sru"]},"application/ssdl+xml":{"source":"apache","compressible":true,"extensions":["ssdl"]},"application/ssml+xml":{"source":"iana","compressible":true,"extensions":["ssml"]},"application/stix+json":{"source":"iana","compressible":true},"application/swid+xml":{"source":"iana","compressible":true,"extensions":["swidtag"]},"application/tamp-apex-update":{"source":"iana"},"application/tamp-apex-update-confirm":{"source":"iana"},"application/tamp-community-update":{"source":"iana"},"application/tamp-community-update-confirm":{"source":"iana"},"application/tamp-error":{"source":"iana"},"application/tamp-sequence-adjust":{"source":"iana"},"application/tamp-sequence-adjust-confirm":{"source":"iana"},"application/tamp-status-query":{"source":"iana"},"application/tamp-status-response":{"source":"iana"},"application/tamp-update":{"source":"iana"},"application/tamp-update-confirm":{"source":"iana"},"application/tar":{"compressible":true},"application/taxii+json":{"source":"iana","compressible":true},"application/td+json":{"source":"iana","compressible":true},"application/tei+xml":{"source":"iana","compressible":true,"extensions":["tei","teicorpus"]},"application/tetra_isi":{"source":"iana"},"application/thraud+xml":{"source":"iana","compressible":true,"extensions":["tfi"]},"application/timestamp-query":{"source":"iana"},"application/timestamp-reply":{"source":"iana"},"application/timestamped-data":{"source":"iana","extensions":["tsd"]},"application/tlsrpt+gzip":{"source":"iana"},"application/tlsrpt+json":{"source":"iana","compressible":true},"application/tnauthlist":{"source":"iana"},"application/token-introspection+jwt":{"source":"iana"},"application/toml":{"compressible":true,"extensions":["toml"]},"application/trickle-ice-sdpfrag":{"source":"iana"},"application/trig":{"source":"iana","extensions":["trig"]},"application/ttml+xm
l":{"source":"iana","compressible":true,"extensions":["ttml"]},"application/tve-trigger":{"source":"iana"},"application/tzif":{"source":"iana"},"application/tzif-leap":{"source":"iana"},"application/ubjson":{"compressible":false,"extensions":["ubj"]},"application/ulpfec":{"source":"iana"},"application/urc-grpsheet+xml":{"source":"iana","compressible":true},"application/urc-ressheet+xml":{"source":"iana","compressible":true,"extensions":["rsheet"]},"application/urc-targetdesc+xml":{"source":"iana","compressible":true,"extensions":["td"]},"application/urc-uisocketdesc+xml":{"source":"iana","compressible":true},"application/vcard+json":{"source":"iana","compressible":true},"application/vcard+xml":{"source":"iana","compressible":true},"application/vemmi":{"source":"iana"},"application/vividence.scriptfile":{"source":"apache"},"application/vnd.1000minds.decision-model+xml":{"source":"iana","compressible":true,"extensions":["1km"]},"application/vnd.3gpp-prose+xml":{"source":"iana","compressible":true},"application/vnd.3gpp-prose-pc3ch+xml":{"source":"iana","compressible":true},"application/vnd.3gpp-v2x-local-service-information":{"source":"iana"},"application/vnd.3gpp.5gnas":{"source":"iana"},"application/vnd.3gpp.access-transfer-events+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.bsf+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.gmop+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.gtpc":{"source":"iana"},"application/vnd.3gpp.interworking-data":{"source":"iana"},"application/vnd.3gpp.lpp":{"source":"iana"},"application/vnd.3gpp.mc-signalling-ear":{"source":"iana"},"application/vnd.3gpp.mcdata-affiliation-command+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-payload":{"source":"iana"},"application/vnd.3gpp.mcdata-service-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-signalling":{"source":"iana"},"application/vnd.3gpp.mcdata-ue-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-user-profile+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-affiliation-command+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-floor-request+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-location-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-mbms-usage-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-service-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-signed+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-ue-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-ue-init-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-user-profile+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-affiliation-command+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-affiliation-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-location-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-mbms-usage-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-service-config+xml":{"source":"iana","compressible":true},"applicatio
n/vnd.3gpp.mcvideo-transmission-request+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-ue-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-user-profile+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mid-call+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.ngap":{"source":"iana"},"application/vnd.3gpp.pfcp":{"source":"iana"},"application/vnd.3gpp.pic-bw-large":{"source":"iana","extensions":["plb"]},"application/vnd.3gpp.pic-bw-small":{"source":"iana","extensions":["psb"]},"application/vnd.3gpp.pic-bw-var":{"source":"iana","extensions":["pvb"]},"application/vnd.3gpp.s1ap":{"source":"iana"},"application/vnd.3gpp.sms":{"source":"iana"},"application/vnd.3gpp.sms+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.srvcc-ext+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.srvcc-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.state-and-event-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.ussd+xml":{"source":"iana","compressible":true},"application/vnd.3gpp2.bcmcsinfo+xml":{"source":"iana","compressible":true},"application/vnd.3gpp2.sms":{"source":"iana"},"application/vnd.3gpp2.tcap":{"source":"iana","extensions":["tcap"]},"application/vnd.3lightssoftware.imagescal":{"source":"iana"},"application/vnd.3m.post-it-notes":{"source":"iana","extensions":["pwn"]},"application/vnd.accpac.simply.aso":{"source":"iana","extensions":["aso"]},"application/vnd.accpac.simply.imp":{"source":"iana","extensions":["imp"]},"application/vnd.acucobol":{"source":"iana","extensions":["acu"]},"application/vnd.acucorp":{"source":"iana","extensions":["atc","acutc"]},"application/vnd.adobe.air-application-installer-package+zip":{"source":"apache","compressible":false,"extensions":["air"]},"application/vnd.adobe.flash.movie":{"source":"iana"},"application/vnd.adobe.formscentral.fcdt":{"source":"iana","extensions":["fcdt"]},"application/vnd.adobe.fxp":{"source":"iana","extensions":["fxp","fxpl"]},"application/vnd.adobe.partial-upload":{"source":"iana"},"application/vnd.adobe.xdp+xml":{"source":"iana","compressible":true,"extensions":["xdp"]},"application/vnd.adobe.xfdf":{"source":"iana","extensions":["xfdf"]},"application/vnd.aether.imp":{"source":"iana"},"application/vnd.afpc.afplinedata":{"source":"iana"},"application/vnd.afpc.afplinedata-pagedef":{"source":"iana"},"application/vnd.afpc.cmoca-cmresource":{"source":"iana"},"application/vnd.afpc.foca-charset":{"source":"iana"},"application/vnd.afpc.foca-codedfont":{"source":"iana"},"application/vnd.afpc.foca-codepage":{"source":"iana"},"application/vnd.afpc.modca":{"source":"iana"},"application/vnd.afpc.modca-cmtable":{"source":"iana"},"application/vnd.afpc.modca-formdef":{"source":"iana"},"application/vnd.afpc.modca-mediummap":{"source":"iana"},"application/vnd.afpc.modca-objectcontainer":{"source":"iana"},"application/vnd.afpc.modca-overlay":{"source":"iana"},"application/vnd.afpc.modca-pagesegment":{"source":"iana"},"application/vnd.age":{"source":"iana","extensions":["age"]},"application/vnd.ah-barcode":{"source":"iana"},"application/vnd.ahead.space":{"source":"iana","extensions":["ahead"]},"application/vnd.airzip.filesecure.azf":{"source":"iana","extensions":["azf"]},"application/vnd.airzip.filesecure.azs":{"source":"iana","extensions":["azs"]},"application/vnd.amadeus+json":{"source":"iana","compressible":true},"application/vnd.amazon.ebook":{"source":"apache","extensions":["azw"]},"application/vnd.amazon.mob
i8-ebook":{"source":"iana"},"application/vnd.americandynamics.acc":{"source":"iana","extensions":["acc"]},"application/vnd.amiga.ami":{"source":"iana","extensions":["ami"]},"application/vnd.amundsen.maze+xml":{"source":"iana","compressible":true},"application/vnd.android.ota":{"source":"iana"},"application/vnd.android.package-archive":{"source":"apache","compressible":false,"extensions":["apk"]},"application/vnd.anki":{"source":"iana"},"application/vnd.anser-web-certificate-issue-initiation":{"source":"iana","extensions":["cii"]},"application/vnd.anser-web-funds-transfer-initiation":{"source":"apache","extensions":["fti"]},"application/vnd.antix.game-component":{"source":"iana","extensions":["atx"]},"application/vnd.apache.arrow.file":{"source":"iana"},"application/vnd.apache.arrow.stream":{"source":"iana"},"application/vnd.apache.thrift.binary":{"source":"iana"},"application/vnd.apache.thrift.compact":{"source":"iana"},"application/vnd.apache.thrift.json":{"source":"iana"},"application/vnd.api+json":{"source":"iana","compressible":true},"application/vnd.aplextor.warrp+json":{"source":"iana","compressible":true},"application/vnd.apothekende.reservation+json":{"source":"iana","compressible":true},"application/vnd.apple.installer+xml":{"source":"iana","compressible":true,"extensions":["mpkg"]},"application/vnd.apple.keynote":{"source":"iana","extensions":["key"]},"application/vnd.apple.mpegurl":{"source":"iana","extensions":["m3u8"]},"application/vnd.apple.numbers":{"source":"iana","extensions":["numbers"]},"application/vnd.apple.pages":{"source":"iana","extensions":["pages"]},"application/vnd.apple.pkpass":{"compressible":false,"extensions":["pkpass"]},"application/vnd.arastra.swi":{"source":"iana"},"application/vnd.aristanetworks.swi":{"source":"iana","extensions":["swi"]},"application/vnd.artisan+json":{"source":"iana","compressible":true},"application/vnd.artsquare":{"source":"iana"},"application/vnd.astraea-software.iota":{"source":"iana","extensions":["iota"]},"application/vnd.audiograph":{"source":"iana","extensions":["aep"]},"application/vnd.autopackage":{"source":"iana"},"application/vnd.avalon+json":{"source":"iana","compressible":true},"application/vnd.avistar+xml":{"source":"iana","compressible":true},"application/vnd.balsamiq.bmml+xml":{"source":"iana","compressible":true,"extensions":["bmml"]},"application/vnd.balsamiq.bmpr":{"source":"iana"},"application/vnd.banana-accounting":{"source":"iana"},"application/vnd.bbf.usp.error":{"source":"iana"},"application/vnd.bbf.usp.msg":{"source":"iana"},"application/vnd.bbf.usp.msg+json":{"source":"iana","compressible":true},"application/vnd.bekitzur-stech+json":{"source":"iana","compressible":true},"application/vnd.bint.med-content":{"source":"iana"},"application/vnd.biopax.rdf+xml":{"source":"iana","compressible":true},"application/vnd.blink-idb-value-wrapper":{"source":"iana"},"application/vnd.blueice.multipass":{"source":"iana","extensions":["mpm"]},"application/vnd.bluetooth.ep.oob":{"source":"iana"},"application/vnd.bluetooth.le.oob":{"source":"iana"},"application/vnd.bmi":{"source":"iana","extensions":["bmi"]},"application/vnd.bpf":{"source":"iana"},"application/vnd.bpf3":{"source":"iana"},"application/vnd.businessobjects":{"source":"iana","extensions":["rep"]},"application/vnd.byu.uapi+json":{"source":"iana","compressible":true},"application/vnd.cab-jscript":{"source":"iana"},"application/vnd.canon-cpdl":{"source":"iana"},"application/vnd.canon-lips":{"source":"iana"},"application/vnd.capasystems-pg+json":{"source":"iana","compressi
ble":true},"application/vnd.cendio.thinlinc.clientconf":{"source":"iana"},"application/vnd.century-systems.tcp_stream":{"source":"iana"},"application/vnd.chemdraw+xml":{"source":"iana","compressible":true,"extensions":["cdxml"]},"application/vnd.chess-pgn":{"source":"iana"},"application/vnd.chipnuts.karaoke-mmd":{"source":"iana","extensions":["mmd"]},"application/vnd.ciedi":{"source":"iana"},"application/vnd.cinderella":{"source":"iana","extensions":["cdy"]},"application/vnd.cirpack.isdn-ext":{"source":"iana"},"application/vnd.citationstyles.style+xml":{"source":"iana","compressible":true,"extensions":["csl"]},"application/vnd.claymore":{"source":"iana","extensions":["cla"]},"application/vnd.cloanto.rp9":{"source":"iana","extensions":["rp9"]},"application/vnd.clonk.c4group":{"source":"iana","extensions":["c4g","c4d","c4f","c4p","c4u"]},"application/vnd.cluetrust.cartomobile-config":{"source":"iana","extensions":["c11amc"]},"application/vnd.cluetrust.cartomobile-config-pkg":{"source":"iana","extensions":["c11amz"]},"application/vnd.coffeescript":{"source":"iana"},"application/vnd.collabio.xodocuments.document":{"source":"iana"},"application/vnd.collabio.xodocuments.document-template":{"source":"iana"},"application/vnd.collabio.xodocuments.presentation":{"source":"iana"},"application/vnd.collabio.xodocuments.presentation-template":{"source":"iana"},"application/vnd.collabio.xodocuments.spreadsheet":{"source":"iana"},"application/vnd.collabio.xodocuments.spreadsheet-template":{"source":"iana"},"application/vnd.collection+json":{"source":"iana","compressible":true},"application/vnd.collection.doc+json":{"source":"iana","compressible":true},"application/vnd.collection.next+json":{"source":"iana","compressible":true},"application/vnd.comicbook+zip":{"source":"iana","compressible":false},"application/vnd.comicbook-rar":{"source":"iana"},"application/vnd.commerce-battelle":{"source":"iana"},"application/vnd.commonspace":{"source":"iana","extensions":["csp"]},"application/vnd.contact.cmsg":{"source":"iana","extensions":["cdbcmsg"]},"application/vnd.coreos.ignition+json":{"source":"iana","compressible":true},"application/vnd.cosmocaller":{"source":"iana","extensions":["cmc"]},"application/vnd.crick.clicker":{"source":"iana","extensions":["clkx"]},"application/vnd.crick.clicker.keyboard":{"source":"iana","extensions":["clkk"]},"application/vnd.crick.clicker.palette":{"source":"iana","extensions":["clkp"]},"application/vnd.crick.clicker.template":{"source":"iana","extensions":["clkt"]},"application/vnd.crick.clicker.wordbank":{"source":"iana","extensions":["clkw"]},"application/vnd.criticaltools.wbs+xml":{"source":"iana","compressible":true,"extensions":["wbs"]},"application/vnd.cryptii.pipe+json":{"source":"iana","compressible":true},"application/vnd.crypto-shade-file":{"source":"iana"},"application/vnd.cryptomator.encrypted":{"source":"iana"},"application/vnd.cryptomator.vault":{"source":"iana"},"application/vnd.ctc-posml":{"source":"iana","extensions":["pml"]},"application/vnd.ctct.ws+xml":{"source":"iana","compressible":true},"application/vnd.cups-pdf":{"source":"iana"},"application/vnd.cups-postscript":{"source":"iana"},"application/vnd.cups-ppd":{"source":"iana","extensions":["ppd"]},"application/vnd.cups-raster":{"source":"iana"},"application/vnd.cups-raw":{"source":"iana"},"application/vnd.curl":{"source":"iana"},"application/vnd.curl.car":{"source":"apache","extensions":["car"]},"application/vnd.curl.pcurl":{"source":"apache","extensions":["pcurl"]},"application/vnd.cyan.dean.root+xml":{"sourc
e":"iana","compressible":true},"application/vnd.cybank":{"source":"iana"},"application/vnd.cyclonedx+json":{"source":"iana","compressible":true},"application/vnd.cyclonedx+xml":{"source":"iana","compressible":true},"application/vnd.d2l.coursepackage1p0+zip":{"source":"iana","compressible":false},"application/vnd.d3m-dataset":{"source":"iana"},"application/vnd.d3m-problem":{"source":"iana"},"application/vnd.dart":{"source":"iana","compressible":true,"extensions":["dart"]},"application/vnd.data-vision.rdz":{"source":"iana","extensions":["rdz"]},"application/vnd.datapackage+json":{"source":"iana","compressible":true},"application/vnd.dataresource+json":{"source":"iana","compressible":true},"application/vnd.dbf":{"source":"iana","extensions":["dbf"]},"application/vnd.debian.binary-package":{"source":"iana"},"application/vnd.dece.data":{"source":"iana","extensions":["uvf","uvvf","uvd","uvvd"]},"application/vnd.dece.ttml+xml":{"source":"iana","compressible":true,"extensions":["uvt","uvvt"]},"application/vnd.dece.unspecified":{"source":"iana","extensions":["uvx","uvvx"]},"application/vnd.dece.zip":{"source":"iana","extensions":["uvz","uvvz"]},"application/vnd.denovo.fcselayout-link":{"source":"iana","extensions":["fe_launch"]},"application/vnd.desmume.movie":{"source":"iana"},"application/vnd.dir-bi.plate-dl-nosuffix":{"source":"iana"},"application/vnd.dm.delegation+xml":{"source":"iana","compressible":true},"application/vnd.dna":{"source":"iana","extensions":["dna"]},"application/vnd.document+json":{"source":"iana","compressible":true},"application/vnd.dolby.mlp":{"source":"apache","extensions":["mlp"]},"application/vnd.dolby.mobile.1":{"source":"iana"},"application/vnd.dolby.mobile.2":{"source":"iana"},"application/vnd.doremir.scorecloud-binary-document":{"source":"iana"},"application/vnd.dpgraph":{"source":"iana","extensions":["dpg"]},"application/vnd.dreamfactory":{"source":"iana","extensions":["dfac"]},"application/vnd.drive+json":{"source":"iana","compressible":true},"application/vnd.ds-keypoint":{"source":"apache","extensions":["kpxx"]},"application/vnd.dtg.local":{"source":"iana"},"application/vnd.dtg.local.flash":{"source":"iana"},"application/vnd.dtg.local.html":{"source":"iana"},"application/vnd.dvb.ait":{"source":"iana","extensions":["ait"]},"application/vnd.dvb.dvbisl+xml":{"source":"iana","compressible":true},"application/vnd.dvb.dvbj":{"source":"iana"},"application/vnd.dvb.esgcontainer":{"source":"iana"},"application/vnd.dvb.ipdcdftnotifaccess":{"source":"iana"},"application/vnd.dvb.ipdcesgaccess":{"source":"iana"},"application/vnd.dvb.ipdcesgaccess2":{"source":"iana"},"application/vnd.dvb.ipdcesgpdd":{"source":"iana"},"application/vnd.dvb.ipdcroaming":{"source":"iana"},"application/vnd.dvb.iptv.alfec-base":{"source":"iana"},"application/vnd.dvb.iptv.alfec-enhancement":{"source":"iana"},"application/vnd.dvb.notif-aggregate-root+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-container+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-generic+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-ia-msglist+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-ia-registration-request+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-ia-registration-response+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-init+xml":{"source":"iana","compressible":true},"application/vnd.dvb.pfr":{"source":"iana"},"application/vnd.dvb.service":{"source":"iana","extensions":["svc"]},"applica
tion/vnd.dxr":{"source":"iana"},"application/vnd.dynageo":{"source":"iana","extensions":["geo"]},"application/vnd.dzr":{"source":"iana"},"application/vnd.easykaraoke.cdgdownload":{"source":"iana"},"application/vnd.ecdis-update":{"source":"iana"},"application/vnd.ecip.rlp":{"source":"iana"},"application/vnd.ecowin.chart":{"source":"iana","extensions":["mag"]},"application/vnd.ecowin.filerequest":{"source":"iana"},"application/vnd.ecowin.fileupdate":{"source":"iana"},"application/vnd.ecowin.series":{"source":"iana"},"application/vnd.ecowin.seriesrequest":{"source":"iana"},"application/vnd.ecowin.seriesupdate":{"source":"iana"},"application/vnd.efi.img":{"source":"iana"},"application/vnd.efi.iso":{"source":"iana"},"application/vnd.emclient.accessrequest+xml":{"source":"iana","compressible":true},"application/vnd.enliven":{"source":"iana","extensions":["nml"]},"application/vnd.enphase.envoy":{"source":"iana"},"application/vnd.eprints.data+xml":{"source":"iana","compressible":true},"application/vnd.epson.esf":{"source":"iana","extensions":["esf"]},"application/vnd.epson.msf":{"source":"iana","extensions":["msf"]},"application/vnd.epson.quickanime":{"source":"iana","extensions":["qam"]},"application/vnd.epson.salt":{"source":"iana","extensions":["slt"]},"application/vnd.epson.ssf":{"source":"iana","extensions":["ssf"]},"application/vnd.ericsson.quickcall":{"source":"iana"},"application/vnd.espass-espass+zip":{"source":"iana","compressible":false},"application/vnd.eszigno3+xml":{"source":"iana","compressible":true,"extensions":["es3","et3"]},"application/vnd.etsi.aoc+xml":{"source":"iana","compressible":true},"application/vnd.etsi.asic-e+zip":{"source":"iana","compressible":false},"application/vnd.etsi.asic-s+zip":{"source":"iana","compressible":false},"application/vnd.etsi.cug+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvcommand+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvdiscovery+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvprofile+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsad-bc+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsad-cod+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsad-npvr+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvservice+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsync+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvueprofile+xml":{"source":"iana","compressible":true},"application/vnd.etsi.mcid+xml":{"source":"iana","compressible":true},"application/vnd.etsi.mheg5":{"source":"iana"},"application/vnd.etsi.overload-control-policy-dataset+xml":{"source":"iana","compressible":true},"application/vnd.etsi.pstn+xml":{"source":"iana","compressible":true},"application/vnd.etsi.sci+xml":{"source":"iana","compressible":true},"application/vnd.etsi.simservs+xml":{"source":"iana","compressible":true},"application/vnd.etsi.timestamp-token":{"source":"iana"},"application/vnd.etsi.tsl+xml":{"source":"iana","compressible":true},"application/vnd.etsi.tsl.der":{"source":"iana"},"application/vnd.eudora.data":{"source":"iana"},"application/vnd.evolv.ecig.profile":{"source":"iana"},"application/vnd.evolv.ecig.settings":{"source":"iana"},"application/vnd.evolv.ecig.theme":{"source":"iana"},"application/vnd.exstream-empower+zip":{"source":"iana","compressible":false},"application/vnd.exstream-package":{"source":"iana"},"application/vnd.ezpix-album":{"source":"iana","extensions":["ez2"]},"a
pplication/vnd.ezpix-package":{"source":"iana","extensions":["ez3"]},"application/vnd.f-secure.mobile":{"source":"iana"},"application/vnd.fastcopy-disk-image":{"source":"iana"},"application/vnd.fdf":{"source":"iana","extensions":["fdf"]},"application/vnd.fdsn.mseed":{"source":"iana","extensions":["mseed"]},"application/vnd.fdsn.seed":{"source":"iana","extensions":["seed","dataless"]},"application/vnd.ffsns":{"source":"iana"},"application/vnd.ficlab.flb+zip":{"source":"iana","compressible":false},"application/vnd.filmit.zfc":{"source":"iana"},"application/vnd.fints":{"source":"iana"},"application/vnd.firemonkeys.cloudcell":{"source":"iana"},"application/vnd.flographit":{"source":"iana","extensions":["gph"]},"application/vnd.fluxtime.clip":{"source":"iana","extensions":["ftc"]},"application/vnd.font-fontforge-sfd":{"source":"iana"},"application/vnd.framemaker":{"source":"iana","extensions":["fm","frame","maker","book"]},"application/vnd.frogans.fnc":{"source":"iana","extensions":["fnc"]},"application/vnd.frogans.ltf":{"source":"iana","extensions":["ltf"]},"application/vnd.fsc.weblaunch":{"source":"iana","extensions":["fsc"]},"application/vnd.fujifilm.fb.docuworks":{"source":"iana"},"application/vnd.fujifilm.fb.docuworks.binder":{"source":"iana"},"application/vnd.fujifilm.fb.docuworks.container":{"source":"iana"},"application/vnd.fujifilm.fb.jfi+xml":{"source":"iana","compressible":true},"application/vnd.fujitsu.oasys":{"source":"iana","extensions":["oas"]},"application/vnd.fujitsu.oasys2":{"source":"iana","extensions":["oa2"]},"application/vnd.fujitsu.oasys3":{"source":"iana","extensions":["oa3"]},"application/vnd.fujitsu.oasysgp":{"source":"iana","extensions":["fg5"]},"application/vnd.fujitsu.oasysprs":{"source":"iana","extensions":["bh2"]},"application/vnd.fujixerox.art-ex":{"source":"iana"},"application/vnd.fujixerox.art4":{"source":"iana"},"application/vnd.fujixerox.ddd":{"source":"iana","extensions":["ddd"]},"application/vnd.fujixerox.docuworks":{"source":"iana","extensions":["xdw"]},"application/vnd.fujixerox.docuworks.binder":{"source":"iana","extensions":["xbd"]},"application/vnd.fujixerox.docuworks.container":{"source":"iana"},"application/vnd.fujixerox.hbpl":{"source":"iana"},"application/vnd.fut-misnet":{"source":"iana"},"application/vnd.futoin+cbor":{"source":"iana"},"application/vnd.futoin+json":{"source":"iana","compressible":true},"application/vnd.fuzzysheet":{"source":"iana","extensions":["fzs"]},"application/vnd.genomatix.tuxedo":{"source":"iana","extensions":["txd"]},"application/vnd.gentics.grd+json":{"source":"iana","compressible":true},"application/vnd.geo+json":{"source":"iana","compressible":true},"application/vnd.geocube+xml":{"source":"iana","compressible":true},"application/vnd.geogebra.file":{"source":"iana","extensions":["ggb"]},"application/vnd.geogebra.slides":{"source":"iana"},"application/vnd.geogebra.tool":{"source":"iana","extensions":["ggt"]},"application/vnd.geometry-explorer":{"source":"iana","extensions":["gex","gre"]},"application/vnd.geonext":{"source":"iana","extensions":["gxt"]},"application/vnd.geoplan":{"source":"iana","extensions":["g2w"]},"application/vnd.geospace":{"source":"iana","extensions":["g3w"]},"application/vnd.gerber":{"source":"iana"},"application/vnd.globalplatform.card-content-mgt":{"source":"iana"},"application/vnd.globalplatform.card-content-mgt-response":{"source":"iana"},"application/vnd.gmx":{"source":"iana","extensions":["gmx"]},"application/vnd.google-apps.document":{"compressible":false,"extensions":["gdoc"]},"application/vnd.
google-apps.presentation":{"compressible":false,"extensions":["gslides"]},"application/vnd.google-apps.spreadsheet":{"compressible":false,"extensions":["gsheet"]},"application/vnd.google-earth.kml+xml":{"source":"iana","compressible":true,"extensions":["kml"]},"application/vnd.google-earth.kmz":{"source":"iana","compressible":false,"extensions":["kmz"]},"application/vnd.gov.sk.e-form+xml":{"source":"iana","compressible":true},"application/vnd.gov.sk.e-form+zip":{"source":"iana","compressible":false},"application/vnd.gov.sk.xmldatacontainer+xml":{"source":"iana","compressible":true},"application/vnd.grafeq":{"source":"iana","extensions":["gqf","gqs"]},"application/vnd.gridmp":{"source":"iana"},"application/vnd.groove-account":{"source":"iana","extensions":["gac"]},"application/vnd.groove-help":{"source":"iana","extensions":["ghf"]},"application/vnd.groove-identity-message":{"source":"iana","extensions":["gim"]},"application/vnd.groove-injector":{"source":"iana","extensions":["grv"]},"application/vnd.groove-tool-message":{"source":"iana","extensions":["gtm"]},"application/vnd.groove-tool-template":{"source":"iana","extensions":["tpl"]},"application/vnd.groove-vcard":{"source":"iana","extensions":["vcg"]},"application/vnd.hal+json":{"source":"iana","compressible":true},"application/vnd.hal+xml":{"source":"iana","compressible":true,"extensions":["hal"]},"application/vnd.handheld-entertainment+xml":{"source":"iana","compressible":true,"extensions":["zmm"]},"application/vnd.hbci":{"source":"iana","extensions":["hbci"]},"application/vnd.hc+json":{"source":"iana","compressible":true},"application/vnd.hcl-bireports":{"source":"iana"},"application/vnd.hdt":{"source":"iana"},"application/vnd.heroku+json":{"source":"iana","compressible":true},"application/vnd.hhe.lesson-player":{"source":"iana","extensions":["les"]},"application/vnd.hp-hpgl":{"source":"iana","extensions":["hpgl"]},"application/vnd.hp-hpid":{"source":"iana","extensions":["hpid"]},"application/vnd.hp-hps":{"source":"iana","extensions":["hps"]},"application/vnd.hp-jlyt":{"source":"iana","extensions":["jlt"]},"application/vnd.hp-pcl":{"source":"iana","extensions":["pcl"]},"application/vnd.hp-pclxl":{"source":"iana","extensions":["pclxl"]},"application/vnd.httphone":{"source":"iana"},"application/vnd.hydrostatix.sof-data":{"source":"iana","extensions":["sfd-hdstx"]},"application/vnd.hyper+json":{"source":"iana","compressible":true},"application/vnd.hyper-item+json":{"source":"iana","compressible":true},"application/vnd.hyperdrive+json":{"source":"iana","compressible":true},"application/vnd.hzn-3d-crossword":{"source":"iana"},"application/vnd.ibm.afplinedata":{"source":"iana"},"application/vnd.ibm.electronic-media":{"source":"iana"},"application/vnd.ibm.minipay":{"source":"iana","extensions":["mpy"]},"application/vnd.ibm.modcap":{"source":"iana","extensions":["afp","listafp","list3820"]},"application/vnd.ibm.rights-management":{"source":"iana","extensions":["irm"]},"application/vnd.ibm.secure-container":{"source":"iana","extensions":["sc"]},"application/vnd.iccprofile":{"source":"iana","extensions":["icc","icm"]},"application/vnd.ieee.1905":{"source":"iana"},"application/vnd.igloader":{"source":"iana","extensions":["igl"]},"application/vnd.imagemeter.folder+zip":{"source":"iana","compressible":false},"application/vnd.imagemeter.image+zip":{"source":"iana","compressible":false},"application/vnd.immervision-ivp":{"source":"iana","extensions":["ivp"]},"application/vnd.immervision-ivu":{"source":"iana","extensions":["ivu"]},"application/vnd.ims.
imsccv1p1":{"source":"iana"},"application/vnd.ims.imsccv1p2":{"source":"iana"},"application/vnd.ims.imsccv1p3":{"source":"iana"},"application/vnd.ims.lis.v2.result+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolconsumerprofile+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolproxy+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolproxy.id+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolsettings+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolsettings.simple+json":{"source":"iana","compressible":true},"application/vnd.informedcontrol.rms+xml":{"source":"iana","compressible":true},"application/vnd.informix-visionary":{"source":"iana"},"application/vnd.infotech.project":{"source":"iana"},"application/vnd.infotech.project+xml":{"source":"iana","compressible":true},"application/vnd.innopath.wamp.notification":{"source":"iana"},"application/vnd.insors.igm":{"source":"iana","extensions":["igm"]},"application/vnd.intercon.formnet":{"source":"iana","extensions":["xpw","xpx"]},"application/vnd.intergeo":{"source":"iana","extensions":["i2g"]},"application/vnd.intertrust.digibox":{"source":"iana"},"application/vnd.intertrust.nncp":{"source":"iana"},"application/vnd.intu.qbo":{"source":"iana","extensions":["qbo"]},"application/vnd.intu.qfx":{"source":"iana","extensions":["qfx"]},"application/vnd.iptc.g2.catalogitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.conceptitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.knowledgeitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.newsitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.newsmessage+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.packageitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.planningitem+xml":{"source":"iana","compressible":true},"application/vnd.ipunplugged.rcprofile":{"source":"iana","extensions":["rcprofile"]},"application/vnd.irepository.package+xml":{"source":"iana","compressible":true,"extensions":["irp"]},"application/vnd.is-xpr":{"source":"iana","extensions":["xpr"]},"application/vnd.isac.fcs":{"source":"iana","extensions":["fcs"]},"application/vnd.iso11783-10+zip":{"source":"iana","compressible":false},"application/vnd.jam":{"source":"iana","extensions":["jam"]},"application/vnd.japannet-directory-service":{"source":"iana"},"application/vnd.japannet-jpnstore-wakeup":{"source":"iana"},"application/vnd.japannet-payment-wakeup":{"source":"iana"},"application/vnd.japannet-registration":{"source":"iana"},"application/vnd.japannet-registration-wakeup":{"source":"iana"},"application/vnd.japannet-setstore-wakeup":{"source":"iana"},"application/vnd.japannet-verification":{"source":"iana"},"application/vnd.japannet-verification-wakeup":{"source":"iana"},"application/vnd.jcp.javame.midlet-rms":{"source":"iana","extensions":["rms"]},"application/vnd.jisp":{"source":"iana","extensions":["jisp"]},"application/vnd.joost.joda-archive":{"source":"iana","extensions":["joda"]},"application/vnd.jsk.isdn-ngn":{"source":"iana"},"application/vnd.kahootz":{"source":"iana","extensions":["ktz","ktr"]},"application/vnd.kde.karbon":{"source":"iana","extensions":["karbon"]},"application/vnd.kde.kchart":{"source":"iana","extensions":["chrt"]},"application/vnd.kde.kformula":{"source":"iana","extensions":["kfo"]},"application/vnd.kde.kivio":{"source":"iana","extensions":["flw"]},"application/
vnd.kde.kontour":{"source":"iana","extensions":["kon"]},"application/vnd.kde.kpresenter":{"source":"iana","extensions":["kpr","kpt"]},"application/vnd.kde.kspread":{"source":"iana","extensions":["ksp"]},"application/vnd.kde.kword":{"source":"iana","extensions":["kwd","kwt"]},"application/vnd.kenameaapp":{"source":"iana","extensions":["htke"]},"application/vnd.kidspiration":{"source":"iana","extensions":["kia"]},"application/vnd.kinar":{"source":"iana","extensions":["kne","knp"]},"application/vnd.koan":{"source":"iana","extensions":["skp","skd","skt","skm"]},"application/vnd.kodak-descriptor":{"source":"iana","extensions":["sse"]},"application/vnd.las":{"source":"iana"},"application/vnd.las.las+json":{"source":"iana","compressible":true},"application/vnd.las.las+xml":{"source":"iana","compressible":true,"extensions":["lasxml"]},"application/vnd.laszip":{"source":"iana"},"application/vnd.leap+json":{"source":"iana","compressible":true},"application/vnd.liberty-request+xml":{"source":"iana","compressible":true},"application/vnd.llamagraphics.life-balance.desktop":{"source":"iana","extensions":["lbd"]},"application/vnd.llamagraphics.life-balance.exchange+xml":{"source":"iana","compressible":true,"extensions":["lbe"]},"application/vnd.logipipe.circuit+zip":{"source":"iana","compressible":false},"application/vnd.loom":{"source":"iana"},"application/vnd.lotus-1-2-3":{"source":"iana","extensions":["123"]},"application/vnd.lotus-approach":{"source":"iana","extensions":["apr"]},"application/vnd.lotus-freelance":{"source":"iana","extensions":["pre"]},"application/vnd.lotus-notes":{"source":"iana","extensions":["nsf"]},"application/vnd.lotus-organizer":{"source":"iana","extensions":["org"]},"application/vnd.lotus-screencam":{"source":"iana","extensions":["scm"]},"application/vnd.lotus-wordpro":{"source":"iana","extensions":["lwp"]},"application/vnd.macports.portpkg":{"source":"iana","extensions":["portpkg"]},"application/vnd.mapbox-vector-tile":{"source":"iana","extensions":["mvt"]},"application/vnd.marlin.drm.actiontoken+xml":{"source":"iana","compressible":true},"application/vnd.marlin.drm.conftoken+xml":{"source":"iana","compressible":true},"application/vnd.marlin.drm.license+xml":{"source":"iana","compressible":true},"application/vnd.marlin.drm.mdcf":{"source":"iana"},"application/vnd.mason+json":{"source":"iana","compressible":true},"application/vnd.maxmind.maxmind-db":{"source":"iana"},"application/vnd.mcd":{"source":"iana","extensions":["mcd"]},"application/vnd.medcalcdata":{"source":"iana","extensions":["mc1"]},"application/vnd.mediastation.cdkey":{"source":"iana","extensions":["cdkey"]},"application/vnd.meridian-slingshot":{"source":"iana"},"application/vnd.mfer":{"source":"iana","extensions":["mwf"]},"application/vnd.mfmp":{"source":"iana","extensions":["mfm"]},"application/vnd.micro+json":{"source":"iana","compressible":true},"application/vnd.micrografx.flo":{"source":"iana","extensions":["flo"]},"application/vnd.micrografx.igx":{"source":"iana","extensions":["igx"]},"application/vnd.microsoft.portable-executable":{"source":"iana"},"application/vnd.microsoft.windows.thumbnail-cache":{"source":"iana"},"application/vnd.miele+json":{"source":"iana","compressible":true},"application/vnd.mif":{"source":"iana","extensions":["mif"]},"application/vnd.minisoft-hp3000-save":{"source":"iana"},"application/vnd.mitsubishi.misty-guard.trustweb":{"source":"iana"},"application/vnd.mobius.daf":{"source":"iana","extensions":["daf"]},"application/vnd.mobius.dis":{"source":"iana","extensions":["dis"]},"applicat
ion/vnd.mobius.mbk":{"source":"iana","extensions":["mbk"]},"application/vnd.mobius.mqy":{"source":"iana","extensions":["mqy"]},"application/vnd.mobius.msl":{"source":"iana","extensions":["msl"]},"application/vnd.mobius.plc":{"source":"iana","extensions":["plc"]},"application/vnd.mobius.txf":{"source":"iana","extensions":["txf"]},"application/vnd.mophun.application":{"source":"iana","extensions":["mpn"]},"application/vnd.mophun.certificate":{"source":"iana","extensions":["mpc"]},"application/vnd.motorola.flexsuite":{"source":"iana"},"application/vnd.motorola.flexsuite.adsi":{"source":"iana"},"application/vnd.motorola.flexsuite.fis":{"source":"iana"},"application/vnd.motorola.flexsuite.gotap":{"source":"iana"},"application/vnd.motorola.flexsuite.kmr":{"source":"iana"},"application/vnd.motorola.flexsuite.ttc":{"source":"iana"},"application/vnd.motorola.flexsuite.wem":{"source":"iana"},"application/vnd.motorola.iprm":{"source":"iana"},"application/vnd.mozilla.xul+xml":{"source":"iana","compressible":true,"extensions":["xul"]},"application/vnd.ms-3mfdocument":{"source":"iana"},"application/vnd.ms-artgalry":{"source":"iana","extensions":["cil"]},"application/vnd.ms-asf":{"source":"iana"},"application/vnd.ms-cab-compressed":{"source":"iana","extensions":["cab"]},"application/vnd.ms-color.iccprofile":{"source":"apache"},"application/vnd.ms-excel":{"source":"iana","compressible":false,"extensions":["xls","xlm","xla","xlc","xlt","xlw"]},"application/vnd.ms-excel.addin.macroenabled.12":{"source":"iana","extensions":["xlam"]},"application/vnd.ms-excel.sheet.binary.macroenabled.12":{"source":"iana","extensions":["xlsb"]},"application/vnd.ms-excel.sheet.macroenabled.12":{"source":"iana","extensions":["xlsm"]},"application/vnd.ms-excel.template.macroenabled.12":{"source":"iana","extensions":["xltm"]},"application/vnd.ms-fontobject":{"source":"iana","compressible":true,"extensions":["eot"]},"application/vnd.ms-htmlhelp":{"source":"iana","extensions":["chm"]},"application/vnd.ms-ims":{"source":"iana","extensions":["ims"]},"application/vnd.ms-lrm":{"source":"iana","extensions":["lrm"]},"application/vnd.ms-office.activex+xml":{"source":"iana","compressible":true},"application/vnd.ms-officetheme":{"source":"iana","extensions":["thmx"]},"application/vnd.ms-opentype":{"source":"apache","compressible":true},"application/vnd.ms-outlook":{"compressible":false,"extensions":["msg"]},"application/vnd.ms-package.obfuscated-opentype":{"source":"apache"},"application/vnd.ms-pki.seccat":{"source":"apache","extensions":["cat"]},"application/vnd.ms-pki.stl":{"source":"apache","extensions":["stl"]},"application/vnd.ms-playready.initiator+xml":{"source":"iana","compressible":true},"application/vnd.ms-powerpoint":{"source":"iana","compressible":false,"extensions":["ppt","pps","pot"]},"application/vnd.ms-powerpoint.addin.macroenabled.12":{"source":"iana","extensions":["ppam"]},"application/vnd.ms-powerpoint.presentation.macroenabled.12":{"source":"iana","extensions":["pptm"]},"application/vnd.ms-powerpoint.slide.macroenabled.12":{"source":"iana","extensions":["sldm"]},"application/vnd.ms-powerpoint.slideshow.macroenabled.12":{"source":"iana","extensions":["ppsm"]},"application/vnd.ms-powerpoint.template.macroenabled.12":{"source":"iana","extensions":["potm"]},"application/vnd.ms-printdevicecapabilities+xml":{"source":"iana","compressible":true},"application/vnd.ms-printing.printticket+xml":{"source":"apache","compressible":true},"application/vnd.ms-printschematicket+xml":{"source":"iana","compressible":true},"application/vnd.ms
-project":{"source":"iana","extensions":["mpp","mpt"]},"application/vnd.ms-tnef":{"source":"iana"},"application/vnd.ms-windows.devicepairing":{"source":"iana"},"application/vnd.ms-windows.nwprinting.oob":{"source":"iana"},"application/vnd.ms-windows.printerpairing":{"source":"iana"},"application/vnd.ms-windows.wsd.oob":{"source":"iana"},"application/vnd.ms-wmdrm.lic-chlg-req":{"source":"iana"},"application/vnd.ms-wmdrm.lic-resp":{"source":"iana"},"application/vnd.ms-wmdrm.meter-chlg-req":{"source":"iana"},"application/vnd.ms-wmdrm.meter-resp":{"source":"iana"},"application/vnd.ms-word.document.macroenabled.12":{"source":"iana","extensions":["docm"]},"application/vnd.ms-word.template.macroenabled.12":{"source":"iana","extensions":["dotm"]},"application/vnd.ms-works":{"source":"iana","extensions":["wps","wks","wcm","wdb"]},"application/vnd.ms-wpl":{"source":"iana","extensions":["wpl"]},"application/vnd.ms-xpsdocument":{"source":"iana","compressible":false,"extensions":["xps"]},"application/vnd.msa-disk-image":{"source":"iana"},"application/vnd.mseq":{"source":"iana","extensions":["mseq"]},"application/vnd.msign":{"source":"iana"},"application/vnd.multiad.creator":{"source":"iana"},"application/vnd.multiad.creator.cif":{"source":"iana"},"application/vnd.music-niff":{"source":"iana"},"application/vnd.musician":{"source":"iana","extensions":["mus"]},"application/vnd.muvee.style":{"source":"iana","extensions":["msty"]},"application/vnd.mynfc":{"source":"iana","extensions":["taglet"]},"application/vnd.nacamar.ybrid+json":{"source":"iana","compressible":true},"application/vnd.ncd.control":{"source":"iana"},"application/vnd.ncd.reference":{"source":"iana"},"application/vnd.nearst.inv+json":{"source":"iana","compressible":true},"application/vnd.nebumind.line":{"source":"iana"},"application/vnd.nervana":{"source":"iana"},"application/vnd.netfpx":{"source":"iana"},"application/vnd.neurolanguage.nlu":{"source":"iana","extensions":["nlu"]},"application/vnd.nimn":{"source":"iana"},"application/vnd.nintendo.nitro.rom":{"source":"iana"},"application/vnd.nintendo.snes.rom":{"source":"iana"},"application/vnd.nitf":{"source":"iana","extensions":["ntf","nitf"]},"application/vnd.noblenet-directory":{"source":"iana","extensions":["nnd"]},"application/vnd.noblenet-sealer":{"source":"iana","extensions":["nns"]},"application/vnd.noblenet-web":{"source":"iana","extensions":["nnw"]},"application/vnd.nokia.catalogs":{"source":"iana"},"application/vnd.nokia.conml+wbxml":{"source":"iana"},"application/vnd.nokia.conml+xml":{"source":"iana","compressible":true},"application/vnd.nokia.iptv.config+xml":{"source":"iana","compressible":true},"application/vnd.nokia.isds-radio-presets":{"source":"iana"},"application/vnd.nokia.landmark+wbxml":{"source":"iana"},"application/vnd.nokia.landmark+xml":{"source":"iana","compressible":true},"application/vnd.nokia.landmarkcollection+xml":{"source":"iana","compressible":true},"application/vnd.nokia.n-gage.ac+xml":{"source":"iana","compressible":true,"extensions":["ac"]},"application/vnd.nokia.n-gage.data":{"source":"iana","extensions":["ngdat"]},"application/vnd.nokia.n-gage.symbian.install":{"source":"iana","extensions":["n-gage"]},"application/vnd.nokia.ncd":{"source":"iana"},"application/vnd.nokia.pcd+wbxml":{"source":"iana"},"application/vnd.nokia.pcd+xml":{"source":"iana","compressible":true},"application/vnd.nokia.radio-preset":{"source":"iana","extensions":["rpst"]},"application/vnd.nokia.radio-presets":{"source":"iana","extensions":["rpss"]},"application/vnd.novadigm.edm":{"source
":"iana","extensions":["edm"]},"application/vnd.novadigm.edx":{"source":"iana","extensions":["edx"]},"application/vnd.novadigm.ext":{"source":"iana","extensions":["ext"]},"application/vnd.ntt-local.content-share":{"source":"iana"},"application/vnd.ntt-local.file-transfer":{"source":"iana"},"application/vnd.ntt-local.ogw_remote-access":{"source":"iana"},"application/vnd.ntt-local.sip-ta_remote":{"source":"iana"},"application/vnd.ntt-local.sip-ta_tcp_stream":{"source":"iana"},"application/vnd.oasis.opendocument.chart":{"source":"iana","extensions":["odc"]},"application/vnd.oasis.opendocument.chart-template":{"source":"iana","extensions":["otc"]},"application/vnd.oasis.opendocument.database":{"source":"iana","extensions":["odb"]},"application/vnd.oasis.opendocument.formula":{"source":"iana","extensions":["odf"]},"application/vnd.oasis.opendocument.formula-template":{"source":"iana","extensions":["odft"]},"application/vnd.oasis.opendocument.graphics":{"source":"iana","compressible":false,"extensions":["odg"]},"application/vnd.oasis.opendocument.graphics-template":{"source":"iana","extensions":["otg"]},"application/vnd.oasis.opendocument.image":{"source":"iana","extensions":["odi"]},"application/vnd.oasis.opendocument.image-template":{"source":"iana","extensions":["oti"]},"application/vnd.oasis.opendocument.presentation":{"source":"iana","compressible":false,"extensions":["odp"]},"application/vnd.oasis.opendocument.presentation-template":{"source":"iana","extensions":["otp"]},"application/vnd.oasis.opendocument.spreadsheet":{"source":"iana","compressible":false,"extensions":["ods"]},"application/vnd.oasis.opendocument.spreadsheet-template":{"source":"iana","extensions":["ots"]},"application/vnd.oasis.opendocument.text":{"source":"iana","compressible":false,"extensions":["odt"]},"application/vnd.oasis.opendocument.text-master":{"source":"iana","extensions":["odm"]},"application/vnd.oasis.opendocument.text-template":{"source":"iana","extensions":["ott"]},"application/vnd.oasis.opendocument.text-web":{"source":"iana","extensions":["oth"]},"application/vnd.obn":{"source":"iana"},"application/vnd.ocf+cbor":{"source":"iana"},"application/vnd.oci.image.manifest.v1+json":{"source":"iana","compressible":true},"application/vnd.oftn.l10n+json":{"source":"iana","compressible":true},"application/vnd.oipf.contentaccessdownload+xml":{"source":"iana","compressible":true},"application/vnd.oipf.contentaccessstreaming+xml":{"source":"iana","compressible":true},"application/vnd.oipf.cspg-hexbinary":{"source":"iana"},"application/vnd.oipf.dae.svg+xml":{"source":"iana","compressible":true},"application/vnd.oipf.dae.xhtml+xml":{"source":"iana","compressible":true},"application/vnd.oipf.mippvcontrolmessage+xml":{"source":"iana","compressible":true},"application/vnd.oipf.pae.gem":{"source":"iana"},"application/vnd.oipf.spdiscovery+xml":{"source":"iana","compressible":true},"application/vnd.oipf.spdlist+xml":{"source":"iana","compressible":true},"application/vnd.oipf.ueprofile+xml":{"source":"iana","compressible":true},"application/vnd.oipf.userprofile+xml":{"source":"iana","compressible":true},"application/vnd.olpc-sugar":{"source":"iana","extensions":["xo"]},"application/vnd.oma-scws-config":{"source":"iana"},"application/vnd.oma-scws-http-request":{"source":"iana"},"application/vnd.oma-scws-http-response":{"source":"iana"},"application/vnd.oma.bcast.associated-procedure-parameter+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.drm-trigger+xml":{"source":"iana","compressible":true},"application/vn
d.oma.bcast.imd+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.ltkm":{"source":"iana"},"application/vnd.oma.bcast.notification+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.provisioningtrigger":{"source":"iana"},"application/vnd.oma.bcast.sgboot":{"source":"iana"},"application/vnd.oma.bcast.sgdd+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.sgdu":{"source":"iana"},"application/vnd.oma.bcast.simple-symbol-container":{"source":"iana"},"application/vnd.oma.bcast.smartcard-trigger+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.sprov+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.stkm":{"source":"iana"},"application/vnd.oma.cab-address-book+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-feature-handler+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-pcc+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-subs-invite+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-user-prefs+xml":{"source":"iana","compressible":true},"application/vnd.oma.dcd":{"source":"iana"},"application/vnd.oma.dcdc":{"source":"iana"},"application/vnd.oma.dd2+xml":{"source":"iana","compressible":true,"extensions":["dd2"]},"application/vnd.oma.drm.risd+xml":{"source":"iana","compressible":true},"application/vnd.oma.group-usage-list+xml":{"source":"iana","compressible":true},"application/vnd.oma.lwm2m+cbor":{"source":"iana"},"application/vnd.oma.lwm2m+json":{"source":"iana","compressible":true},"application/vnd.oma.lwm2m+tlv":{"source":"iana"},"application/vnd.oma.pal+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.detailed-progress-report+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.final-report+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.groups+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.invocation-descriptor+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.optimized-progress-report+xml":{"source":"iana","compressible":true},"application/vnd.oma.push":{"source":"iana"},"application/vnd.oma.scidm.messages+xml":{"source":"iana","compressible":true},"application/vnd.oma.xcap-directory+xml":{"source":"iana","compressible":true},"application/vnd.omads-email+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.omads-file+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.omads-folder+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.omaloc-supl-init":{"source":"iana"},"application/vnd.onepager":{"source":"iana"},"application/vnd.onepagertamp":{"source":"iana"},"application/vnd.onepagertamx":{"source":"iana"},"application/vnd.onepagertat":{"source":"iana"},"application/vnd.onepagertatp":{"source":"iana"},"application/vnd.onepagertatx":{"source":"iana"},"application/vnd.openblox.game+xml":{"source":"iana","compressible":true,"extensions":["obgx"]},"application/vnd.openblox.game-binary":{"source":"iana"},"application/vnd.openeye.oeb":{"source":"iana"},"application/vnd.openofficeorg.extension":{"source":"apache","extensions":["oxt"]},"application/vnd.openstreetmap.data+xml":{"source":"iana","compressible":true,"extensions":["osm"]},"application/vnd.opentimestamps.ots":{"source":"iana"},"application/vnd.openxmlformats-officedocument.custom-properties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.customxmlproperties+xm
l":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawing+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.chart+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.extended-properties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.comments+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.presentation":{"source":"iana","compressible":false,"extensions":["pptx"]},"application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.presprops+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slide":{"source":"iana","extensions":["sldx"]},"application/vnd.openxmlformats-officedocument.presentationml.slide+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slideshow":{"source":"iana","extensions":["ppsx"]},"application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.tags+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.template":{"source":"iana","extensions":["potx"]},"application/vnd.openxmlformats-officedocument.presentationml.template.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.com
ments+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":{"source":"iana","compressible":false,"extensions":["xlsx"]},"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.template":{"source":"iana","extensions":["xltx"]},"application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.theme+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.themeoverride+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.vmldrawing":{"source":"iana"},"application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.document":{"source":"iana","compressible":false,"extensions":["docx"]},"application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-o
fficedocument.wordprocessingml.footer+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.template":{"source":"iana","extensions":["dotx"]},"application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-package.core-properties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-package.relationships+xml":{"source":"iana","compressible":true},"application/vnd.oracle.resource+json":{"source":"iana","compressible":true},"application/vnd.orange.indata":{"source":"iana"},"application/vnd.osa.netdeploy":{"source":"iana"},"application/vnd.osgeo.mapguide.package":{"source":"iana","extensions":["mgp"]},"application/vnd.osgi.bundle":{"source":"iana"},"application/vnd.osgi.dp":{"source":"iana","extensions":["dp"]},"application/vnd.osgi.subsystem":{"source":"iana","extensions":["esa"]},"application/vnd.otps.ct-kip+xml":{"source":"iana","compressible":true},"application/vnd.oxli.countgraph":{"source":"iana"},"application/vnd.pagerduty+json":{"source":"iana","compressible":true},"application/vnd.palm":{"source":"iana","extensions":["pdb","pqa","oprc"]},"application/vnd.panoply":{"source":"iana"},"application/vnd.paos.xml":{"source":"iana"},"application/vnd.patentdive":{"source":"iana"},"application/vnd.patientecommsdoc":{"source":"iana"},"application/vnd.pawaafile":{"source":"iana","extensions":["paw"]},"application/vnd.pcos":{"source":"iana"},"application/vnd.pg.format":{"source":"iana","extensions":["str"]},"application/vnd.pg.osasli":{"source":"iana","extensions":["ei6"]},"application/vnd.piaccess.application-licence":{"source":"iana"},"application/vnd.picsel":{"source":"iana","extensions":["efif"]},"application/vnd.pmi.widget":{"source":"iana","extensions":["wg"]},"application/vnd.poc.group-advertisement+xml":{"source":"iana","compressible":true},"application/vnd.pocketlearn":{"source":"iana","extensions":["plf"]},"application/vnd.powerbuilder6":{"source":"iana","extensions":["pbd"]},"application/vnd.powerbuilder6-s":{"source":"iana"},"application/vnd.powerbuilder7":{"source":"iana"},"application/vnd.powerbuilder7-s":{"source":"iana"},"application/vnd.powerbuilder75":{"source":"iana"},"application/vnd.powerbuilder75-s":{"source":"iana"},"application/vnd.preminet":{"source":"iana"},"application/vnd.previewsystems.box":{"source":"iana","extensions":["box"]},"application/vnd.proteus.magazine":{"source":"iana","extensions":["mgz"]},"application/vnd.psfs":{"source":"iana"},"application/vnd.publishare-delta-tree":{"source":"iana","extensions":["qps"]},"application/vnd.pvi.ptid1":{"source":"iana","extensions":["ptid"]},"application/vnd.pwg-multiplexed":{"source":"iana"},"application/vnd.pwg-xhtml-print+xml":{"source":"iana","compressible":true},"application/v
nd.qualcomm.brew-app-res":{"source":"iana"},"application/vnd.quarantainenet":{"source":"iana"},"application/vnd.quark.quarkxpress":{"source":"iana","extensions":["qxd","qxt","qwd","qwt","qxl","qxb"]},"application/vnd.quobject-quoxdocument":{"source":"iana"},"application/vnd.radisys.moml+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-conf+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-conn+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-dialog+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-stream+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-conf+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-base+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-fax-detect+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-fax-sendrecv+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-group+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-speech+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-transform+xml":{"source":"iana","compressible":true},"application/vnd.rainstor.data":{"source":"iana"},"application/vnd.rapid":{"source":"iana"},"application/vnd.rar":{"source":"iana","extensions":["rar"]},"application/vnd.realvnc.bed":{"source":"iana","extensions":["bed"]},"application/vnd.recordare.musicxml":{"source":"iana","extensions":["mxl"]},"application/vnd.recordare.musicxml+xml":{"source":"iana","compressible":true,"extensions":["musicxml"]},"application/vnd.renlearn.rlprint":{"source":"iana"},"application/vnd.resilient.logic":{"source":"iana"},"application/vnd.restful+json":{"source":"iana","compressible":true},"application/vnd.rig.cryptonote":{"source":"iana","extensions":["cryptonote"]},"application/vnd.rim.cod":{"source":"apache","extensions":["cod"]},"application/vnd.rn-realmedia":{"source":"apache","extensions":["rm"]},"application/vnd.rn-realmedia-vbr":{"source":"apache","extensions":["rmvb"]},"application/vnd.route66.link66+xml":{"source":"iana","compressible":true,"extensions":["link66"]},"application/vnd.rs-274x":{"source":"iana"},"application/vnd.ruckus.download":{"source":"iana"},"application/vnd.s3sms":{"source":"iana"},"application/vnd.sailingtracker.track":{"source":"iana","extensions":["st"]},"application/vnd.sar":{"source":"iana"},"application/vnd.sbm.cid":{"source":"iana"},"application/vnd.sbm.mid2":{"source":"iana"},"application/vnd.scribus":{"source":"iana"},"application/vnd.sealed.3df":{"source":"iana"},"application/vnd.sealed.csf":{"source":"iana"},"application/vnd.sealed.doc":{"source":"iana"},"application/vnd.sealed.eml":{"source":"iana"},"application/vnd.sealed.mht":{"source":"iana"},"application/vnd.sealed.net":{"source":"iana"},"application/vnd.sealed.ppt":{"source":"iana"},"application/vnd.sealed.tiff":{"source":"iana"},"application/vnd.sealed.xls":{"source":"iana"},"application/vnd.sealedmedia.softseal.html":{"source":"iana"},"application/vnd.sealedmedia.softseal.pdf":{"source":"iana"},"application/vnd.seemail":{"source":"iana","extensions":["see"]},"application/vnd.seis+json":{"source":"iana","compressible":true},"appli
cation/vnd.sema":{"source":"iana","extensions":["sema"]},"application/vnd.semd":{"source":"iana","extensions":["semd"]},"application/vnd.semf":{"source":"iana","extensions":["semf"]},"application/vnd.shade-save-file":{"source":"iana"},"application/vnd.shana.informed.formdata":{"source":"iana","extensions":["ifm"]},"application/vnd.shana.informed.formtemplate":{"source":"iana","extensions":["itp"]},"application/vnd.shana.informed.interchange":{"source":"iana","extensions":["iif"]},"application/vnd.shana.informed.package":{"source":"iana","extensions":["ipk"]},"application/vnd.shootproof+json":{"source":"iana","compressible":true},"application/vnd.shopkick+json":{"source":"iana","compressible":true},"application/vnd.shp":{"source":"iana"},"application/vnd.shx":{"source":"iana"},"application/vnd.sigrok.session":{"source":"iana"},"application/vnd.simtech-mindmapper":{"source":"iana","extensions":["twd","twds"]},"application/vnd.siren+json":{"source":"iana","compressible":true},"application/vnd.smaf":{"source":"iana","extensions":["mmf"]},"application/vnd.smart.notebook":{"source":"iana"},"application/vnd.smart.teacher":{"source":"iana","extensions":["teacher"]},"application/vnd.snesdev-page-table":{"source":"iana"},"application/vnd.software602.filler.form+xml":{"source":"iana","compressible":true,"extensions":["fo"]},"application/vnd.software602.filler.form-xml-zip":{"source":"iana"},"application/vnd.solent.sdkm+xml":{"source":"iana","compressible":true,"extensions":["sdkm","sdkd"]},"application/vnd.spotfire.dxp":{"source":"iana","extensions":["dxp"]},"application/vnd.spotfire.sfs":{"source":"iana","extensions":["sfs"]},"application/vnd.sqlite3":{"source":"iana"},"application/vnd.sss-cod":{"source":"iana"},"application/vnd.sss-dtf":{"source":"iana"},"application/vnd.sss-ntf":{"source":"iana"},"application/vnd.stardivision.calc":{"source":"apache","extensions":["sdc"]},"application/vnd.stardivision.draw":{"source":"apache","extensions":["sda"]},"application/vnd.stardivision.impress":{"source":"apache","extensions":["sdd"]},"application/vnd.stardivision.math":{"source":"apache","extensions":["smf"]},"application/vnd.stardivision.writer":{"source":"apache","extensions":["sdw","vor"]},"application/vnd.stardivision.writer-global":{"source":"apache","extensions":["sgl"]},"application/vnd.stepmania.package":{"source":"iana","extensions":["smzip"]},"application/vnd.stepmania.stepchart":{"source":"iana","extensions":["sm"]},"application/vnd.street-stream":{"source":"iana"},"application/vnd.sun.wadl+xml":{"source":"iana","compressible":true,"extensions":["wadl"]},"application/vnd.sun.xml.calc":{"source":"apache","extensions":["sxc"]},"application/vnd.sun.xml.calc.template":{"source":"apache","extensions":["stc"]},"application/vnd.sun.xml.draw":{"source":"apache","extensions":["sxd"]},"application/vnd.sun.xml.draw.template":{"source":"apache","extensions":["std"]},"application/vnd.sun.xml.impress":{"source":"apache","extensions":["sxi"]},"application/vnd.sun.xml.impress.template":{"source":"apache","extensions":["sti"]},"application/vnd.sun.xml.math":{"source":"apache","extensions":["sxm"]},"application/vnd.sun.xml.writer":{"source":"apache","extensions":["sxw"]},"application/vnd.sun.xml.writer.global":{"source":"apache","extensions":["sxg"]},"application/vnd.sun.xml.writer.template":{"source":"apache","extensions":["stw"]},"application/vnd.sus-calendar":{"source":"iana","extensions":["sus","susp"]},"application/vnd.svd":{"source":"iana","extensions":["svd"]},"application/vnd.swiftview-ics":{"source":"ian
a"},"application/vnd.sycle+xml":{"source":"iana","compressible":true},"application/vnd.symbian.install":{"source":"apache","extensions":["sis","sisx"]},"application/vnd.syncml+xml":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["xsm"]},"application/vnd.syncml.dm+wbxml":{"source":"iana","charset":"UTF-8","extensions":["bdm"]},"application/vnd.syncml.dm+xml":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["xdm"]},"application/vnd.syncml.dm.notification":{"source":"iana"},"application/vnd.syncml.dmddf+wbxml":{"source":"iana"},"application/vnd.syncml.dmddf+xml":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["ddf"]},"application/vnd.syncml.dmtnds+wbxml":{"source":"iana"},"application/vnd.syncml.dmtnds+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.syncml.ds.notification":{"source":"iana"},"application/vnd.tableschema+json":{"source":"iana","compressible":true},"application/vnd.tao.intent-module-archive":{"source":"iana","extensions":["tao"]},"application/vnd.tcpdump.pcap":{"source":"iana","extensions":["pcap","cap","dmp"]},"application/vnd.think-cell.ppttc+json":{"source":"iana","compressible":true},"application/vnd.tmd.mediaflex.api+xml":{"source":"iana","compressible":true},"application/vnd.tml":{"source":"iana"},"application/vnd.tmobile-livetv":{"source":"iana","extensions":["tmo"]},"application/vnd.tri.onesource":{"source":"iana"},"application/vnd.trid.tpt":{"source":"iana","extensions":["tpt"]},"application/vnd.triscape.mxs":{"source":"iana","extensions":["mxs"]},"application/vnd.trueapp":{"source":"iana","extensions":["tra"]},"application/vnd.truedoc":{"source":"iana"},"application/vnd.ubisoft.webplayer":{"source":"iana"},"application/vnd.ufdl":{"source":"iana","extensions":["ufd","ufdl"]},"application/vnd.uiq.theme":{"source":"iana","extensions":["utz"]},"application/vnd.umajin":{"source":"iana","extensions":["umj"]},"application/vnd.unity":{"source":"iana","extensions":["unityweb"]},"application/vnd.uoml+xml":{"source":"iana","compressible":true,"extensions":["uoml"]},"application/vnd.uplanet.alert":{"source":"iana"},"application/vnd.uplanet.alert-wbxml":{"source":"iana"},"application/vnd.uplanet.bearer-choice":{"source":"iana"},"application/vnd.uplanet.bearer-choice-wbxml":{"source":"iana"},"application/vnd.uplanet.cacheop":{"source":"iana"},"application/vnd.uplanet.cacheop-wbxml":{"source":"iana"},"application/vnd.uplanet.channel":{"source":"iana"},"application/vnd.uplanet.channel-wbxml":{"source":"iana"},"application/vnd.uplanet.list":{"source":"iana"},"application/vnd.uplanet.list-wbxml":{"source":"iana"},"application/vnd.uplanet.listcmd":{"source":"iana"},"application/vnd.uplanet.listcmd-wbxml":{"source":"iana"},"application/vnd.uplanet.signal":{"source":"iana"},"application/vnd.uri-map":{"source":"iana"},"application/vnd.valve.source.material":{"source":"iana"},"application/vnd.vcx":{"source":"iana","extensions":["vcx"]},"application/vnd.vd-study":{"source":"iana"},"application/vnd.vectorworks":{"source":"iana"},"application/vnd.vel+json":{"source":"iana","compressible":true},"application/vnd.verimatrix.vcas":{"source":"iana"},"application/vnd.veritone.aion+json":{"source":"iana","compressible":true},"application/vnd.veryant.thin":{"source":"iana"},"application/vnd.ves.encrypted":{"source":"iana"},"application/vnd.vidsoft.vidconference":{"source":"iana"},"application/vnd.visio":{"source":"iana","extensions":["vsd","vst","vss","vsw"]},"application/vnd.visionary":{"source":"iana","extensio
ns":["vis"]},"application/vnd.vividence.scriptfile":{"source":"iana"},"application/vnd.vsf":{"source":"iana","extensions":["vsf"]},"application/vnd.wap.sic":{"source":"iana"},"application/vnd.wap.slc":{"source":"iana"},"application/vnd.wap.wbxml":{"source":"iana","charset":"UTF-8","extensions":["wbxml"]},"application/vnd.wap.wmlc":{"source":"iana","extensions":["wmlc"]},"application/vnd.wap.wmlscriptc":{"source":"iana","extensions":["wmlsc"]},"application/vnd.webturbo":{"source":"iana","extensions":["wtb"]},"application/vnd.wfa.dpp":{"source":"iana"},"application/vnd.wfa.p2p":{"source":"iana"},"application/vnd.wfa.wsc":{"source":"iana"},"application/vnd.windows.devicepairing":{"source":"iana"},"application/vnd.wmc":{"source":"iana"},"application/vnd.wmf.bootstrap":{"source":"iana"},"application/vnd.wolfram.mathematica":{"source":"iana"},"application/vnd.wolfram.mathematica.package":{"source":"iana"},"application/vnd.wolfram.player":{"source":"iana","extensions":["nbp"]},"application/vnd.wordperfect":{"source":"iana","extensions":["wpd"]},"application/vnd.wqd":{"source":"iana","extensions":["wqd"]},"application/vnd.wrq-hp3000-labelled":{"source":"iana"},"application/vnd.wt.stf":{"source":"iana","extensions":["stf"]},"application/vnd.wv.csp+wbxml":{"source":"iana"},"application/vnd.wv.csp+xml":{"source":"iana","compressible":true},"application/vnd.wv.ssp+xml":{"source":"iana","compressible":true},"application/vnd.xacml+json":{"source":"iana","compressible":true},"application/vnd.xara":{"source":"iana","extensions":["xar"]},"application/vnd.xfdl":{"source":"iana","extensions":["xfdl"]},"application/vnd.xfdl.webform":{"source":"iana"},"application/vnd.xmi+xml":{"source":"iana","compressible":true},"application/vnd.xmpie.cpkg":{"source":"iana"},"application/vnd.xmpie.dpkg":{"source":"iana"},"application/vnd.xmpie.plan":{"source":"iana"},"application/vnd.xmpie.ppkg":{"source":"iana"},"application/vnd.xmpie.xlim":{"source":"iana"},"application/vnd.yamaha.hv-dic":{"source":"iana","extensions":["hvd"]},"application/vnd.yamaha.hv-script":{"source":"iana","extensions":["hvs"]},"application/vnd.yamaha.hv-voice":{"source":"iana","extensions":["hvp"]},"application/vnd.yamaha.openscoreformat":{"source":"iana","extensions":["osf"]},"application/vnd.yamaha.openscoreformat.osfpvg+xml":{"source":"iana","compressible":true,"extensions":["osfpvg"]},"application/vnd.yamaha.remote-setup":{"source":"iana"},"application/vnd.yamaha.smaf-audio":{"source":"iana","extensions":["saf"]},"application/vnd.yamaha.smaf-phrase":{"source":"iana","extensions":["spf"]},"application/vnd.yamaha.through-ngn":{"source":"iana"},"application/vnd.yamaha.tunnel-udpencap":{"source":"iana"},"application/vnd.yaoweme":{"source":"iana"},"application/vnd.yellowriver-custom-menu":{"source":"iana","extensions":["cmp"]},"application/vnd.youtube.yt":{"source":"iana"},"application/vnd.zul":{"source":"iana","extensions":["zir","zirz"]},"application/vnd.zzazz.deck+xml":{"source":"iana","compressible":true,"extensions":["zaz"]},"application/voicexml+xml":{"source":"iana","compressible":true,"extensions":["vxml"]},"application/voucher-cms+json":{"source":"iana","compressible":true},"application/vq-rtcpxr":{"source":"iana"},"application/wasm":{"source":"iana","compressible":true,"extensions":["wasm"]},"application/watcherinfo+xml":{"source":"iana","compressible":true},"application/webpush-options+json":{"source":"iana","compressible":true},"application/whoispp-query":{"source":"iana"},"application/whoispp-response":{"source":"iana"},"application/widget
":{"source":"iana","extensions":["wgt"]},"application/winhlp":{"source":"apache","extensions":["hlp"]},"application/wita":{"source":"iana"},"application/wordperfect5.1":{"source":"iana"},"application/wsdl+xml":{"source":"iana","compressible":true,"extensions":["wsdl"]},"application/wspolicy+xml":{"source":"iana","compressible":true,"extensions":["wspolicy"]},"application/x-7z-compressed":{"source":"apache","compressible":false,"extensions":["7z"]},"application/x-abiword":{"source":"apache","extensions":["abw"]},"application/x-ace-compressed":{"source":"apache","extensions":["ace"]},"application/x-amf":{"source":"apache"},"application/x-apple-diskimage":{"source":"apache","extensions":["dmg"]},"application/x-arj":{"compressible":false,"extensions":["arj"]},"application/x-authorware-bin":{"source":"apache","extensions":["aab","x32","u32","vox"]},"application/x-authorware-map":{"source":"apache","extensions":["aam"]},"application/x-authorware-seg":{"source":"apache","extensions":["aas"]},"application/x-bcpio":{"source":"apache","extensions":["bcpio"]},"application/x-bdoc":{"compressible":false,"extensions":["bdoc"]},"application/x-bittorrent":{"source":"apache","extensions":["torrent"]},"application/x-blorb":{"source":"apache","extensions":["blb","blorb"]},"application/x-bzip":{"source":"apache","compressible":false,"extensions":["bz"]},"application/x-bzip2":{"source":"apache","compressible":false,"extensions":["bz2","boz"]},"application/x-cbr":{"source":"apache","extensions":["cbr","cba","cbt","cbz","cb7"]},"application/x-cdlink":{"source":"apache","extensions":["vcd"]},"application/x-cfs-compressed":{"source":"apache","extensions":["cfs"]},"application/x-chat":{"source":"apache","extensions":["chat"]},"application/x-chess-pgn":{"source":"apache","extensions":["pgn"]},"application/x-chrome-extension":{"extensions":["crx"]},"application/x-cocoa":{"source":"nginx","extensions":["cco"]},"application/x-compress":{"source":"apache"},"application/x-conference":{"source":"apache","extensions":["nsc"]},"application/x-cpio":{"source":"apache","extensions":["cpio"]},"application/x-csh":{"source":"apache","extensions":["csh"]},"application/x-deb":{"compressible":false},"application/x-debian-package":{"source":"apache","extensions":["deb","udeb"]},"application/x-dgc-compressed":{"source":"apache","extensions":["dgc"]},"application/x-director":{"source":"apache","extensions":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"]},"application/x-doom":{"source":"apache","extensions":["wad"]},"application/x-dtbncx+xml":{"source":"apache","compressible":true,"extensions":["ncx"]},"application/x-dtbook+xml":{"source":"apache","compressible":true,"extensions":["dtb"]},"application/x-dtbresource+xml":{"source":"apache","compressible":true,"extensions":["res"]},"application/x-dvi":{"source":"apache","compressible":false,"extensions":["dvi"]},"application/x-envoy":{"source":"apache","extensions":["evy"]},"application/x-eva":{"source":"apache","extensions":["eva"]},"application/x-font-bdf":{"source":"apache","extensions":["bdf"]},"application/x-font-dos":{"source":"apache"},"application/x-font-framemaker":{"source":"apache"},"application/x-font-ghostscript":{"source":"apache","extensions":["gsf"]},"application/x-font-libgrx":{"source":"apache"},"application/x-font-linux-psf":{"source":"apache","extensions":["psf"]},"application/x-font-pcf":{"source":"apache","extensions":["pcf"]},"application/x-font-snf":{"source":"apache","extensions":["snf"]},"application/x-font-speedo":{"source":"apache"},"application/x-font-s
unos-news":{"source":"apache"},"application/x-font-type1":{"source":"apache","extensions":["pfa","pfb","pfm","afm"]},"application/x-font-vfont":{"source":"apache"},"application/x-freearc":{"source":"apache","extensions":["arc"]},"application/x-futuresplash":{"source":"apache","extensions":["spl"]},"application/x-gca-compressed":{"source":"apache","extensions":["gca"]},"application/x-glulx":{"source":"apache","extensions":["ulx"]},"application/x-gnumeric":{"source":"apache","extensions":["gnumeric"]},"application/x-gramps-xml":{"source":"apache","extensions":["gramps"]},"application/x-gtar":{"source":"apache","extensions":["gtar"]},"application/x-gzip":{"source":"apache"},"application/x-hdf":{"source":"apache","extensions":["hdf"]},"application/x-httpd-php":{"compressible":true,"extensions":["php"]},"application/x-install-instructions":{"source":"apache","extensions":["install"]},"application/x-iso9660-image":{"source":"apache","extensions":["iso"]},"application/x-iwork-keynote-sffkey":{"extensions":["key"]},"application/x-iwork-numbers-sffnumbers":{"extensions":["numbers"]},"application/x-iwork-pages-sffpages":{"extensions":["pages"]},"application/x-java-archive-diff":{"source":"nginx","extensions":["jardiff"]},"application/x-java-jnlp-file":{"source":"apache","compressible":false,"extensions":["jnlp"]},"application/x-javascript":{"compressible":true},"application/x-keepass2":{"extensions":["kdbx"]},"application/x-latex":{"source":"apache","compressible":false,"extensions":["latex"]},"application/x-lua-bytecode":{"extensions":["luac"]},"application/x-lzh-compressed":{"source":"apache","extensions":["lzh","lha"]},"application/x-makeself":{"source":"nginx","extensions":["run"]},"application/x-mie":{"source":"apache","extensions":["mie"]},"application/x-mobipocket-ebook":{"source":"apache","extensions":["prc","mobi"]},"application/x-mpegurl":{"compressible":false},"application/x-ms-application":{"source":"apache","extensions":["application"]},"application/x-ms-shortcut":{"source":"apache","extensions":["lnk"]},"application/x-ms-wmd":{"source":"apache","extensions":["wmd"]},"application/x-ms-wmz":{"source":"apache","extensions":["wmz"]},"application/x-ms-xbap":{"source":"apache","extensions":["xbap"]},"application/x-msaccess":{"source":"apache","extensions":["mdb"]},"application/x-msbinder":{"source":"apache","extensions":["obd"]},"application/x-mscardfile":{"source":"apache","extensions":["crd"]},"application/x-msclip":{"source":"apache","extensions":["clp"]},"application/x-msdos-program":{"extensions":["exe"]},"application/x-msdownload":{"source":"apache","extensions":["exe","dll","com","bat","msi"]},"application/x-msmediaview":{"source":"apache","extensions":["mvb","m13","m14"]},"application/x-msmetafile":{"source":"apache","extensions":["wmf","wmz","emf","emz"]},"application/x-msmoney":{"source":"apache","extensions":["mny"]},"application/x-mspublisher":{"source":"apache","extensions":["pub"]},"application/x-msschedule":{"source":"apache","extensions":["scd"]},"application/x-msterminal":{"source":"apache","extensions":["trm"]},"application/x-mswrite":{"source":"apache","extensions":["wri"]},"application/x-netcdf":{"source":"apache","extensions":["nc","cdf"]},"application/x-ns-proxy-autoconfig":{"compressible":true,"extensions":["pac"]},"application/x-nzb":{"source":"apache","extensions":["nzb"]},"application/x-perl":{"source":"nginx","extensions":["pl","pm"]},"application/x-pilot":{"source":"nginx","extensions":["prc","pdb"]},"application/x-pkcs12":{"source":"apache","compressible":false,"e
xtensions":["p12","pfx"]},"application/x-pkcs7-certificates":{"source":"apache","extensions":["p7b","spc"]},"application/x-pkcs7-certreqresp":{"source":"apache","extensions":["p7r"]},"application/x-pki-message":{"source":"iana"},"application/x-rar-compressed":{"source":"apache","compressible":false,"extensions":["rar"]},"application/x-redhat-package-manager":{"source":"nginx","extensions":["rpm"]},"application/x-research-info-systems":{"source":"apache","extensions":["ris"]},"application/x-sea":{"source":"nginx","extensions":["sea"]},"application/x-sh":{"source":"apache","compressible":true,"extensions":["sh"]},"application/x-shar":{"source":"apache","extensions":["shar"]},"application/x-shockwave-flash":{"source":"apache","compressible":false,"extensions":["swf"]},"application/x-silverlight-app":{"source":"apache","extensions":["xap"]},"application/x-sql":{"source":"apache","extensions":["sql"]},"application/x-stuffit":{"source":"apache","compressible":false,"extensions":["sit"]},"application/x-stuffitx":{"source":"apache","extensions":["sitx"]},"application/x-subrip":{"source":"apache","extensions":["srt"]},"application/x-sv4cpio":{"source":"apache","extensions":["sv4cpio"]},"application/x-sv4crc":{"source":"apache","extensions":["sv4crc"]},"application/x-t3vm-image":{"source":"apache","extensions":["t3"]},"application/x-tads":{"source":"apache","extensions":["gam"]},"application/x-tar":{"source":"apache","compressible":true,"extensions":["tar"]},"application/x-tcl":{"source":"apache","extensions":["tcl","tk"]},"application/x-tex":{"source":"apache","extensions":["tex"]},"application/x-tex-tfm":{"source":"apache","extensions":["tfm"]},"application/x-texinfo":{"source":"apache","extensions":["texinfo","texi"]},"application/x-tgif":{"source":"apache","extensions":["obj"]},"application/x-ustar":{"source":"apache","extensions":["ustar"]},"application/x-virtualbox-hdd":{"compressible":true,"extensions":["hdd"]},"application/x-virtualbox-ova":{"compressible":true,"extensions":["ova"]},"application/x-virtualbox-ovf":{"compressible":true,"extensions":["ovf"]},"application/x-virtualbox-vbox":{"compressible":true,"extensions":["vbox"]},"application/x-virtualbox-vbox-extpack":{"compressible":false,"extensions":["vbox-extpack"]},"application/x-virtualbox-vdi":{"compressible":true,"extensions":["vdi"]},"application/x-virtualbox-vhd":{"compressible":true,"extensions":["vhd"]},"application/x-virtualbox-vmdk":{"compressible":true,"extensions":["vmdk"]},"application/x-wais-source":{"source":"apache","extensions":["src"]},"application/x-web-app-manifest+json":{"compressible":true,"extensions":["webapp"]},"application/x-www-form-urlencoded":{"source":"iana","compressible":true},"application/x-x509-ca-cert":{"source":"iana","extensions":["der","crt","pem"]},"application/x-x509-ca-ra-cert":{"source":"iana"},"application/x-x509-next-ca-cert":{"source":"iana"},"application/x-xfig":{"source":"apache","extensions":["fig"]},"application/x-xliff+xml":{"source":"apache","compressible":true,"extensions":["xlf"]},"application/x-xpinstall":{"source":"apache","compressible":false,"extensions":["xpi"]},"application/x-xz":{"source":"apache","extensions":["xz"]},"application/x-zmachine":{"source":"apache","extensions":["z1","z2","z3","z4","z5","z6","z7","z8"]},"application/x400-bp":{"source":"iana"},"application/xacml+xml":{"source":"iana","compressible":true},"application/xaml+xml":{"source":"apache","compressible":true,"extensions":["xaml"]},"application/xcap-att+xml":{"source":"iana","compressible":true,"extensions":["x
av"]},"application/xcap-caps+xml":{"source":"iana","compressible":true,"extensions":["xca"]},"application/xcap-diff+xml":{"source":"iana","compressible":true,"extensions":["xdf"]},"application/xcap-el+xml":{"source":"iana","compressible":true,"extensions":["xel"]},"application/xcap-error+xml":{"source":"iana","compressible":true},"application/xcap-ns+xml":{"source":"iana","compressible":true,"extensions":["xns"]},"application/xcon-conference-info+xml":{"source":"iana","compressible":true},"application/xcon-conference-info-diff+xml":{"source":"iana","compressible":true},"application/xenc+xml":{"source":"iana","compressible":true,"extensions":["xenc"]},"application/xhtml+xml":{"source":"iana","compressible":true,"extensions":["xhtml","xht"]},"application/xhtml-voice+xml":{"source":"apache","compressible":true},"application/xliff+xml":{"source":"iana","compressible":true,"extensions":["xlf"]},"application/xml":{"source":"iana","compressible":true,"extensions":["xml","xsl","xsd","rng"]},"application/xml-dtd":{"source":"iana","compressible":true,"extensions":["dtd"]},"application/xml-external-parsed-entity":{"source":"iana"},"application/xml-patch+xml":{"source":"iana","compressible":true},"application/xmpp+xml":{"source":"iana","compressible":true},"application/xop+xml":{"source":"iana","compressible":true,"extensions":["xop"]},"application/xproc+xml":{"source":"apache","compressible":true,"extensions":["xpl"]},"application/xslt+xml":{"source":"iana","compressible":true,"extensions":["xsl","xslt"]},"application/xspf+xml":{"source":"apache","compressible":true,"extensions":["xspf"]},"application/xv+xml":{"source":"iana","compressible":true,"extensions":["mxml","xhvml","xvml","xvm"]},"application/yang":{"source":"iana","extensions":["yang"]},"application/yang-data+json":{"source":"iana","compressible":true},"application/yang-data+xml":{"source":"iana","compressible":true},"application/yang-patch+json":{"source":"iana","compressible":true},"application/yang-patch+xml":{"source":"iana","compressible":true},"application/yin+xml":{"source":"iana","compressible":true,"extensions":["yin"]},"application/zip":{"source":"iana","compressible":false,"extensions":["zip"]},"application/zlib":{"source":"iana"},"application/zstd":{"source":"iana"},"audio/1d-interleaved-parityfec":{"source":"iana"},"audio/32kadpcm":{"source":"iana"},"audio/3gpp":{"source":"iana","compressible":false,"extensions":["3gpp"]},"audio/3gpp2":{"source":"iana"},"audio/aac":{"source":"iana"},"audio/ac3":{"source":"iana"},"audio/adpcm":{"source":"apache","extensions":["adp"]},"audio/amr":{"source":"iana","extensions":["amr"]},"audio/amr-wb":{"source":"iana"},"audio/amr-wb+":{"source":"iana"},"audio/aptx":{"source":"iana"},"audio/asc":{"source":"iana"},"audio/atrac-advanced-lossless":{"source":"iana"},"audio/atrac-x":{"source":"iana"},"audio/atrac3":{"source":"iana"},"audio/basic":{"source":"iana","compressible":false,"extensions":["au","snd"]},"audio/bv16":{"source":"iana"},"audio/bv32":{"source":"iana"},"audio/clearmode":{"source":"iana"},"audio/cn":{"source":"iana"},"audio/dat12":{"source":"iana"},"audio/dls":{"source":"iana"},"audio/dsr-es201108":{"source":"iana"},"audio/dsr-es202050":{"source":"iana"},"audio/dsr-es202211":{"source":"iana"},"audio/dsr-es202212":{"source":"iana"},"audio/dv":{"source":"iana"},"audio/dvi4":{"source":"iana"},"audio/eac3":{"source":"iana"},"audio/encaprtp":{"source":"iana"},"audio/evrc":{"source":"iana"},"audio/evrc-qcp":{"source":"iana"},"audio/evrc0":{"source":"iana"},"audio/evrc1":{"source":"iana"},"audi
o/evrcb":{"source":"iana"},"audio/evrcb0":{"source":"iana"},"audio/evrcb1":{"source":"iana"},"audio/evrcnw":{"source":"iana"},"audio/evrcnw0":{"source":"iana"},"audio/evrcnw1":{"source":"iana"},"audio/evrcwb":{"source":"iana"},"audio/evrcwb0":{"source":"iana"},"audio/evrcwb1":{"source":"iana"},"audio/evs":{"source":"iana"},"audio/flexfec":{"source":"iana"},"audio/fwdred":{"source":"iana"},"audio/g711-0":{"source":"iana"},"audio/g719":{"source":"iana"},"audio/g722":{"source":"iana"},"audio/g7221":{"source":"iana"},"audio/g723":{"source":"iana"},"audio/g726-16":{"source":"iana"},"audio/g726-24":{"source":"iana"},"audio/g726-32":{"source":"iana"},"audio/g726-40":{"source":"iana"},"audio/g728":{"source":"iana"},"audio/g729":{"source":"iana"},"audio/g7291":{"source":"iana"},"audio/g729d":{"source":"iana"},"audio/g729e":{"source":"iana"},"audio/gsm":{"source":"iana"},"audio/gsm-efr":{"source":"iana"},"audio/gsm-hr-08":{"source":"iana"},"audio/ilbc":{"source":"iana"},"audio/ip-mr_v2.5":{"source":"iana"},"audio/isac":{"source":"apache"},"audio/l16":{"source":"iana"},"audio/l20":{"source":"iana"},"audio/l24":{"source":"iana","compressible":false},"audio/l8":{"source":"iana"},"audio/lpc":{"source":"iana"},"audio/melp":{"source":"iana"},"audio/melp1200":{"source":"iana"},"audio/melp2400":{"source":"iana"},"audio/melp600":{"source":"iana"},"audio/mhas":{"source":"iana"},"audio/midi":{"source":"apache","extensions":["mid","midi","kar","rmi"]},"audio/mobile-xmf":{"source":"iana","extensions":["mxmf"]},"audio/mp3":{"compressible":false,"extensions":["mp3"]},"audio/mp4":{"source":"iana","compressible":false,"extensions":["m4a","mp4a"]},"audio/mp4a-latm":{"source":"iana"},"audio/mpa":{"source":"iana"},"audio/mpa-robust":{"source":"iana"},"audio/mpeg":{"source":"iana","compressible":false,"extensions":["mpga","mp2","mp2a","mp3","m2a","m3a"]},"audio/mpeg4-generic":{"source":"iana"},"audio/musepack":{"source":"apache"},"audio/ogg":{"source":"iana","compressible":false,"extensions":["oga","ogg","spx","opus"]},"audio/opus":{"source":"iana"},"audio/parityfec":{"source":"iana"},"audio/pcma":{"source":"iana"},"audio/pcma-wb":{"source":"iana"},"audio/pcmu":{"source":"iana"},"audio/pcmu-wb":{"source":"iana"},"audio/prs.sid":{"source":"iana"},"audio/qcelp":{"source":"iana"},"audio/raptorfec":{"source":"iana"},"audio/red":{"source":"iana"},"audio/rtp-enc-aescm128":{"source":"iana"},"audio/rtp-midi":{"source":"iana"},"audio/rtploopback":{"source":"iana"},"audio/rtx":{"source":"iana"},"audio/s3m":{"source":"apache","extensions":["s3m"]},"audio/scip":{"source":"iana"},"audio/silk":{"source":"apache","extensions":["sil"]},"audio/smv":{"source":"iana"},"audio/smv-qcp":{"source":"iana"},"audio/smv0":{"source":"iana"},"audio/sofa":{"source":"iana"},"audio/sp-midi":{"source":"iana"},"audio/speex":{"source":"iana"},"audio/t140c":{"source":"iana"},"audio/t38":{"source":"iana"},"audio/telephone-event":{"source":"iana"},"audio/tetra_acelp":{"source":"iana"},"audio/tetra_acelp_bb":{"source":"iana"},"audio/tone":{"source":"iana"},"audio/tsvcis":{"source":"iana"},"audio/uemclip":{"source":"iana"},"audio/ulpfec":{"source":"iana"},"audio/usac":{"source":"iana"},"audio/vdvi":{"source":"iana"},"audio/vmr-wb":{"source":"iana"},"audio/vnd.3gpp.iufp":{"source":"iana"},"audio/vnd.4sb":{"source":"iana"},"audio/vnd.audiokoz":{"source":"iana"},"audio/vnd.celp":{"source":"iana"},"audio/vnd.cisco.nse":{"source":"iana"},"audio/vnd.cmles.radio-events":{"source":"iana"},"audio/vnd.cns.anp1":{"source":"iana"},"audio/vnd.cns.inf1":{"source":"iana"},"a
udio/vnd.dece.audio":{"source":"iana","extensions":["uva","uvva"]},"audio/vnd.digital-winds":{"source":"iana","extensions":["eol"]},"audio/vnd.dlna.adts":{"source":"iana"},"audio/vnd.dolby.heaac.1":{"source":"iana"},"audio/vnd.dolby.heaac.2":{"source":"iana"},"audio/vnd.dolby.mlp":{"source":"iana"},"audio/vnd.dolby.mps":{"source":"iana"},"audio/vnd.dolby.pl2":{"source":"iana"},"audio/vnd.dolby.pl2x":{"source":"iana"},"audio/vnd.dolby.pl2z":{"source":"iana"},"audio/vnd.dolby.pulse.1":{"source":"iana"},"audio/vnd.dra":{"source":"iana","extensions":["dra"]},"audio/vnd.dts":{"source":"iana","extensions":["dts"]},"audio/vnd.dts.hd":{"source":"iana","extensions":["dtshd"]},"audio/vnd.dts.uhd":{"source":"iana"},"audio/vnd.dvb.file":{"source":"iana"},"audio/vnd.everad.plj":{"source":"iana"},"audio/vnd.hns.audio":{"source":"iana"},"audio/vnd.lucent.voice":{"source":"iana","extensions":["lvp"]},"audio/vnd.ms-playready.media.pya":{"source":"iana","extensions":["pya"]},"audio/vnd.nokia.mobile-xmf":{"source":"iana"},"audio/vnd.nortel.vbk":{"source":"iana"},"audio/vnd.nuera.ecelp4800":{"source":"iana","extensions":["ecelp4800"]},"audio/vnd.nuera.ecelp7470":{"source":"iana","extensions":["ecelp7470"]},"audio/vnd.nuera.ecelp9600":{"source":"iana","extensions":["ecelp9600"]},"audio/vnd.octel.sbc":{"source":"iana"},"audio/vnd.presonus.multitrack":{"source":"iana"},"audio/vnd.qcelp":{"source":"iana"},"audio/vnd.rhetorex.32kadpcm":{"source":"iana"},"audio/vnd.rip":{"source":"iana","extensions":["rip"]},"audio/vnd.rn-realaudio":{"compressible":false},"audio/vnd.sealedmedia.softseal.mpeg":{"source":"iana"},"audio/vnd.vmx.cvsd":{"source":"iana"},"audio/vnd.wave":{"compressible":false},"audio/vorbis":{"source":"iana","compressible":false},"audio/vorbis-config":{"source":"iana"},"audio/wav":{"compressible":false,"extensions":["wav"]},"audio/wave":{"compressible":false,"extensions":["wav"]},"audio/webm":{"source":"apache","compressible":false,"extensions":["weba"]},"audio/x-aac":{"source":"apache","compressible":false,"extensions":["aac"]},"audio/x-aiff":{"source":"apache","extensions":["aif","aiff","aifc"]},"audio/x-caf":{"source":"apache","compressible":false,"extensions":["caf"]},"audio/x-flac":{"source":"apache","extensions":["flac"]},"audio/x-m4a":{"source":"nginx","extensions":["m4a"]},"audio/x-matroska":{"source":"apache","extensions":["mka"]},"audio/x-mpegurl":{"source":"apache","extensions":["m3u"]},"audio/x-ms-wax":{"source":"apache","extensions":["wax"]},"audio/x-ms-wma":{"source":"apache","extensions":["wma"]},"audio/x-pn-realaudio":{"source":"apache","extensions":["ram","ra"]},"audio/x-pn-realaudio-plugin":{"source":"apache","extensions":["rmp"]},"audio/x-realaudio":{"source":"nginx","extensions":["ra"]},"audio/x-tta":{"source":"apache"},"audio/x-wav":{"source":"apache","extensions":["wav"]},"audio/xm":{"source":"apache","extensions":["xm"]},"chemical/x-cdx":{"source":"apache","extensions":["cdx"]},"chemical/x-cif":{"source":"apache","extensions":["cif"]},"chemical/x-cmdf":{"source":"apache","extensions":["cmdf"]},"chemical/x-cml":{"source":"apache","extensions":["cml"]},"chemical/x-csml":{"source":"apache","extensions":["csml"]},"chemical/x-pdb":{"source":"apache"},"chemical/x-xyz":{"source":"apache","extensions":["xyz"]},"font/collection":{"source":"iana","extensions":["ttc"]},"font/otf":{"source":"iana","compressible":true,"extensions":["otf"]},"font/sfnt":{"source":"iana"},"font/ttf":{"source":"iana","compressible":true,"extensions":["ttf"]},"font/woff":{"source":"iana","extensions":["woff"]},"font/
woff2":{"source":"iana","extensions":["woff2"]},"image/aces":{"source":"iana","extensions":["exr"]},"image/apng":{"compressible":false,"extensions":["apng"]},"image/avci":{"source":"iana"},"image/avcs":{"source":"iana"},"image/avif":{"source":"iana","compressible":false,"extensions":["avif"]},"image/bmp":{"source":"iana","compressible":true,"extensions":["bmp"]},"image/cgm":{"source":"iana","extensions":["cgm"]},"image/dicom-rle":{"source":"iana","extensions":["drle"]},"image/emf":{"source":"iana","extensions":["emf"]},"image/fits":{"source":"iana","extensions":["fits"]},"image/g3fax":{"source":"iana","extensions":["g3"]},"image/gif":{"source":"iana","compressible":false,"extensions":["gif"]},"image/heic":{"source":"iana","extensions":["heic"]},"image/heic-sequence":{"source":"iana","extensions":["heics"]},"image/heif":{"source":"iana","extensions":["heif"]},"image/heif-sequence":{"source":"iana","extensions":["heifs"]},"image/hej2k":{"source":"iana","extensions":["hej2"]},"image/hsj2":{"source":"iana","extensions":["hsj2"]},"image/ief":{"source":"iana","extensions":["ief"]},"image/jls":{"source":"iana","extensions":["jls"]},"image/jp2":{"source":"iana","compressible":false,"extensions":["jp2","jpg2"]},"image/jpeg":{"source":"iana","compressible":false,"extensions":["jpeg","jpg","jpe"]},"image/jph":{"source":"iana","extensions":["jph"]},"image/jphc":{"source":"iana","extensions":["jhc"]},"image/jpm":{"source":"iana","compressible":false,"extensions":["jpm"]},"image/jpx":{"source":"iana","compressible":false,"extensions":["jpx","jpf"]},"image/jxr":{"source":"iana","extensions":["jxr"]},"image/jxra":{"source":"iana","extensions":["jxra"]},"image/jxrs":{"source":"iana","extensions":["jxrs"]},"image/jxs":{"source":"iana","extensions":["jxs"]},"image/jxsc":{"source":"iana","extensions":["jxsc"]},"image/jxsi":{"source":"iana","extensions":["jxsi"]},"image/jxss":{"source":"iana","extensions":["jxss"]},"image/ktx":{"source":"iana","extensions":["ktx"]},"image/ktx2":{"source":"iana","extensions":["ktx2"]},"image/naplps":{"source":"iana"},"image/pjpeg":{"compressible":false},"image/png":{"source":"iana","compressible":false,"extensions":["png"]},"image/prs.btif":{"source":"iana","extensions":["btif"]},"image/prs.pti":{"source":"iana","extensions":["pti"]},"image/pwg-raster":{"source":"iana"},"image/sgi":{"source":"apache","extensions":["sgi"]},"image/svg+xml":{"source":"iana","compressible":true,"extensions":["svg","svgz"]},"image/t38":{"source":"iana","extensions":["t38"]},"image/tiff":{"source":"iana","compressible":false,"extensions":["tif","tiff"]},"image/tiff-fx":{"source":"iana","extensions":["tfx"]},"image/vnd.adobe.photoshop":{"source":"iana","compressible":true,"extensions":["psd"]},"image/vnd.airzip.accelerator.azv":{"source":"iana","extensions":["azv"]},"image/vnd.cns.inf2":{"source":"iana"},"image/vnd.dece.graphic":{"source":"iana","extensions":["uvi","uvvi","uvg","uvvg"]},"image/vnd.djvu":{"source":"iana","extensions":["djvu","djv"]},"image/vnd.dvb.subtitle":{"source":"iana","extensions":["sub"]},"image/vnd.dwg":{"source":"iana","extensions":["dwg"]},"image/vnd.dxf":{"source":"iana","extensions":["dxf"]},"image/vnd.fastbidsheet":{"source":"iana","extensions":["fbs"]},"image/vnd.fpx":{"source":"iana","extensions":["fpx"]},"image/vnd.fst":{"source":"iana","extensions":["fst"]},"image/vnd.fujixerox.edmics-mmr":{"source":"iana","extensions":["mmr"]},"image/vnd.fujixerox.edmics-rlc":{"source":"iana","extensions":["rlc"]},"image/vnd.globalgraphics.pgb":{"source":"iana"},"image/vnd.microsoft.ic
on":{"source":"iana","compressible":true,"extensions":["ico"]},"image/vnd.mix":{"source":"iana"},"image/vnd.mozilla.apng":{"source":"iana"},"image/vnd.ms-dds":{"compressible":true,"extensions":["dds"]},"image/vnd.ms-modi":{"source":"iana","extensions":["mdi"]},"image/vnd.ms-photo":{"source":"apache","extensions":["wdp"]},"image/vnd.net-fpx":{"source":"iana","extensions":["npx"]},"image/vnd.pco.b16":{"source":"iana","extensions":["b16"]},"image/vnd.radiance":{"source":"iana"},"image/vnd.sealed.png":{"source":"iana"},"image/vnd.sealedmedia.softseal.gif":{"source":"iana"},"image/vnd.sealedmedia.softseal.jpg":{"source":"iana"},"image/vnd.svf":{"source":"iana"},"image/vnd.tencent.tap":{"source":"iana","extensions":["tap"]},"image/vnd.valve.source.texture":{"source":"iana","extensions":["vtf"]},"image/vnd.wap.wbmp":{"source":"iana","extensions":["wbmp"]},"image/vnd.xiff":{"source":"iana","extensions":["xif"]},"image/vnd.zbrush.pcx":{"source":"iana","extensions":["pcx"]},"image/webp":{"source":"apache","extensions":["webp"]},"image/wmf":{"source":"iana","extensions":["wmf"]},"image/x-3ds":{"source":"apache","extensions":["3ds"]},"image/x-cmu-raster":{"source":"apache","extensions":["ras"]},"image/x-cmx":{"source":"apache","extensions":["cmx"]},"image/x-freehand":{"source":"apache","extensions":["fh","fhc","fh4","fh5","fh7"]},"image/x-icon":{"source":"apache","compressible":true,"extensions":["ico"]},"image/x-jng":{"source":"nginx","extensions":["jng"]},"image/x-mrsid-image":{"source":"apache","extensions":["sid"]},"image/x-ms-bmp":{"source":"nginx","compressible":true,"extensions":["bmp"]},"image/x-pcx":{"source":"apache","extensions":["pcx"]},"image/x-pict":{"source":"apache","extensions":["pic","pct"]},"image/x-portable-anymap":{"source":"apache","extensions":["pnm"]},"image/x-portable-bitmap":{"source":"apache","extensions":["pbm"]},"image/x-portable-graymap":{"source":"apache","extensions":["pgm"]},"image/x-portable-pixmap":{"source":"apache","extensions":["ppm"]},"image/x-rgb":{"source":"apache","extensions":["rgb"]},"image/x-tga":{"source":"apache","extensions":["tga"]},"image/x-xbitmap":{"source":"apache","extensions":["xbm"]},"image/x-xcf":{"compressible":false},"image/x-xpixmap":{"source":"apache","extensions":["xpm"]},"image/x-xwindowdump":{"source":"apache","extensions":["xwd"]},"message/cpim":{"source":"iana"},"message/delivery-status":{"source":"iana"},"message/disposition-notification":{"source":"iana","extensions":["disposition-notification"]},"message/external-body":{"source":"iana"},"message/feedback-report":{"source":"iana"},"message/global":{"source":"iana","extensions":["u8msg"]},"message/global-delivery-status":{"source":"iana","extensions":["u8dsn"]},"message/global-disposition-notification":{"source":"iana","extensions":["u8mdn"]},"message/global-headers":{"source":"iana","extensions":["u8hdr"]},"message/http":{"source":"iana","compressible":false},"message/imdn+xml":{"source":"iana","compressible":true},"message/news":{"source":"iana"},"message/partial":{"source":"iana","compressible":false},"message/rfc822":{"source":"iana","compressible":true,"extensions":["eml","mime"]},"message/s-http":{"source":"iana"},"message/sip":{"source":"iana"},"message/sipfrag":{"source":"iana"},"message/tracking-status":{"source":"iana"},"message/vnd.si.simp":{"source":"iana"},"message/vnd.wfa.wsc":{"source":"iana","extensions":["wsc"]},"model/3mf":{"source":"iana","extensions":["3mf"]},"model/e57":{"source":"iana"},"model/gltf+json":{"source":"iana","compressible":true,"extensions":["gltf"]},"
model/gltf-binary":{"source":"iana","compressible":true,"extensions":["glb"]},"model/iges":{"source":"iana","compressible":false,"extensions":["igs","iges"]},"model/mesh":{"source":"iana","compressible":false,"extensions":["msh","mesh","silo"]},"model/mtl":{"source":"iana","extensions":["mtl"]},"model/obj":{"source":"iana","extensions":["obj"]},"model/step":{"source":"iana"},"model/step+xml":{"source":"iana","compressible":true,"extensions":["stpx"]},"model/step+zip":{"source":"iana","compressible":false,"extensions":["stpz"]},"model/step-xml+zip":{"source":"iana","compressible":false,"extensions":["stpxz"]},"model/stl":{"source":"iana","extensions":["stl"]},"model/vnd.collada+xml":{"source":"iana","compressible":true,"extensions":["dae"]},"model/vnd.dwf":{"source":"iana","extensions":["dwf"]},"model/vnd.flatland.3dml":{"source":"iana"},"model/vnd.gdl":{"source":"iana","extensions":["gdl"]},"model/vnd.gs-gdl":{"source":"apache"},"model/vnd.gs.gdl":{"source":"iana"},"model/vnd.gtw":{"source":"iana","extensions":["gtw"]},"model/vnd.moml+xml":{"source":"iana","compressible":true},"model/vnd.mts":{"source":"iana","extensions":["mts"]},"model/vnd.opengex":{"source":"iana","extensions":["ogex"]},"model/vnd.parasolid.transmit.binary":{"source":"iana","extensions":["x_b"]},"model/vnd.parasolid.transmit.text":{"source":"iana","extensions":["x_t"]},"model/vnd.pytha.pyox":{"source":"iana"},"model/vnd.rosette.annotated-data-model":{"source":"iana"},"model/vnd.sap.vds":{"source":"iana","extensions":["vds"]},"model/vnd.usdz+zip":{"source":"iana","compressible":false,"extensions":["usdz"]},"model/vnd.valve.source.compiled-map":{"source":"iana","extensions":["bsp"]},"model/vnd.vtu":{"source":"iana","extensions":["vtu"]},"model/vrml":{"source":"iana","compressible":false,"extensions":["wrl","vrml"]},"model/x3d+binary":{"source":"apache","compressible":false,"extensions":["x3db","x3dbz"]},"model/x3d+fastinfoset":{"source":"iana","extensions":["x3db"]},"model/x3d+vrml":{"source":"apache","compressible":false,"extensions":["x3dv","x3dvz"]},"model/x3d+xml":{"source":"iana","compressible":true,"extensions":["x3d","x3dz"]},"model/x3d-vrml":{"source":"iana","extensions":["x3dv"]},"multipart/alternative":{"source":"iana","compressible":false},"multipart/appledouble":{"source":"iana"},"multipart/byteranges":{"source":"iana"},"multipart/digest":{"source":"iana"},"multipart/encrypted":{"source":"iana","compressible":false},"multipart/form-data":{"source":"iana","compressible":false},"multipart/header-set":{"source":"iana"},"multipart/mixed":{"source":"iana"},"multipart/multilingual":{"source":"iana"},"multipart/parallel":{"source":"iana"},"multipart/related":{"source":"iana","compressible":false},"multipart/report":{"source":"iana"},"multipart/signed":{"source":"iana","compressible":false},"multipart/vnd.bint.med-plus":{"source":"iana"},"multipart/voice-message":{"source":"iana"},"multipart/x-mixed-replace":{"source":"iana"},"text/1d-interleaved-parityfec":{"source":"iana"},"text/cache-manifest":{"source":"iana","compressible":true,"extensions":["appcache","manifest"]},"text/calendar":{"source":"iana","extensions":["ics","ifb"]},"text/calender":{"compressible":true},"text/cmd":{"compressible":true},"text/coffeescript":{"extensions":["coffee","litcoffee"]},"text/cql":{"source":"iana"},"text/cql-expression":{"source":"iana"},"text/cql-identifier":{"source":"iana"},"text/css":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["css"]},"text/csv":{"source":"iana","compressible":true,"extensions":["csv"]},
"text/csv-schema":{"source":"iana"},"text/directory":{"source":"iana"},"text/dns":{"source":"iana"},"text/ecmascript":{"source":"iana"},"text/encaprtp":{"source":"iana"},"text/enriched":{"source":"iana"},"text/fhirpath":{"source":"iana"},"text/flexfec":{"source":"iana"},"text/fwdred":{"source":"iana"},"text/gff3":{"source":"iana"},"text/grammar-ref-list":{"source":"iana"},"text/html":{"source":"iana","compressible":true,"extensions":["html","htm","shtml"]},"text/jade":{"extensions":["jade"]},"text/javascript":{"source":"iana","compressible":true},"text/jcr-cnd":{"source":"iana"},"text/jsx":{"compressible":true,"extensions":["jsx"]},"text/less":{"compressible":true,"extensions":["less"]},"text/markdown":{"source":"iana","compressible":true,"extensions":["markdown","md"]},"text/mathml":{"source":"nginx","extensions":["mml"]},"text/mdx":{"compressible":true,"extensions":["mdx"]},"text/mizar":{"source":"iana"},"text/n3":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["n3"]},"text/parameters":{"source":"iana","charset":"UTF-8"},"text/parityfec":{"source":"iana"},"text/plain":{"source":"iana","compressible":true,"extensions":["txt","text","conf","def","list","log","in","ini"]},"text/provenance-notation":{"source":"iana","charset":"UTF-8"},"text/prs.fallenstein.rst":{"source":"iana"},"text/prs.lines.tag":{"source":"iana","extensions":["dsc"]},"text/prs.prop.logic":{"source":"iana"},"text/raptorfec":{"source":"iana"},"text/red":{"source":"iana"},"text/rfc822-headers":{"source":"iana"},"text/richtext":{"source":"iana","compressible":true,"extensions":["rtx"]},"text/rtf":{"source":"iana","compressible":true,"extensions":["rtf"]},"text/rtp-enc-aescm128":{"source":"iana"},"text/rtploopback":{"source":"iana"},"text/rtx":{"source":"iana"},"text/sgml":{"source":"iana","extensions":["sgml","sgm"]},"text/shaclc":{"source":"iana"},"text/shex":{"source":"iana","extensions":["shex"]},"text/slim":{"extensions":["slim","slm"]},"text/spdx":{"source":"iana","extensions":["spdx"]},"text/strings":{"source":"iana"},"text/stylus":{"extensions":["stylus","styl"]},"text/t140":{"source":"iana"},"text/tab-separated-values":{"source":"iana","compressible":true,"extensions":["tsv"]},"text/troff":{"source":"iana","extensions":["t","tr","roff","man","me","ms"]},"text/turtle":{"source":"iana","charset":"UTF-8","extensions":["ttl"]},"text/ulpfec":{"source":"iana"},"text/uri-list":{"source":"iana","compressible":true,"extensions":["uri","uris","urls"]},"text/vcard":{"source":"iana","compressible":true,"extensions":["vcard"]},"text/vnd.a":{"source":"iana"},"text/vnd.abc":{"source":"iana"},"text/vnd.ascii-art":{"source":"iana"},"text/vnd.curl":{"source":"iana","extensions":["curl"]},"text/vnd.curl.dcurl":{"source":"apache","extensions":["dcurl"]},"text/vnd.curl.mcurl":{"source":"apache","extensions":["mcurl"]},"text/vnd.curl.scurl":{"source":"apache","extensions":["scurl"]},"text/vnd.debian.copyright":{"source":"iana","charset":"UTF-8"},"text/vnd.dmclientscript":{"source":"iana"},"text/vnd.dvb.subtitle":{"source":"iana","extensions":["sub"]},"text/vnd.esmertec.theme-descriptor":{"source":"iana","charset":"UTF-8"},"text/vnd.familysearch.gedcom":{"source":"iana","extensions":["ged"]},"text/vnd.ficlab.flt":{"source":"iana"},"text/vnd.fly":{"source":"iana","extensions":["fly"]},"text/vnd.fmi.flexstor":{"source":"iana","extensions":["flx"]},"text/vnd.gml":{"source":"iana"},"text/vnd.graphviz":{"source":"iana","extensions":["gv"]},"text/vnd.hans":{"source":"iana"},"text/vnd.hgl":{"source":"iana"},"text/vnd.in3d.3dml
":{"source":"iana","extensions":["3dml"]},"text/vnd.in3d.spot":{"source":"iana","extensions":["spot"]},"text/vnd.iptc.newsml":{"source":"iana"},"text/vnd.iptc.nitf":{"source":"iana"},"text/vnd.latex-z":{"source":"iana"},"text/vnd.motorola.reflex":{"source":"iana"},"text/vnd.ms-mediapackage":{"source":"iana"},"text/vnd.net2phone.commcenter.command":{"source":"iana"},"text/vnd.radisys.msml-basic-layout":{"source":"iana"},"text/vnd.senx.warpscript":{"source":"iana"},"text/vnd.si.uricatalogue":{"source":"iana"},"text/vnd.sosi":{"source":"iana"},"text/vnd.sun.j2me.app-descriptor":{"source":"iana","charset":"UTF-8","extensions":["jad"]},"text/vnd.trolltech.linguist":{"source":"iana","charset":"UTF-8"},"text/vnd.wap.si":{"source":"iana"},"text/vnd.wap.sl":{"source":"iana"},"text/vnd.wap.wml":{"source":"iana","extensions":["wml"]},"text/vnd.wap.wmlscript":{"source":"iana","extensions":["wmls"]},"text/vtt":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["vtt"]},"text/x-asm":{"source":"apache","extensions":["s","asm"]},"text/x-c":{"source":"apache","extensions":["c","cc","cxx","cpp","h","hh","dic"]},"text/x-component":{"source":"nginx","extensions":["htc"]},"text/x-fortran":{"source":"apache","extensions":["f","for","f77","f90"]},"text/x-gwt-rpc":{"compressible":true},"text/x-handlebars-template":{"extensions":["hbs"]},"text/x-java-source":{"source":"apache","extensions":["java"]},"text/x-jquery-tmpl":{"compressible":true},"text/x-lua":{"extensions":["lua"]},"text/x-markdown":{"compressible":true,"extensions":["mkd"]},"text/x-nfo":{"source":"apache","extensions":["nfo"]},"text/x-opml":{"source":"apache","extensions":["opml"]},"text/x-org":{"compressible":true,"extensions":["org"]},"text/x-pascal":{"source":"apache","extensions":["p","pas"]},"text/x-processing":{"compressible":true,"extensions":["pde"]},"text/x-sass":{"extensions":["sass"]},"text/x-scss":{"extensions":["scss"]},"text/x-setext":{"source":"apache","extensions":["etx"]},"text/x-sfv":{"source":"apache","extensions":["sfv"]},"text/x-suse-ymp":{"compressible":true,"extensions":["ymp"]},"text/x-uuencode":{"source":"apache","extensions":["uu"]},"text/x-vcalendar":{"source":"apache","extensions":["vcs"]},"text/x-vcard":{"source":"apache","extensions":["vcf"]},"text/xml":{"source":"iana","compressible":true,"extensions":["xml"]},"text/xml-external-parsed-entity":{"source":"iana"},"text/yaml":{"compressible":true,"extensions":["yaml","yml"]},"video/1d-interleaved-parityfec":{"source":"iana"},"video/3gpp":{"source":"iana","extensions":["3gp","3gpp"]},"video/3gpp-tt":{"source":"iana"},"video/3gpp2":{"source":"iana","extensions":["3g2"]},"video/av1":{"source":"iana"},"video/bmpeg":{"source":"iana"},"video/bt656":{"source":"iana"},"video/celb":{"source":"iana"},"video/dv":{"source":"iana"},"video/encaprtp":{"source":"iana"},"video/ffv1":{"source":"iana"},"video/flexfec":{"source":"iana"},"video/h261":{"source":"iana","extensions":["h261"]},"video/h263":{"source":"iana","extensions":["h263"]},"video/h263-1998":{"source":"iana"},"video/h263-2000":{"source":"iana"},"video/h264":{"source":"iana","extensions":["h264"]},"video/h264-rcdo":{"source":"iana"},"video/h264-svc":{"source":"iana"},"video/h265":{"source":"iana"},"video/iso.segment":{"source":"iana","extensions":["m4s"]},"video/jpeg":{"source":"iana","extensions":["jpgv"]},"video/jpeg2000":{"source":"iana"},"video/jpm":{"source":"apache","extensions":["jpm","jpgm"]},"video/jxsv":{"source":"iana"},"video/mj2":{"source":"iana","extensions":["mj2","mjp2"]},"video/mp1s":{"source":"
iana"},"video/mp2p":{"source":"iana"},"video/mp2t":{"source":"iana","extensions":["ts"]},"video/mp4":{"source":"iana","compressible":false,"extensions":["mp4","mp4v","mpg4"]},"video/mp4v-es":{"source":"iana"},"video/mpeg":{"source":"iana","compressible":false,"extensions":["mpeg","mpg","mpe","m1v","m2v"]},"video/mpeg4-generic":{"source":"iana"},"video/mpv":{"source":"iana"},"video/nv":{"source":"iana"},"video/ogg":{"source":"iana","compressible":false,"extensions":["ogv"]},"video/parityfec":{"source":"iana"},"video/pointer":{"source":"iana"},"video/quicktime":{"source":"iana","compressible":false,"extensions":["qt","mov"]},"video/raptorfec":{"source":"iana"},"video/raw":{"source":"iana"},"video/rtp-enc-aescm128":{"source":"iana"},"video/rtploopback":{"source":"iana"},"video/rtx":{"source":"iana"},"video/scip":{"source":"iana"},"video/smpte291":{"source":"iana"},"video/smpte292m":{"source":"iana"},"video/ulpfec":{"source":"iana"},"video/vc1":{"source":"iana"},"video/vc2":{"source":"iana"},"video/vnd.cctv":{"source":"iana"},"video/vnd.dece.hd":{"source":"iana","extensions":["uvh","uvvh"]},"video/vnd.dece.mobile":{"source":"iana","extensions":["uvm","uvvm"]},"video/vnd.dece.mp4":{"source":"iana"},"video/vnd.dece.pd":{"source":"iana","extensions":["uvp","uvvp"]},"video/vnd.dece.sd":{"source":"iana","extensions":["uvs","uvvs"]},"video/vnd.dece.video":{"source":"iana","extensions":["uvv","uvvv"]},"video/vnd.directv.mpeg":{"source":"iana"},"video/vnd.directv.mpeg-tts":{"source":"iana"},"video/vnd.dlna.mpeg-tts":{"source":"iana"},"video/vnd.dvb.file":{"source":"iana","extensions":["dvb"]},"video/vnd.fvt":{"source":"iana","extensions":["fvt"]},"video/vnd.hns.video":{"source":"iana"},"video/vnd.iptvforum.1dparityfec-1010":{"source":"iana"},"video/vnd.iptvforum.1dparityfec-2005":{"source":"iana"},"video/vnd.iptvforum.2dparityfec-1010":{"source":"iana"},"video/vnd.iptvforum.2dparityfec-2005":{"source":"iana"},"video/vnd.iptvforum.ttsavc":{"source":"iana"},"video/vnd.iptvforum.ttsmpeg2":{"source":"iana"},"video/vnd.motorola.video":{"source":"iana"},"video/vnd.motorola.videop":{"source":"iana"},"video/vnd.mpegurl":{"source":"iana","extensions":["mxu","m4u"]},"video/vnd.ms-playready.media.pyv":{"source":"iana","extensions":["pyv"]},"video/vnd.nokia.interleaved-multimedia":{"source":"iana"},"video/vnd.nokia.mp4vr":{"source":"iana"},"video/vnd.nokia.videovoip":{"source":"iana"},"video/vnd.objectvideo":{"source":"iana"},"video/vnd.radgamettools.bink":{"source":"iana"},"video/vnd.radgamettools.smacker":{"source":"iana"},"video/vnd.sealed.mpeg1":{"source":"iana"},"video/vnd.sealed.mpeg4":{"source":"iana"},"video/vnd.sealed.swf":{"source":"iana"},"video/vnd.sealedmedia.softseal.mov":{"source":"iana"},"video/vnd.uvvu.mp4":{"source":"iana","extensions":["uvu","uvvu"]},"video/vnd.vivo":{"source":"iana","extensions":["viv"]},"video/vnd.youtube.yt":{"source":"iana"},"video/vp8":{"source":"iana"},"video/vp9":{"source":"iana"},"video/webm":{"source":"apache","compressible":false,"extensions":["webm"]},"video/x-f4v":{"source":"apache","extensions":["f4v"]},"video/x-fli":{"source":"apache","extensions":["fli"]},"video/x-flv":{"source":"apache","compressible":false,"extensions":["flv"]},"video/x-m4v":{"source":"apache","extensions":["m4v"]},"video/x-matroska":{"source":"apache","compressible":false,"extensions":["mkv","mk3d","mks"]},"video/x-mng":{"source":"apache","extensions":["mng"]},"video/x-ms-asf":{"source":"apache","extensions":["asf","asx"]},"video/x-ms-vob":{"source":"apache","extensions":["vob"]},"video/x-ms-wm
":{"source":"apache","extensions":["wm"]},"video/x-ms-wmv":{"source":"apache","compressible":false,"extensions":["wmv"]},"video/x-ms-wmx":{"source":"apache","extensions":["wmx"]},"video/x-ms-wvx":{"source":"apache","extensions":["wvx"]},"video/x-msvideo":{"source":"apache","extensions":["avi"]},"video/x-sgi-movie":{"source":"apache","extensions":["movie"]},"video/x-smv":{"source":"apache","extensions":["smv"]},"x-conference/x-cooltalk":{"source":"apache","extensions":["ice"]},"x-shader/x-fragment":{"compressible":true},"x-shader/x-vertex":{"compressible":true}}; + +/***/ }), +/* 513 */, +/* 514 */, +/* 515 */ +/***/ (function(module) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMImplementation; + + module.exports = XMLDOMImplementation = (function() { + function XMLDOMImplementation() {} + + XMLDOMImplementation.prototype.hasFeature = function(feature, version) { + return true; + }; + + XMLDOMImplementation.prototype.createDocumentType = function(qualifiedName, publicId, systemId) { + throw new Error("This DOM method is not implemented."); + }; + + XMLDOMImplementation.prototype.createDocument = function(namespaceURI, qualifiedName, doctype) { + throw new Error("This DOM method is not implemented."); + }; + + XMLDOMImplementation.prototype.createHTMLDocument = function(title) { + throw new Error("This DOM method is not implemented."); + }; + + XMLDOMImplementation.prototype.getFeature = function(feature, version) { + throw new Error("This DOM method is not implemented."); + }; + + return XMLDOMImplementation; + + })(); + +}).call(this); + + +/***/ }), +/* 516 */, +/* 517 */, +/* 518 */, +/* 519 */, +/* 520 */, +/* 521 */, +/* 522 */, +/* 523 */, +/* 524 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMConfiguration, XMLDOMErrorHandler, XMLDOMStringList; + + XMLDOMErrorHandler = __webpack_require__(724); + + XMLDOMStringList = __webpack_require__(556); + + module.exports = XMLDOMConfiguration = (function() { + function XMLDOMConfiguration() { + var clonedSelf; + this.defaultParams = { + "canonical-form": false, + "cdata-sections": false, + "comments": false, + "datatype-normalization": false, + "element-content-whitespace": true, + "entities": true, + "error-handler": new XMLDOMErrorHandler(), + "infoset": true, + "validate-if-schema": false, + "namespaces": true, + "namespace-declarations": true, + "normalize-characters": false, + "schema-location": '', + "schema-type": '', + "split-cdata-sections": true, + "validate": false, + "well-formed": true + }; + this.params = clonedSelf = Object.create(this.defaultParams); + } + + Object.defineProperty(XMLDOMConfiguration.prototype, 'parameterNames', { + get: function() { + return new XMLDOMStringList(Object.keys(this.defaultParams)); + } + }); + + XMLDOMConfiguration.prototype.getParameter = function(name) { + if (this.params.hasOwnProperty(name)) { + return this.params[name]; + } else { + return null; + } + }; + + XMLDOMConfiguration.prototype.canSetParameter = function(name, value) { + return true; + }; + + XMLDOMConfiguration.prototype.setParameter = function(name, value) { + if (value != null) { + return this.params[name] = value; + } else { + return delete this.params[name]; + } + }; + + return XMLDOMConfiguration; + + })(); + +}).call(this); + + +/***/ }), +/* 525 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; +var platform_1 = __webpack_require__(910); +var version_1 = __webpack_require__(830); +var semver_1 = __webpack_require__(987); +var major = version_1.VERSION.split('.')[0]; +var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." + major); +var _global = platform_1._globalThis; +function registerGlobal(type, instance, diag, allowOverride) { + var _a; + if (allowOverride === void 0) { allowOverride = false; } + var api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + version: version_1.VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + var err = new Error("@opentelemetry/api: Attempted duplicate registration of API: " + type); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== version_1.VERSION) { + // All registered APIs must be of the same version exactly + var err = new Error('@opentelemetry/api: All API registration versions must match'); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug("@opentelemetry/api: Registered a global for " + type + " v" + version_1.VERSION + "."); + return true; +} +exports.registerGlobal = registerGlobal; +function getGlobal(type) { + var _a, _b; + var globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !semver_1.isCompatible(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; +} +exports.getGlobal = getGlobal; +function unregisterGlobal(type, diag) { + diag.debug("@opentelemetry/api: Unregistering a global for " + type + " v" + version_1.VERSION + "."); + var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +exports.unregisterGlobal = unregisterGlobal; +//# sourceMappingURL=global-utils.js.map + +/***/ }), +/* 526 */, +/* 527 */, +/* 528 */, +/* 529 */, +/* 530 */, +/* 531 */, +/* 532 */, +/* 533 */, +/* 534 */, +/* 535 */, +/* 536 */, +/* 537 */, +/* 538 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +/** + * Returns a copy of the upload options with defaults filled in. 
+ * + * @param copy the original upload options + */ +function getUploadOptions(copy) { + const result = { + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.uploadConcurrency === 'number') { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === 'number') { + result.uploadChunkSize = copy.uploadChunkSize; + } + } + core.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; +} +exports.getUploadOptions = getUploadOptions; +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +function getDownloadOptions(copy) { + const result = { + useAzureSdk: true, + downloadConcurrency: 8, + timeoutInMs: 30000 + }; + if (copy) { + if (typeof copy.useAzureSdk === 'boolean') { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.downloadConcurrency === 'number') { + result.downloadConcurrency = copy.downloadConcurrency; + } + if (typeof copy.timeoutInMs === 'number') { + result.timeoutInMs = copy.timeoutInMs; + } + } + core.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core.debug(`Download concurrency: ${result.downloadConcurrency}`); + core.debug(`Request timeout (ms): ${result.timeoutInMs}`); + return result; +} +exports.getDownloadOptions = getDownloadOptions; +//# sourceMappingURL=options.js.map + +/***/ }), +/* 539 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const http = __webpack_require__(605); +const https = __webpack_require__(211); +const pm = __webpack_require__(950); +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; 
+})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return new Promise(async (resolve, reject) => { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + } + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + } + del(requestUrl, additionalHeaders) { + 
return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + } + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + } + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + } + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + async getJson(requestUrl, additionalHeaders = {}) { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + let res = await this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async postJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async putJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async patchJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + async request(verb, requestUrl, data, headers) { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + let parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = await this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + let parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = await this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + await response.readBody(); + await this._performExponentialBackoff(numTries); + } + } + return response; + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + let parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + this.handlers.forEach(handler => { + handler.prepareRequest(info.options); + }); + } + return info; + } + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + let proxyUrl = pm.getProxyUrl(parsedUrl); + let useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
+ if (!!agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __webpack_require__(413); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + ...((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + }), + host: proxyUrl.hostname, + port: proxyUrl.port + } + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + async _processResponse(res, options) { + return new Promise(async (resolve, reject) => { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = await res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + }); + } +} +exports.HttpClient = HttpClient; + + +/***/ }), +/* 540 */, +/* 541 */ +/***/ (function(module) { + +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + None: 0, + OpenTag: 1, + InsideTag: 2, + CloseTag: 3 + }; + +}).call(this); + + +/***/ }), +/* 542 */, +/* 543 */, +/* 544 */, +/* 545 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagLogLevel = void 0; +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. 
+ */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {})); +//# sourceMappingURL=types.js.map + +/***/ }), +/* 546 */, +/* 547 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var util = __webpack_require__(669); +var Stream = __webpack_require__(794).Stream; +var DelayedStream = __webpack_require__(152); + +module.exports = CombinedStream; +function CombinedStream() { + this.writable = false; + this.readable = true; + this.dataSize = 0; + this.maxDataSize = 2 * 1024 * 1024; + this.pauseStreams = true; + + this._released = false; + this._streams = []; + this._currentStream = null; + this._insideLoop = false; + this._pendingNext = false; +} +util.inherits(CombinedStream, Stream); + +CombinedStream.create = function(options) { + var combinedStream = new this(); + + options = options || {}; + for (var option in options) { + combinedStream[option] = options[option]; + } + + return combinedStream; +}; + +CombinedStream.isStreamLike = function(stream) { + return (typeof stream !== 'function') + && (typeof stream !== 'string') + && (typeof stream !== 'boolean') + && (typeof stream !== 'number') + && (!Buffer.isBuffer(stream)); +}; + +CombinedStream.prototype.append = function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + + if (isStreamLike) { + if (!(stream instanceof DelayedStream)) { + var newStream = DelayedStream.create(stream, { + maxDataSize: Infinity, + pauseStream: this.pauseStreams, + }); + stream.on('data', this._checkDataSize.bind(this)); + stream = newStream; + } + + this._handleErrors(stream); + + if (this.pauseStreams) { + stream.pause(); + } + } + + this._streams.push(stream); + return this; +}; + +CombinedStream.prototype.pipe = function(dest, options) { + Stream.prototype.pipe.call(this, dest, options); + this.resume(); + return dest; +}; + +CombinedStream.prototype._getNext = function() { + this._currentStream = null; + + if (this._insideLoop) { + this._pendingNext = true; + return; // defer call + } + + this._insideLoop = true; + try { + do { + this._pendingNext = false; + this._realGetNext(); + } while (this._pendingNext); + } finally { + this._insideLoop = false; + } +}; + +CombinedStream.prototype._realGetNext = function() { + var stream = this._streams.shift(); + + + if (typeof stream == 'undefined') { + this.end(); + return; + } + + if (typeof stream !== 'function') { + this._pipeNext(stream); + return; + } + + var getStream = stream; + getStream(function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('data', this._checkDataSize.bind(this)); + this._handleErrors(stream); + } + + this._pipeNext(stream); + }.bind(this)); +}; + +CombinedStream.prototype._pipeNext = function(stream) { + this._currentStream = stream; + + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('end', this._getNext.bind(this)); + stream.pipe(this, {end: false}); + return; + } + + var value = stream; + this.write(value); + this._getNext(); +}; + +CombinedStream.prototype._handleErrors = function(stream) { + var self = this; + stream.on('error', function(err) { + self._emitError(err); + }); +}; + +CombinedStream.prototype.write = function(data) { + this.emit('data', data); +}; + +CombinedStream.prototype.pause = function() { + if (!this.pauseStreams) { + return; + } + + 
if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); + this.emit('pause'); +}; + +CombinedStream.prototype.resume = function() { + if (!this._released) { + this._released = true; + this.writable = true; + this._getNext(); + } + + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); + this.emit('resume'); +}; + +CombinedStream.prototype.end = function() { + this._reset(); + this.emit('end'); +}; + +CombinedStream.prototype.destroy = function() { + this._reset(); + this.emit('close'); +}; + +CombinedStream.prototype._reset = function() { + this.writable = false; + this._streams = []; + this._currentStream = null; +}; + +CombinedStream.prototype._checkDataSize = function() { + this._updateDataSize(); + if (this.dataSize <= this.maxDataSize) { + return; + } + + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; + this._emitError(new Error(message)); +}; + +CombinedStream.prototype._updateDataSize = function() { + this.dataSize = 0; + + var self = this; + this._streams.forEach(function(stream) { + if (!stream.dataSize) { + return; + } + + self.dataSize += stream.dataSize; + }); + + if (this._currentStream && this._currentStream.dataSize) { + this.dataSize += this._currentStream.dataSize; + } +}; + +CombinedStream.prototype._emitError = function(err) { + this._reset(); + this.emit('error', err); +}; + + +/***/ }), +/* 548 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __webpack_require__(87); +const fs_1 = __webpack_require__(747); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary'; +class MarkdownSummary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports markdown summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}</${tag}>`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise<MarkdownSummary>} markdown summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {MarkdownSummary} markdown summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {MarkdownSummary} markdown summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {MarkdownSummary} markdown summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {MarkdownSummary} markdown summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {MarkdownSummary} markdown summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {MarkdownSummary} markdown summary instance + */ + addList(items, ordered = false) { + const tag = ordered ?
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {MarkdownSummary} markdown summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {MarkdownSummary} markdown summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {MarkdownSummary} markdown summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {MarkdownSummary} markdown summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {MarkdownSummary} markdown summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {MarkdownSummary} markdown summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {MarkdownSummary} markdown summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {MarkdownSummary} markdown summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +// singleton export +exports.markdownSummary = new MarkdownSummary(); +//# sourceMappingURL=markdown-summary.js.map + +/***/ }), +/* 549 */, +/* 550 */, +/* 551 */, +/* 552 */, +/* 553 */, +/* 554 */, +/* 555 */, +/* 556 */ +/***/ (function(module) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMStringList; + + module.exports = XMLDOMStringList = (function() { + function XMLDOMStringList(arr) { + this.arr = arr || []; + } + + Object.defineProperty(XMLDOMStringList.prototype, 'length', { + get: function() { + return this.arr.length; + } + }); + + XMLDOMStringList.prototype.item = function(index) { + return this.arr[index] || null; + }; + + XMLDOMStringList.prototype.contains = function(str) { + return this.arr.indexOf(str) !== -1; + }; + + return XMLDOMStringList; + + })(); + +}).call(this); + + +/***/ }), +/* 557 */, +/* 558 */, +/* 559 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDOMConfiguration, XMLDOMImplementation, XMLDocument, XMLNode, XMLStringWriter, XMLStringifier, isPlainObject, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + isPlainObject = __webpack_require__(582).isPlainObject; + + XMLDOMImplementation = __webpack_require__(515); + + XMLDOMConfiguration = __webpack_require__(524); + + XMLNode = __webpack_require__(257); + + NodeType = __webpack_require__(683); + + XMLStringifier = __webpack_require__(602); + + XMLStringWriter = __webpack_require__(750); + + module.exports = XMLDocument = (function(superClass) { + extend(XMLDocument, superClass); + + function XMLDocument(options) { + XMLDocument.__super__.constructor.call(this, null); + this.name = "#document"; + this.type = NodeType.Document; + this.documentURI = null; + this.domConfig = new XMLDOMConfiguration(); + options || (options = {}); + if (!options.writer) { + options.writer = new XMLStringWriter(); + } + this.options = options; + this.stringify = new XMLStringifier(options); + } + + Object.defineProperty(XMLDocument.prototype, 'implementation', { + value: new XMLDOMImplementation() + }); + + Object.defineProperty(XMLDocument.prototype, 'doctype', { + get: function() { + var child, i, len, ref; + ref = this.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + if (child.type === NodeType.DocType) { + return child; + } + } + 
return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'documentElement', { + get: function() { + return this.rootObject || null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'inputEncoding', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'strictErrorChecking', { + get: function() { + return false; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'xmlEncoding', { + get: function() { + if (this.children.length !== 0 && this.children[0].type === NodeType.Declaration) { + return this.children[0].encoding; + } else { + return null; + } + } + }); + + Object.defineProperty(XMLDocument.prototype, 'xmlStandalone', { + get: function() { + if (this.children.length !== 0 && this.children[0].type === NodeType.Declaration) { + return this.children[0].standalone === 'yes'; + } else { + return false; + } + } + }); + + Object.defineProperty(XMLDocument.prototype, 'xmlVersion', { + get: function() { + if (this.children.length !== 0 && this.children[0].type === NodeType.Declaration) { + return this.children[0].version; + } else { + return "1.0"; + } + } + }); + + Object.defineProperty(XMLDocument.prototype, 'URL', { + get: function() { + return this.documentURI; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'origin', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'compatMode', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'characterSet', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'contentType', { + get: function() { + return null; + } + }); + + XMLDocument.prototype.end = function(writer) { + var writerOptions; + writerOptions = {}; + if (!writer) { + writer = this.options.writer; + } else if (isPlainObject(writer)) { + writerOptions = writer; + writer = this.options.writer; + } + return writer.document(this, writer.filterOptions(writerOptions)); + }; + + XMLDocument.prototype.toString = function(options) { + return this.options.writer.document(this, this.options.writer.filterOptions(options)); + }; + + XMLDocument.prototype.createElement = function(tagName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createDocumentFragment = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createTextNode = function(data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createComment = function(data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createCDATASection = function(data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createProcessingInstruction = function(target, data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createAttribute = function(name) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createEntityReference = function(name) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementsByTagName = function(tagname) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLDocument.prototype.importNode = function(importedNode, deep) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createElementNS = function(namespaceURI, qualifiedName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createAttributeNS = function(namespaceURI, qualifiedName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementById = function(elementId) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.adoptNode = function(source) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.normalizeDocument = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.renameNode = function(node, namespaceURI, qualifiedName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementsByClassName = function(classNames) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createEvent = function(eventInterface) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createRange = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createNodeIterator = function(root, whatToShow, filter) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createTreeWalker = function(root, whatToShow, filter) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + return XMLDocument; + + })(XMLNode); + +}).call(this); + + +/***/ }), +/* 560 */, +/* 561 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataSymbol = void 0; +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map + +/***/ }), +/* 562 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +/*! + * Copyright (c) 2018, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. 
Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +const psl = __webpack_require__(632); + +function getPublicSuffix(domain) { + return psl.get(domain); +} + +exports.getPublicSuffix = getPublicSuffix; + + +/***/ }), +/* 563 */, +/* 564 */, +/* 565 */, +/* 566 */ +/***/ (function(module) { + +// API +module.exports = abort; + +/** + * Aborts leftover active jobs + * + * @param {object} state - current state object + */ +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); + + // reset leftover jobs + state.jobs = {}; +} + +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } +} + + +/***/ }), +/* 567 */, +/* 568 */, +/* 569 */, +/* 570 */, +/* 571 */, +/* 572 */, +/* 573 */, +/* 574 */, +/* 575 */, +/* 576 */, +/* 577 */, +/* 578 */, +/* 579 */, +/* 580 */, +/* 581 */, +/* 582 */ +/***/ (function(module) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var assign, getValue, isArray, isEmpty, isFunction, isObject, isPlainObject, + slice = [].slice, + hasProp = {}.hasOwnProperty; + + assign = function() { + var i, key, len, source, sources, target; + target = arguments[0], sources = 2 <= arguments.length ? 
slice.call(arguments, 1) : []; + if (isFunction(Object.assign)) { + Object.assign.apply(null, arguments); + } else { + for (i = 0, len = sources.length; i < len; i++) { + source = sources[i]; + if (source != null) { + for (key in source) { + if (!hasProp.call(source, key)) continue; + target[key] = source[key]; + } + } + } + } + return target; + }; + + isFunction = function(val) { + return !!val && Object.prototype.toString.call(val) === '[object Function]'; + }; + + isObject = function(val) { + var ref; + return !!val && ((ref = typeof val) === 'function' || ref === 'object'); + }; + + isArray = function(val) { + if (isFunction(Array.isArray)) { + return Array.isArray(val); + } else { + return Object.prototype.toString.call(val) === '[object Array]'; + } + }; + + isEmpty = function(val) { + var key; + if (isArray(val)) { + return !val.length; + } else { + for (key in val) { + if (!hasProp.call(val, key)) continue; + return false; + } + return true; + } + }; + + isPlainObject = function(val) { + var ctor, proto; + return isObject(val) && (proto = Object.getPrototypeOf(val)) && (ctor = proto.constructor) && (typeof ctor === 'function') && (ctor instanceof ctor) && (Function.prototype.toString.call(ctor) === Function.prototype.toString.call(Object)); + }; + + getValue = function(obj) { + if (isFunction(obj.valueOf)) { + return obj.valueOf(); + } else { + return obj; + } + }; + + module.exports.assign = assign; + + module.exports.isFunction = isFunction; + + module.exports.isObject = isObject; + + module.exports.isArray = isArray; + + module.exports.isEmpty = isEmpty; + + module.exports.isPlainObject = isPlainObject; + + module.exports.getValue = getValue; + +}).call(this); + + +/***/ }), +/* 583 */, +/* 584 */, +/* 585 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function () { + return _nil.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function () { + return _stringify.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); + +var _v = _interopRequireDefault(__webpack_require__(173)); + +var _v2 = _interopRequireDefault(__webpack_require__(298)); + +var _v3 = _interopRequireDefault(__webpack_require__(606)); + +var _v4 = _interopRequireDefault(__webpack_require__(90)); + +var _nil = _interopRequireDefault(__webpack_require__(24)); + +var _version = _interopRequireDefault(__webpack_require__(104)); + +var _validate = _interopRequireDefault(__webpack_require__(676)); + +var _stringify = _interopRequireDefault(__webpack_require__(855)); + +var _parse = _interopRequireDefault(__webpack_require__(197)); + +function 
_interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), +/* 586 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=span.js.map + +/***/ }), +/* 587 */, +/* 588 */, +/* 589 */, +/* 590 */, +/* 591 */, +/* 592 */, +/* 593 */, +/* 594 */, +/* 595 */, +/* 596 */, +/* 597 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.partialMatch = exports.match = exports.getSearchPaths = void 0; +const pathHelper = __importStar(__webpack_require__(972)); +const internal_match_kind_1 = __webpack_require__(327); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? 
pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; +} +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; +} +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); +} +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map + +/***/ }), +/* 598 */, +/* 599 */, +/* 600 */, +/* 601 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getOptions = void 0; +const core = __importStar(__webpack_require__(470)); +/** + * Returns a copy with defaults filled in. 
+ */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; +} +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map + +/***/ }), +/* 602 */ +/***/ (function(module) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLStringifier, + bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }, + hasProp = {}.hasOwnProperty; + + module.exports = XMLStringifier = (function() { + function XMLStringifier(options) { + this.assertLegalName = bind(this.assertLegalName, this); + this.assertLegalChar = bind(this.assertLegalChar, this); + var key, ref, value; + options || (options = {}); + this.options = options; + if (!this.options.version) { + this.options.version = '1.0'; + } + ref = options.stringify || {}; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + value = ref[key]; + this[key] = value; + } + } + + XMLStringifier.prototype.name = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalName('' + val || ''); + }; + + XMLStringifier.prototype.text = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar(this.textEscape('' + val || '')); + }; + + XMLStringifier.prototype.cdata = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + val = val.replace(']]>', ']]]]>'); + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.comment = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (val.match(/--/)) { + throw new Error("Comment text cannot contain double-hypen: " + val); + } + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.raw = function(val) { + if (this.options.noValidation) { + return val; + } + return '' + val || ''; + }; + + XMLStringifier.prototype.attValue = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar(this.attEscape(val = '' + val || '')); + }; + + XMLStringifier.prototype.insTarget = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.insValue = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (val.match(/\?>/)) { + throw new Error("Invalid processing instruction value: " + val); + } + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.xmlVersion = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (!val.match(/1\.[0-9]+/)) { + throw new Error("Invalid version number: " + val); + } + return val; + }; + + XMLStringifier.prototype.xmlEncoding = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if 
(!val.match(/^[A-Za-z](?:[A-Za-z0-9._-])*$/)) { + throw new Error("Invalid encoding: " + val); + } + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.xmlStandalone = function(val) { + if (this.options.noValidation) { + return val; + } + if (val) { + return "yes"; + } else { + return "no"; + } + }; + + XMLStringifier.prototype.dtdPubID = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdSysID = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdElementValue = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdAttType = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdAttDefault = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdEntityValue = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdNData = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.convertAttKey = '@'; + + XMLStringifier.prototype.convertPIKey = '?'; + + XMLStringifier.prototype.convertTextKey = '#text'; + + XMLStringifier.prototype.convertCDataKey = '#cdata'; + + XMLStringifier.prototype.convertCommentKey = '#comment'; + + XMLStringifier.prototype.convertRawKey = '#raw'; + + XMLStringifier.prototype.assertLegalChar = function(str) { + var regex, res; + if (this.options.noValidation) { + return str; + } + regex = ''; + if (this.options.version === '1.0') { + regex = /[\0-\x08\x0B\f\x0E-\x1F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; + if (res = str.match(regex)) { + throw new Error("Invalid character in string: " + str + " at index " + res.index); + } + } else if (this.options.version === '1.1') { + regex = /[\0\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; + if (res = str.match(regex)) { + throw new Error("Invalid character in string: " + str + " at index " + res.index); + } + } + return str; + }; + + XMLStringifier.prototype.assertLegalName = function(str) { + var regex; + if (this.options.noValidation) { + return str; + } + this.assertLegalChar(str); + regex = /^([:A-Z_a-z\xC0-\xD6\xD8-\xF6\xF8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]|[\uD800-\uDB7F][\uDC00-\uDFFF])([\x2D\.0-:A-Z_a-z\xB7\xC0-\xD6\xD8-\xF6\xF8-\u037D\u037F-\u1FFF\u200C\u200D\u203F\u2040\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]|[\uD800-\uDB7F][\uDC00-\uDFFF])*$/; + if (!str.match(regex)) { + throw new Error("Invalid character in name"); + } + return str; + }; + + XMLStringifier.prototype.textEscape = function(str) { + var ampregex; + if (this.options.noValidation) { + return str; + } + ampregex = this.options.noDoubleEncoding ? 
/(?!&\S+;)&/g : /&/g; + return str.replace(ampregex, '&').replace(//g, '>').replace(/\r/g, ' '); + }; + + XMLStringifier.prototype.attEscape = function(str) { + var ampregex; + if (this.options.noValidation) { + return str; + } + ampregex = this.options.noDoubleEncoding ? /(?!&\S+;)&/g : /&/g; + return str.replace(ampregex, '&').replace(/= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); + + +/***/ }), +/* 609 */, +/* 610 */, +/* 611 */, +/* 612 */, +/* 613 */, +/* 614 */ +/***/ (function(module) { + +module.exports = require("events"); + +/***/ }), +/* 615 */, +/* 616 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(__webpack_require__(417)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; + +/***/ }), +/* 617 */, +/* 618 */, +/* 619 */, +/* 620 */, +/* 621 */ +/***/ (function(module) { + +"use strict"; + +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? 
ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} + + +/***/ }), +/* 622 */ +/***/ (function(module) { + +module.exports = require("path"); + +/***/ }), +/* 623 */, +/* 624 */, +/* 625 */, +/* 626 */, +/* 627 */, +/* 628 */, +/* 629 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var invalid_span_constants_1 = __webpack_require__(435); +var NonRecordingSpan_1 = __webpack_require__(437); +var VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +var VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== invalid_span_constants_1.INVALID_TRACEID; +} +exports.isValidTraceId = isValidTraceId; +function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== invalid_span_constants_1.INVALID_SPANID; +} +exports.isValidSpanId = isValidSpanId; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. + */ +function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +exports.isSpanContextValid = isSpanContextValid; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +function wrapSpanContext(spanContext) { + return new NonRecordingSpan_1.NonRecordingSpan(spanContext); +} +exports.wrapSpanContext = wrapSpanContext; +//# sourceMappingURL=spancontext-utils.js.map + +/***/ }), +/* 630 */, +/* 631 */ +/***/ (function(module) { + +module.exports = require("net"); + +/***/ }), +/* 632 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ + + + +var Punycode = __webpack_require__(815); + + +var internals = {}; + + +// +// Read rules from file. +// +internals.rules = __webpack_require__(50).map(function (rule) { + + return { + rule: rule, + suffix: rule.replace(/^(\*\.|\!)/, ''), + punySuffix: -1, + wildcard: rule.charAt(0) === '*', + exception: rule.charAt(0) === '!' + }; +}); + + +// +// Check is given string ends with `suffix`. +// +internals.endsWith = function (str, suffix) { + + return str.indexOf(suffix, str.length - suffix.length) !== -1; +}; + + +// +// Find rule for a given domain. +// +internals.findRule = function (domain) { + + var punyDomain = Punycode.toASCII(domain); + return internals.rules.reduce(function (memo, rule) { + + if (rule.punySuffix === -1){ + rule.punySuffix = Punycode.toASCII(rule.suffix); + } + if (!internals.endsWith(punyDomain, '.' 
+ rule.punySuffix) && punyDomain !== rule.punySuffix) { + return memo; + } + // This has been commented out as it never seems to run. This is because + // sub tlds always appear after their parents and we never find a shorter + // match. + //if (memo) { + // var memoSuffix = Punycode.toASCII(memo.suffix); + // if (memoSuffix.length >= punySuffix.length) { + // return memo; + // } + //} + return rule; + }, null); +}; + + +// +// Error codes and messages. +// +exports.errorCodes = { + DOMAIN_TOO_SHORT: 'Domain name too short.', + DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', + LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', + LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', + LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', + LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', + LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' +}; + + +// +// Validate domain name and throw if not valid. +// +// From wikipedia: +// +// Hostnames are composed of series of labels concatenated with dots, as are all +// domain names. Each label must be between 1 and 63 characters long, and the +// entire hostname (including the delimiting dots) has a maximum of 255 chars. +// +// Allowed chars: +// +// * `a-z` +// * `0-9` +// * `-` but not as a starting or ending character +// * `.` as a separator for the textual portions of a domain name +// +// * http://en.wikipedia.org/wiki/Domain_name +// * http://en.wikipedia.org/wiki/Hostname +// +internals.validate = function (input) { + + // Before we can validate we need to take care of IDNs with unicode chars. + var ascii = Punycode.toASCII(input); + + if (ascii.length < 1) { + return 'DOMAIN_TOO_SHORT'; + } + if (ascii.length > 255) { + return 'DOMAIN_TOO_LONG'; + } + + // Check each part's length and allowed chars. + var labels = ascii.split('.'); + var label; + + for (var i = 0; i < labels.length; ++i) { + label = labels[i]; + if (!label.length) { + return 'LABEL_TOO_SHORT'; + } + if (label.length > 63) { + return 'LABEL_TOO_LONG'; + } + if (label.charAt(0) === '-') { + return 'LABEL_STARTS_WITH_DASH'; + } + if (label.charAt(label.length - 1) === '-') { + return 'LABEL_ENDS_WITH_DASH'; + } + if (!/^[a-z0-9\-]+$/.test(label)) { + return 'LABEL_INVALID_CHARS'; + } + } +}; + + +// +// Public API +// + + +// +// Parse domain. +// +exports.parse = function (input) { + + if (typeof input !== 'string') { + throw new TypeError('Domain name must be a string.'); + } + + // Force domain to lowercase. + var domain = input.slice(0).toLowerCase(); + + // Handle FQDN. + // TODO: Simply remove trailing dot? + if (domain.charAt(domain.length - 1) === '.') { + domain = domain.slice(0, domain.length - 1); + } + + // Validate and sanitise input. 
+ var error = internals.validate(domain); + if (error) { + return { + input: input, + error: { + message: exports.errorCodes[error], + code: error + } + }; + } + + var parsed = { + input: input, + tld: null, + sld: null, + domain: null, + subdomain: null, + listed: false + }; + + var domainParts = domain.split('.'); + + // Non-Internet TLD + if (domainParts[domainParts.length - 1] === 'local') { + return parsed; + } + + var handlePunycode = function () { + + if (!/xn--/.test(domain)) { + return parsed; + } + if (parsed.domain) { + parsed.domain = Punycode.toASCII(parsed.domain); + } + if (parsed.subdomain) { + parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } + return parsed; + }; + + var rule = internals.findRule(domain); + + // Unlisted tld. + if (!rule) { + if (domainParts.length < 2) { + return parsed; + } + parsed.tld = domainParts.pop(); + parsed.sld = domainParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + if (domainParts.length) { + parsed.subdomain = domainParts.pop(); + } + return handlePunycode(); + } + + // At this point we know the public suffix is listed. + parsed.listed = true; + + var tldParts = rule.suffix.split('.'); + var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); + + if (rule.exception) { + privateParts.push(tldParts.shift()); + } + + parsed.tld = tldParts.join('.'); + + if (!privateParts.length) { + return handlePunycode(); + } + + if (rule.wildcard) { + tldParts.unshift(privateParts.pop()); + parsed.tld = tldParts.join('.'); + } + + if (!privateParts.length) { + return handlePunycode(); + } + + parsed.sld = privateParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + + if (privateParts.length) { + parsed.subdomain = privateParts.join('.'); + } + + return handlePunycode(); +}; + + +// +// Get domain. +// +exports.get = function (domain) { + + if (!domain) { + return null; + } + return exports.parse(domain).domain || null; +}; + + +// +// Check whether domain belongs to a known public suffix. 
+// +exports.isValid = function (domain) { + + var parsed = exports.parse(domain); + return Boolean(parsed.domain && parsed.listed); +}; + + +/***/ }), +/* 633 */, +/* 634 */, +/* 635 */, +/* 636 */, +/* 637 */, +/* 638 */, +/* 639 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLCharacterData, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = __webpack_require__(257); + + module.exports = XMLCharacterData = (function(superClass) { + extend(XMLCharacterData, superClass); + + function XMLCharacterData(parent) { + XMLCharacterData.__super__.constructor.call(this, parent); + this.value = ''; + } + + Object.defineProperty(XMLCharacterData.prototype, 'data', { + get: function() { + return this.value; + }, + set: function(value) { + return this.value = value || ''; + } + }); + + Object.defineProperty(XMLCharacterData.prototype, 'length', { + get: function() { + return this.value.length; + } + }); + + Object.defineProperty(XMLCharacterData.prototype, 'textContent', { + get: function() { + return this.value; + }, + set: function(value) { + return this.value = value || ''; + } + }); + + XMLCharacterData.prototype.clone = function() { + return Object.create(this); + }; + + XMLCharacterData.prototype.substringData = function(offset, count) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.appendData = function(arg) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.insertData = function(offset, arg) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.deleteData = function(offset, count) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.replaceData = function(offset, count, arg) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.isEqualNode = function(node) { + if (!XMLCharacterData.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.data !== this.data) { + return false; + } + return true; + }; + + return XMLCharacterData; + + })(XMLNode); + +}).call(this); + + +/***/ }), +/* 640 */ +/***/ (function(module) { + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); + + +/***/ }), +/* 641 */, +/* 642 */, +/* 643 */, +/* 644 */, +/* 645 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. 
Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. + if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. 
+ var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = __webpack_require__(794).Stream + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. + me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = __webpack_require__(304).StringDecoder + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. 
Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' + } + + function isQuote (c) { + return c === '"' || c === '\'' + } + + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } + + function isMatch (regex, c) { + return regex.test(c) + } + + function notMatch (regex, c) { + return !isMatch(regex, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. + OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, // +``` + +This script is browserified and wrapped in a [umd](https://github.com/umdjs/umd) +wrapper so you should be able to use it standalone or together with a module +loader. + +## API + +### `psl.parse(domain)` + +Parse domain based on Public Suffix List. Returns an `Object` with the following +properties: + +* `tld`: Top level domain (this is the _public suffix_). +* `sld`: Second level domain (the first private part of the domain name). +* `domain`: The domain name is the `sld` + `tld`. +* `subdomain`: Optional parts left of the domain. + +#### Example: + +```js +var psl = require('psl'); + +// Parse domain without subdomain +var parsed = psl.parse('google.com'); +console.log(parsed.tld); // 'com' +console.log(parsed.sld); // 'google' +console.log(parsed.domain); // 'google.com' +console.log(parsed.subdomain); // null + +// Parse domain with subdomain +var parsed = psl.parse('www.google.com'); +console.log(parsed.tld); // 'com' +console.log(parsed.sld); // 'google' +console.log(parsed.domain); // 'google.com' +console.log(parsed.subdomain); // 'www' + +// Parse domain with nested subdomains +var parsed = psl.parse('a.b.c.d.foo.com'); +console.log(parsed.tld); // 'com' +console.log(parsed.sld); // 'foo' +console.log(parsed.domain); // 'foo.com' +console.log(parsed.subdomain); // 'a.b.c.d' +``` + +### `psl.get(domain)` + +Get domain name, `sld` + `tld`. Returns `null` if not valid. + +#### Example: + +```js +var psl = require('psl'); + +// null input. +psl.get(null); // null + +// Mixed case. +psl.get('COM'); // null +psl.get('example.COM'); // 'example.com' +psl.get('WwW.example.COM'); // 'example.com' + +// Unlisted TLD. +psl.get('example'); // null +psl.get('example.example'); // 'example.example' +psl.get('b.example.example'); // 'example.example' +psl.get('a.b.example.example'); // 'example.example' + +// TLD with only 1 rule. +psl.get('biz'); // null +psl.get('domain.biz'); // 'domain.biz' +psl.get('b.domain.biz'); // 'domain.biz' +psl.get('a.b.domain.biz'); // 'domain.biz' + +// TLD with some 2-level rules. 
+psl.get('uk.com'); // null); +psl.get('example.uk.com'); // 'example.uk.com'); +psl.get('b.example.uk.com'); // 'example.uk.com'); + +// More complex TLD. +psl.get('c.kobe.jp'); // null +psl.get('b.c.kobe.jp'); // 'b.c.kobe.jp' +psl.get('a.b.c.kobe.jp'); // 'b.c.kobe.jp' +psl.get('city.kobe.jp'); // 'city.kobe.jp' +psl.get('www.city.kobe.jp'); // 'city.kobe.jp' + +// IDN labels. +psl.get('食狮.com.cn'); // '食狮.com.cn' +psl.get('食狮.公司.cn'); // '食狮.公司.cn' +psl.get('www.食狮.公司.cn'); // '食狮.公司.cn' + +// Same as above, but punycoded. +psl.get('xn--85x722f.com.cn'); // 'xn--85x722f.com.cn' +psl.get('xn--85x722f.xn--55qx5d.cn'); // 'xn--85x722f.xn--55qx5d.cn' +psl.get('www.xn--85x722f.xn--55qx5d.cn'); // 'xn--85x722f.xn--55qx5d.cn' +``` + +### `psl.isValid(domain)` + +Check whether a domain has a valid Public Suffix. Returns a `Boolean` indicating +whether the domain has a valid Public Suffix. + +#### Example + +```js +var psl = require('psl'); + +psl.isValid('google.com'); // true +psl.isValid('www.google.com'); // true +psl.isValid('x.yz'); // false +``` + + +## Testing and Building + +Test are written using [`mocha`](https://mochajs.org/) and can be +run in two different environments: `node` and `phantomjs`. + +```sh +# This will run `eslint`, `mocha` and `karma`. +npm test + +# Individual test environments +# Run tests in node only. +./node_modules/.bin/mocha test +# Run tests in phantomjs only. +./node_modules/.bin/karma start ./karma.conf.js --single-run + +# Build data (parse raw list) and create dist files +npm run build +``` + +Feel free to fork if you see possible improvements! + + +## Acknowledgements + +* Mozilla Foundation's [Public Suffix List](https://publicsuffix.org/) +* Thanks to Rob Stradling of [Comodo](https://www.comodo.com/) for providing + test data. +* Inspired by [weppos/publicsuffix-ruby](https://github.com/weppos/publicsuffix-ruby) + + +## License + +The MIT License (MIT) + +Copyright (c) 2017 Lupo Montero + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/psl/browserstack-logo.svg b/node_modules/psl/browserstack-logo.svg new file mode 100644 index 00000000..195f64d2 --- /dev/null +++ b/node_modules/psl/browserstack-logo.svg @@ -0,0 +1,90 @@ + + + + +Browserstack-logo-white + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/node_modules/psl/data/rules.json b/node_modules/psl/data/rules.json new file mode 100644 index 00000000..e19abdc8 --- /dev/null +++ b/node_modules/psl/data/rules.json @@ -0,0 +1,8834 @@ +[ +"ac", +"com.ac", +"edu.ac", +"gov.ac", +"net.ac", +"mil.ac", +"org.ac", +"ad", +"nom.ad", +"ae", +"co.ae", +"net.ae", +"org.ae", +"sch.ae", +"ac.ae", +"gov.ae", +"mil.ae", +"aero", +"accident-investigation.aero", +"accident-prevention.aero", +"aerobatic.aero", +"aeroclub.aero", +"aerodrome.aero", +"agents.aero", +"aircraft.aero", +"airline.aero", +"airport.aero", +"air-surveillance.aero", +"airtraffic.aero", +"air-traffic-control.aero", +"ambulance.aero", +"amusement.aero", +"association.aero", +"author.aero", +"ballooning.aero", +"broker.aero", +"caa.aero", +"cargo.aero", +"catering.aero", +"certification.aero", +"championship.aero", +"charter.aero", +"civilaviation.aero", +"club.aero", +"conference.aero", +"consultant.aero", +"consulting.aero", +"control.aero", +"council.aero", +"crew.aero", +"design.aero", +"dgca.aero", +"educator.aero", +"emergency.aero", +"engine.aero", +"engineer.aero", +"entertainment.aero", +"equipment.aero", +"exchange.aero", +"express.aero", +"federation.aero", +"flight.aero", +"freight.aero", +"fuel.aero", +"gliding.aero", +"government.aero", +"groundhandling.aero", +"group.aero", +"hanggliding.aero", +"homebuilt.aero", +"insurance.aero", +"journal.aero", +"journalist.aero", +"leasing.aero", +"logistics.aero", +"magazine.aero", +"maintenance.aero", +"media.aero", +"microlight.aero", +"modelling.aero", +"navigation.aero", +"parachuting.aero", +"paragliding.aero", +"passenger-association.aero", +"pilot.aero", +"press.aero", +"production.aero", +"recreation.aero", +"repbody.aero", +"res.aero", +"research.aero", +"rotorcraft.aero", +"safety.aero", +"scientist.aero", +"services.aero", +"show.aero", +"skydiving.aero", +"software.aero", +"student.aero", +"trader.aero", +"trading.aero", +"trainer.aero", +"union.aero", +"workinggroup.aero", +"works.aero", +"af", +"gov.af", +"com.af", +"org.af", +"net.af", +"edu.af", +"ag", +"com.ag", +"org.ag", +"net.ag", +"co.ag", +"nom.ag", +"ai", +"off.ai", +"com.ai", +"net.ai", +"org.ai", +"al", +"com.al", +"edu.al", +"gov.al", +"mil.al", +"net.al", +"org.al", +"am", +"co.am", +"com.am", +"commune.am", +"net.am", +"org.am", +"ao", +"ed.ao", +"gv.ao", +"og.ao", +"co.ao", +"pb.ao", +"it.ao", +"aq", +"ar", +"com.ar", +"edu.ar", +"gob.ar", +"gov.ar", +"int.ar", +"mil.ar", +"musica.ar", +"net.ar", +"org.ar", +"tur.ar", +"arpa", +"e164.arpa", +"in-addr.arpa", +"ip6.arpa", +"iris.arpa", +"uri.arpa", +"urn.arpa", +"as", +"gov.as", +"asia", +"at", +"ac.at", +"co.at", +"gv.at", +"or.at", +"au", +"com.au", +"net.au", +"org.au", +"edu.au", +"gov.au", +"asn.au", +"id.au", +"info.au", +"conf.au", +"oz.au", +"act.au", +"nsw.au", +"nt.au", +"qld.au", +"sa.au", +"tas.au", +"vic.au", +"wa.au", +"act.edu.au", +"catholic.edu.au", +"nsw.edu.au", +"nt.edu.au", +"qld.edu.au", +"sa.edu.au", +"tas.edu.au", +"vic.edu.au", +"wa.edu.au", +"qld.gov.au", +"sa.gov.au", +"tas.gov.au", +"vic.gov.au", +"wa.gov.au", +"education.tas.edu.au", +"schools.nsw.edu.au", +"aw", +"com.aw", +"ax", +"az", +"com.az", +"net.az", +"int.az", +"gov.az", +"org.az", +"edu.az", +"info.az", 
+"pp.az", +"mil.az", +"name.az", +"pro.az", +"biz.az", +"ba", +"com.ba", +"edu.ba", +"gov.ba", +"mil.ba", +"net.ba", +"org.ba", +"bb", +"biz.bb", +"co.bb", +"com.bb", +"edu.bb", +"gov.bb", +"info.bb", +"net.bb", +"org.bb", +"store.bb", +"tv.bb", +"*.bd", +"be", +"ac.be", +"bf", +"gov.bf", +"bg", +"a.bg", +"b.bg", +"c.bg", +"d.bg", +"e.bg", +"f.bg", +"g.bg", +"h.bg", +"i.bg", +"j.bg", +"k.bg", +"l.bg", +"m.bg", +"n.bg", +"o.bg", +"p.bg", +"q.bg", +"r.bg", +"s.bg", +"t.bg", +"u.bg", +"v.bg", +"w.bg", +"x.bg", +"y.bg", +"z.bg", +"0.bg", +"1.bg", +"2.bg", +"3.bg", +"4.bg", +"5.bg", +"6.bg", +"7.bg", +"8.bg", +"9.bg", +"bh", +"com.bh", +"edu.bh", +"net.bh", +"org.bh", +"gov.bh", +"bi", +"co.bi", +"com.bi", +"edu.bi", +"or.bi", +"org.bi", +"biz", +"bj", +"asso.bj", +"barreau.bj", +"gouv.bj", +"bm", +"com.bm", +"edu.bm", +"gov.bm", +"net.bm", +"org.bm", +"bn", +"com.bn", +"edu.bn", +"gov.bn", +"net.bn", +"org.bn", +"bo", +"com.bo", +"edu.bo", +"gob.bo", +"int.bo", +"org.bo", +"net.bo", +"mil.bo", +"tv.bo", +"web.bo", +"academia.bo", +"agro.bo", +"arte.bo", +"blog.bo", +"bolivia.bo", +"ciencia.bo", +"cooperativa.bo", +"democracia.bo", +"deporte.bo", +"ecologia.bo", +"economia.bo", +"empresa.bo", +"indigena.bo", +"industria.bo", +"info.bo", +"medicina.bo", +"movimiento.bo", +"musica.bo", +"natural.bo", +"nombre.bo", +"noticias.bo", +"patria.bo", +"politica.bo", +"profesional.bo", +"plurinacional.bo", +"pueblo.bo", +"revista.bo", +"salud.bo", +"tecnologia.bo", +"tksat.bo", +"transporte.bo", +"wiki.bo", +"br", +"9guacu.br", +"abc.br", +"adm.br", +"adv.br", +"agr.br", +"aju.br", +"am.br", +"anani.br", +"aparecida.br", +"arq.br", +"art.br", +"ato.br", +"b.br", +"barueri.br", +"belem.br", +"bhz.br", +"bio.br", +"blog.br", +"bmd.br", +"boavista.br", +"bsb.br", +"campinagrande.br", +"campinas.br", +"caxias.br", +"cim.br", +"cng.br", +"cnt.br", +"com.br", +"contagem.br", +"coop.br", +"cri.br", +"cuiaba.br", +"curitiba.br", +"def.br", +"ecn.br", +"eco.br", +"edu.br", +"emp.br", +"eng.br", +"esp.br", +"etc.br", +"eti.br", +"far.br", +"feira.br", +"flog.br", +"floripa.br", +"fm.br", +"fnd.br", +"fortal.br", +"fot.br", +"foz.br", +"fst.br", +"g12.br", +"ggf.br", +"goiania.br", +"gov.br", +"ac.gov.br", +"al.gov.br", +"am.gov.br", +"ap.gov.br", +"ba.gov.br", +"ce.gov.br", +"df.gov.br", +"es.gov.br", +"go.gov.br", +"ma.gov.br", +"mg.gov.br", +"ms.gov.br", +"mt.gov.br", +"pa.gov.br", +"pb.gov.br", +"pe.gov.br", +"pi.gov.br", +"pr.gov.br", +"rj.gov.br", +"rn.gov.br", +"ro.gov.br", +"rr.gov.br", +"rs.gov.br", +"sc.gov.br", +"se.gov.br", +"sp.gov.br", +"to.gov.br", +"gru.br", +"imb.br", +"ind.br", +"inf.br", +"jab.br", +"jampa.br", +"jdf.br", +"joinville.br", +"jor.br", +"jus.br", +"leg.br", +"lel.br", +"londrina.br", +"macapa.br", +"maceio.br", +"manaus.br", +"maringa.br", +"mat.br", +"med.br", +"mil.br", +"morena.br", +"mp.br", +"mus.br", +"natal.br", +"net.br", +"niteroi.br", +"*.nom.br", +"not.br", +"ntr.br", +"odo.br", +"ong.br", +"org.br", +"osasco.br", +"palmas.br", +"poa.br", +"ppg.br", +"pro.br", +"psc.br", +"psi.br", +"pvh.br", +"qsl.br", +"radio.br", +"rec.br", +"recife.br", +"ribeirao.br", +"rio.br", +"riobranco.br", +"riopreto.br", +"salvador.br", +"sampa.br", +"santamaria.br", +"santoandre.br", +"saobernardo.br", +"saogonca.br", +"sjc.br", +"slg.br", +"slz.br", +"sorocaba.br", +"srv.br", +"taxi.br", +"tc.br", +"teo.br", +"the.br", +"tmp.br", +"trd.br", +"tur.br", +"tv.br", +"udi.br", +"vet.br", +"vix.br", +"vlog.br", +"wiki.br", +"zlg.br", +"bs", +"com.bs", +"net.bs", +"org.bs", +"edu.bs", +"gov.bs", 
+"bt", +"com.bt", +"edu.bt", +"gov.bt", +"net.bt", +"org.bt", +"bv", +"bw", +"co.bw", +"org.bw", +"by", +"gov.by", +"mil.by", +"com.by", +"of.by", +"bz", +"com.bz", +"net.bz", +"org.bz", +"edu.bz", +"gov.bz", +"ca", +"ab.ca", +"bc.ca", +"mb.ca", +"nb.ca", +"nf.ca", +"nl.ca", +"ns.ca", +"nt.ca", +"nu.ca", +"on.ca", +"pe.ca", +"qc.ca", +"sk.ca", +"yk.ca", +"gc.ca", +"cat", +"cc", +"cd", +"gov.cd", +"cf", +"cg", +"ch", +"ci", +"org.ci", +"or.ci", +"com.ci", +"co.ci", +"edu.ci", +"ed.ci", +"ac.ci", +"net.ci", +"go.ci", +"asso.ci", +"aéroport.ci", +"int.ci", +"presse.ci", +"md.ci", +"gouv.ci", +"*.ck", +"!www.ck", +"cl", +"aprendemas.cl", +"co.cl", +"gob.cl", +"gov.cl", +"mil.cl", +"cm", +"co.cm", +"com.cm", +"gov.cm", +"net.cm", +"cn", +"ac.cn", +"com.cn", +"edu.cn", +"gov.cn", +"net.cn", +"org.cn", +"mil.cn", +"公司.cn", +"网络.cn", +"網絡.cn", +"ah.cn", +"bj.cn", +"cq.cn", +"fj.cn", +"gd.cn", +"gs.cn", +"gz.cn", +"gx.cn", +"ha.cn", +"hb.cn", +"he.cn", +"hi.cn", +"hl.cn", +"hn.cn", +"jl.cn", +"js.cn", +"jx.cn", +"ln.cn", +"nm.cn", +"nx.cn", +"qh.cn", +"sc.cn", +"sd.cn", +"sh.cn", +"sn.cn", +"sx.cn", +"tj.cn", +"xj.cn", +"xz.cn", +"yn.cn", +"zj.cn", +"hk.cn", +"mo.cn", +"tw.cn", +"co", +"arts.co", +"com.co", +"edu.co", +"firm.co", +"gov.co", +"info.co", +"int.co", +"mil.co", +"net.co", +"nom.co", +"org.co", +"rec.co", +"web.co", +"com", +"coop", +"cr", +"ac.cr", +"co.cr", +"ed.cr", +"fi.cr", +"go.cr", +"or.cr", +"sa.cr", +"cu", +"com.cu", +"edu.cu", +"org.cu", +"net.cu", +"gov.cu", +"inf.cu", +"cv", +"cw", +"com.cw", +"edu.cw", +"net.cw", +"org.cw", +"cx", +"gov.cx", +"cy", +"ac.cy", +"biz.cy", +"com.cy", +"ekloges.cy", +"gov.cy", +"ltd.cy", +"name.cy", +"net.cy", +"org.cy", +"parliament.cy", +"press.cy", +"pro.cy", +"tm.cy", +"cz", +"de", +"dj", +"dk", +"dm", +"com.dm", +"net.dm", +"org.dm", +"edu.dm", +"gov.dm", +"do", +"art.do", +"com.do", +"edu.do", +"gob.do", +"gov.do", +"mil.do", +"net.do", +"org.do", +"sld.do", +"web.do", +"dz", +"com.dz", +"org.dz", +"net.dz", +"gov.dz", +"edu.dz", +"asso.dz", +"pol.dz", +"art.dz", +"ec", +"com.ec", +"info.ec", +"net.ec", +"fin.ec", +"k12.ec", +"med.ec", +"pro.ec", +"org.ec", +"edu.ec", +"gov.ec", +"gob.ec", +"mil.ec", +"edu", +"ee", +"edu.ee", +"gov.ee", +"riik.ee", +"lib.ee", +"med.ee", +"com.ee", +"pri.ee", +"aip.ee", +"org.ee", +"fie.ee", +"eg", +"com.eg", +"edu.eg", +"eun.eg", +"gov.eg", +"mil.eg", +"name.eg", +"net.eg", +"org.eg", +"sci.eg", +"*.er", +"es", +"com.es", +"nom.es", +"org.es", +"gob.es", +"edu.es", +"et", +"com.et", +"gov.et", +"org.et", +"edu.et", +"biz.et", +"name.et", +"info.et", +"net.et", +"eu", +"fi", +"aland.fi", +"fj", +"ac.fj", +"biz.fj", +"com.fj", +"gov.fj", +"info.fj", +"mil.fj", +"name.fj", +"net.fj", +"org.fj", +"pro.fj", +"*.fk", +"fm", +"fo", +"fr", +"asso.fr", +"com.fr", +"gouv.fr", +"nom.fr", +"prd.fr", +"tm.fr", +"aeroport.fr", +"avocat.fr", +"avoues.fr", +"cci.fr", +"chambagri.fr", +"chirurgiens-dentistes.fr", +"experts-comptables.fr", +"geometre-expert.fr", +"greta.fr", +"huissier-justice.fr", +"medecin.fr", +"notaires.fr", +"pharmacien.fr", +"port.fr", +"veterinaire.fr", +"ga", +"gb", +"gd", +"ge", +"com.ge", +"edu.ge", +"gov.ge", +"org.ge", +"mil.ge", +"net.ge", +"pvt.ge", +"gf", +"gg", +"co.gg", +"net.gg", +"org.gg", +"gh", +"com.gh", +"edu.gh", +"gov.gh", +"org.gh", +"mil.gh", +"gi", +"com.gi", +"ltd.gi", +"gov.gi", +"mod.gi", +"edu.gi", +"org.gi", +"gl", +"co.gl", +"com.gl", +"edu.gl", +"net.gl", +"org.gl", +"gm", +"gn", +"ac.gn", +"com.gn", +"edu.gn", +"gov.gn", +"org.gn", +"net.gn", +"gov", +"gp", +"com.gp", 
+"net.gp", +"mobi.gp", +"edu.gp", +"org.gp", +"asso.gp", +"gq", +"gr", +"com.gr", +"edu.gr", +"net.gr", +"org.gr", +"gov.gr", +"gs", +"gt", +"com.gt", +"edu.gt", +"gob.gt", +"ind.gt", +"mil.gt", +"net.gt", +"org.gt", +"gu", +"com.gu", +"edu.gu", +"gov.gu", +"guam.gu", +"info.gu", +"net.gu", +"org.gu", +"web.gu", +"gw", +"gy", +"co.gy", +"com.gy", +"edu.gy", +"gov.gy", +"net.gy", +"org.gy", +"hk", +"com.hk", +"edu.hk", +"gov.hk", +"idv.hk", +"net.hk", +"org.hk", +"公司.hk", +"教育.hk", +"敎育.hk", +"政府.hk", +"個人.hk", +"个人.hk", +"箇人.hk", +"網络.hk", +"网络.hk", +"组織.hk", +"網絡.hk", +"网絡.hk", +"组织.hk", +"組織.hk", +"組织.hk", +"hm", +"hn", +"com.hn", +"edu.hn", +"org.hn", +"net.hn", +"mil.hn", +"gob.hn", +"hr", +"iz.hr", +"from.hr", +"name.hr", +"com.hr", +"ht", +"com.ht", +"shop.ht", +"firm.ht", +"info.ht", +"adult.ht", +"net.ht", +"pro.ht", +"org.ht", +"med.ht", +"art.ht", +"coop.ht", +"pol.ht", +"asso.ht", +"edu.ht", +"rel.ht", +"gouv.ht", +"perso.ht", +"hu", +"co.hu", +"info.hu", +"org.hu", +"priv.hu", +"sport.hu", +"tm.hu", +"2000.hu", +"agrar.hu", +"bolt.hu", +"casino.hu", +"city.hu", +"erotica.hu", +"erotika.hu", +"film.hu", +"forum.hu", +"games.hu", +"hotel.hu", +"ingatlan.hu", +"jogasz.hu", +"konyvelo.hu", +"lakas.hu", +"media.hu", +"news.hu", +"reklam.hu", +"sex.hu", +"shop.hu", +"suli.hu", +"szex.hu", +"tozsde.hu", +"utazas.hu", +"video.hu", +"id", +"ac.id", +"biz.id", +"co.id", +"desa.id", +"go.id", +"mil.id", +"my.id", +"net.id", +"or.id", +"ponpes.id", +"sch.id", +"web.id", +"ie", +"gov.ie", +"il", +"ac.il", +"co.il", +"gov.il", +"idf.il", +"k12.il", +"muni.il", +"net.il", +"org.il", +"im", +"ac.im", +"co.im", +"com.im", +"ltd.co.im", +"net.im", +"org.im", +"plc.co.im", +"tt.im", +"tv.im", +"in", +"co.in", +"firm.in", +"net.in", +"org.in", +"gen.in", +"ind.in", +"nic.in", +"ac.in", +"edu.in", +"res.in", +"gov.in", +"mil.in", +"info", +"int", +"eu.int", +"io", +"com.io", +"iq", +"gov.iq", +"edu.iq", +"mil.iq", +"com.iq", +"org.iq", +"net.iq", +"ir", +"ac.ir", +"co.ir", +"gov.ir", +"id.ir", +"net.ir", +"org.ir", +"sch.ir", +"ایران.ir", +"ايران.ir", +"is", +"net.is", +"com.is", +"edu.is", +"gov.is", +"org.is", +"int.is", +"it", +"gov.it", +"edu.it", +"abr.it", +"abruzzo.it", +"aosta-valley.it", +"aostavalley.it", +"bas.it", +"basilicata.it", +"cal.it", +"calabria.it", +"cam.it", +"campania.it", +"emilia-romagna.it", +"emiliaromagna.it", +"emr.it", +"friuli-v-giulia.it", +"friuli-ve-giulia.it", +"friuli-vegiulia.it", +"friuli-venezia-giulia.it", +"friuli-veneziagiulia.it", +"friuli-vgiulia.it", +"friuliv-giulia.it", +"friulive-giulia.it", +"friulivegiulia.it", +"friulivenezia-giulia.it", +"friuliveneziagiulia.it", +"friulivgiulia.it", +"fvg.it", +"laz.it", +"lazio.it", +"lig.it", +"liguria.it", +"lom.it", +"lombardia.it", +"lombardy.it", +"lucania.it", +"mar.it", +"marche.it", +"mol.it", +"molise.it", +"piedmont.it", +"piemonte.it", +"pmn.it", +"pug.it", +"puglia.it", +"sar.it", +"sardegna.it", +"sardinia.it", +"sic.it", +"sicilia.it", +"sicily.it", +"taa.it", +"tos.it", +"toscana.it", +"trentin-sud-tirol.it", +"trentin-süd-tirol.it", +"trentin-sudtirol.it", +"trentin-südtirol.it", +"trentin-sued-tirol.it", +"trentin-suedtirol.it", +"trentino-a-adige.it", +"trentino-aadige.it", +"trentino-alto-adige.it", +"trentino-altoadige.it", +"trentino-s-tirol.it", +"trentino-stirol.it", +"trentino-sud-tirol.it", +"trentino-süd-tirol.it", +"trentino-sudtirol.it", +"trentino-südtirol.it", +"trentino-sued-tirol.it", +"trentino-suedtirol.it", +"trentino.it", +"trentinoa-adige.it", +"trentinoaadige.it", 
+"trentinoalto-adige.it", +"trentinoaltoadige.it", +"trentinos-tirol.it", +"trentinostirol.it", +"trentinosud-tirol.it", +"trentinosüd-tirol.it", +"trentinosudtirol.it", +"trentinosüdtirol.it", +"trentinosued-tirol.it", +"trentinosuedtirol.it", +"trentinsud-tirol.it", +"trentinsüd-tirol.it", +"trentinsudtirol.it", +"trentinsüdtirol.it", +"trentinsued-tirol.it", +"trentinsuedtirol.it", +"tuscany.it", +"umb.it", +"umbria.it", +"val-d-aosta.it", +"val-daosta.it", +"vald-aosta.it", +"valdaosta.it", +"valle-aosta.it", +"valle-d-aosta.it", +"valle-daosta.it", +"valleaosta.it", +"valled-aosta.it", +"valledaosta.it", +"vallee-aoste.it", +"vallée-aoste.it", +"vallee-d-aoste.it", +"vallée-d-aoste.it", +"valleeaoste.it", +"valléeaoste.it", +"valleedaoste.it", +"valléedaoste.it", +"vao.it", +"vda.it", +"ven.it", +"veneto.it", +"ag.it", +"agrigento.it", +"al.it", +"alessandria.it", +"alto-adige.it", +"altoadige.it", +"an.it", +"ancona.it", +"andria-barletta-trani.it", +"andria-trani-barletta.it", +"andriabarlettatrani.it", +"andriatranibarletta.it", +"ao.it", +"aosta.it", +"aoste.it", +"ap.it", +"aq.it", +"aquila.it", +"ar.it", +"arezzo.it", +"ascoli-piceno.it", +"ascolipiceno.it", +"asti.it", +"at.it", +"av.it", +"avellino.it", +"ba.it", +"balsan-sudtirol.it", +"balsan-südtirol.it", +"balsan-suedtirol.it", +"balsan.it", +"bari.it", +"barletta-trani-andria.it", +"barlettatraniandria.it", +"belluno.it", +"benevento.it", +"bergamo.it", +"bg.it", +"bi.it", +"biella.it", +"bl.it", +"bn.it", +"bo.it", +"bologna.it", +"bolzano-altoadige.it", +"bolzano.it", +"bozen-sudtirol.it", +"bozen-südtirol.it", +"bozen-suedtirol.it", +"bozen.it", +"br.it", +"brescia.it", +"brindisi.it", +"bs.it", +"bt.it", +"bulsan-sudtirol.it", +"bulsan-südtirol.it", +"bulsan-suedtirol.it", +"bulsan.it", +"bz.it", +"ca.it", +"cagliari.it", +"caltanissetta.it", +"campidano-medio.it", +"campidanomedio.it", +"campobasso.it", +"carbonia-iglesias.it", +"carboniaiglesias.it", +"carrara-massa.it", +"carraramassa.it", +"caserta.it", +"catania.it", +"catanzaro.it", +"cb.it", +"ce.it", +"cesena-forli.it", +"cesena-forlì.it", +"cesenaforli.it", +"cesenaforlì.it", +"ch.it", +"chieti.it", +"ci.it", +"cl.it", +"cn.it", +"co.it", +"como.it", +"cosenza.it", +"cr.it", +"cremona.it", +"crotone.it", +"cs.it", +"ct.it", +"cuneo.it", +"cz.it", +"dell-ogliastra.it", +"dellogliastra.it", +"en.it", +"enna.it", +"fc.it", +"fe.it", +"fermo.it", +"ferrara.it", +"fg.it", +"fi.it", +"firenze.it", +"florence.it", +"fm.it", +"foggia.it", +"forli-cesena.it", +"forlì-cesena.it", +"forlicesena.it", +"forlìcesena.it", +"fr.it", +"frosinone.it", +"ge.it", +"genoa.it", +"genova.it", +"go.it", +"gorizia.it", +"gr.it", +"grosseto.it", +"iglesias-carbonia.it", +"iglesiascarbonia.it", +"im.it", +"imperia.it", +"is.it", +"isernia.it", +"kr.it", +"la-spezia.it", +"laquila.it", +"laspezia.it", +"latina.it", +"lc.it", +"le.it", +"lecce.it", +"lecco.it", +"li.it", +"livorno.it", +"lo.it", +"lodi.it", +"lt.it", +"lu.it", +"lucca.it", +"macerata.it", +"mantova.it", +"massa-carrara.it", +"massacarrara.it", +"matera.it", +"mb.it", +"mc.it", +"me.it", +"medio-campidano.it", +"mediocampidano.it", +"messina.it", +"mi.it", +"milan.it", +"milano.it", +"mn.it", +"mo.it", +"modena.it", +"monza-brianza.it", +"monza-e-della-brianza.it", +"monza.it", +"monzabrianza.it", +"monzaebrianza.it", +"monzaedellabrianza.it", +"ms.it", +"mt.it", +"na.it", +"naples.it", +"napoli.it", +"no.it", +"novara.it", +"nu.it", +"nuoro.it", +"og.it", +"ogliastra.it", +"olbia-tempio.it", +"olbiatempio.it", +"or.it", 
+"oristano.it", +"ot.it", +"pa.it", +"padova.it", +"padua.it", +"palermo.it", +"parma.it", +"pavia.it", +"pc.it", +"pd.it", +"pe.it", +"perugia.it", +"pesaro-urbino.it", +"pesarourbino.it", +"pescara.it", +"pg.it", +"pi.it", +"piacenza.it", +"pisa.it", +"pistoia.it", +"pn.it", +"po.it", +"pordenone.it", +"potenza.it", +"pr.it", +"prato.it", +"pt.it", +"pu.it", +"pv.it", +"pz.it", +"ra.it", +"ragusa.it", +"ravenna.it", +"rc.it", +"re.it", +"reggio-calabria.it", +"reggio-emilia.it", +"reggiocalabria.it", +"reggioemilia.it", +"rg.it", +"ri.it", +"rieti.it", +"rimini.it", +"rm.it", +"rn.it", +"ro.it", +"roma.it", +"rome.it", +"rovigo.it", +"sa.it", +"salerno.it", +"sassari.it", +"savona.it", +"si.it", +"siena.it", +"siracusa.it", +"so.it", +"sondrio.it", +"sp.it", +"sr.it", +"ss.it", +"suedtirol.it", +"südtirol.it", +"sv.it", +"ta.it", +"taranto.it", +"te.it", +"tempio-olbia.it", +"tempioolbia.it", +"teramo.it", +"terni.it", +"tn.it", +"to.it", +"torino.it", +"tp.it", +"tr.it", +"trani-andria-barletta.it", +"trani-barletta-andria.it", +"traniandriabarletta.it", +"tranibarlettaandria.it", +"trapani.it", +"trento.it", +"treviso.it", +"trieste.it", +"ts.it", +"turin.it", +"tv.it", +"ud.it", +"udine.it", +"urbino-pesaro.it", +"urbinopesaro.it", +"va.it", +"varese.it", +"vb.it", +"vc.it", +"ve.it", +"venezia.it", +"venice.it", +"verbania.it", +"vercelli.it", +"verona.it", +"vi.it", +"vibo-valentia.it", +"vibovalentia.it", +"vicenza.it", +"viterbo.it", +"vr.it", +"vs.it", +"vt.it", +"vv.it", +"je", +"co.je", +"net.je", +"org.je", +"*.jm", +"jo", +"com.jo", +"org.jo", +"net.jo", +"edu.jo", +"sch.jo", +"gov.jo", +"mil.jo", +"name.jo", +"jobs", +"jp", +"ac.jp", +"ad.jp", +"co.jp", +"ed.jp", +"go.jp", +"gr.jp", +"lg.jp", +"ne.jp", +"or.jp", +"aichi.jp", +"akita.jp", +"aomori.jp", +"chiba.jp", +"ehime.jp", +"fukui.jp", +"fukuoka.jp", +"fukushima.jp", +"gifu.jp", +"gunma.jp", +"hiroshima.jp", +"hokkaido.jp", +"hyogo.jp", +"ibaraki.jp", +"ishikawa.jp", +"iwate.jp", +"kagawa.jp", +"kagoshima.jp", +"kanagawa.jp", +"kochi.jp", +"kumamoto.jp", +"kyoto.jp", +"mie.jp", +"miyagi.jp", +"miyazaki.jp", +"nagano.jp", +"nagasaki.jp", +"nara.jp", +"niigata.jp", +"oita.jp", +"okayama.jp", +"okinawa.jp", +"osaka.jp", +"saga.jp", +"saitama.jp", +"shiga.jp", +"shimane.jp", +"shizuoka.jp", +"tochigi.jp", +"tokushima.jp", +"tokyo.jp", +"tottori.jp", +"toyama.jp", +"wakayama.jp", +"yamagata.jp", +"yamaguchi.jp", +"yamanashi.jp", +"栃木.jp", +"愛知.jp", +"愛媛.jp", +"兵庫.jp", +"熊本.jp", +"茨城.jp", +"北海道.jp", +"千葉.jp", +"和歌山.jp", +"長崎.jp", +"長野.jp", +"新潟.jp", +"青森.jp", +"静岡.jp", +"東京.jp", +"石川.jp", +"埼玉.jp", +"三重.jp", +"京都.jp", +"佐賀.jp", +"大分.jp", +"大阪.jp", +"奈良.jp", +"宮城.jp", +"宮崎.jp", +"富山.jp", +"山口.jp", +"山形.jp", +"山梨.jp", +"岩手.jp", +"岐阜.jp", +"岡山.jp", +"島根.jp", +"広島.jp", +"徳島.jp", +"沖縄.jp", +"滋賀.jp", +"神奈川.jp", +"福井.jp", +"福岡.jp", +"福島.jp", +"秋田.jp", +"群馬.jp", +"香川.jp", +"高知.jp", +"鳥取.jp", +"鹿児島.jp", +"*.kawasaki.jp", +"*.kitakyushu.jp", +"*.kobe.jp", +"*.nagoya.jp", +"*.sapporo.jp", +"*.sendai.jp", +"*.yokohama.jp", +"!city.kawasaki.jp", +"!city.kitakyushu.jp", +"!city.kobe.jp", +"!city.nagoya.jp", +"!city.sapporo.jp", +"!city.sendai.jp", +"!city.yokohama.jp", +"aisai.aichi.jp", +"ama.aichi.jp", +"anjo.aichi.jp", +"asuke.aichi.jp", +"chiryu.aichi.jp", +"chita.aichi.jp", +"fuso.aichi.jp", +"gamagori.aichi.jp", +"handa.aichi.jp", +"hazu.aichi.jp", +"hekinan.aichi.jp", +"higashiura.aichi.jp", +"ichinomiya.aichi.jp", +"inazawa.aichi.jp", +"inuyama.aichi.jp", +"isshiki.aichi.jp", +"iwakura.aichi.jp", +"kanie.aichi.jp", 
+"kariya.aichi.jp", +"kasugai.aichi.jp", +"kira.aichi.jp", +"kiyosu.aichi.jp", +"komaki.aichi.jp", +"konan.aichi.jp", +"kota.aichi.jp", +"mihama.aichi.jp", +"miyoshi.aichi.jp", +"nishio.aichi.jp", +"nisshin.aichi.jp", +"obu.aichi.jp", +"oguchi.aichi.jp", +"oharu.aichi.jp", +"okazaki.aichi.jp", +"owariasahi.aichi.jp", +"seto.aichi.jp", +"shikatsu.aichi.jp", +"shinshiro.aichi.jp", +"shitara.aichi.jp", +"tahara.aichi.jp", +"takahama.aichi.jp", +"tobishima.aichi.jp", +"toei.aichi.jp", +"togo.aichi.jp", +"tokai.aichi.jp", +"tokoname.aichi.jp", +"toyoake.aichi.jp", +"toyohashi.aichi.jp", +"toyokawa.aichi.jp", +"toyone.aichi.jp", +"toyota.aichi.jp", +"tsushima.aichi.jp", +"yatomi.aichi.jp", +"akita.akita.jp", +"daisen.akita.jp", +"fujisato.akita.jp", +"gojome.akita.jp", +"hachirogata.akita.jp", +"happou.akita.jp", +"higashinaruse.akita.jp", +"honjo.akita.jp", +"honjyo.akita.jp", +"ikawa.akita.jp", +"kamikoani.akita.jp", +"kamioka.akita.jp", +"katagami.akita.jp", +"kazuno.akita.jp", +"kitaakita.akita.jp", +"kosaka.akita.jp", +"kyowa.akita.jp", +"misato.akita.jp", +"mitane.akita.jp", +"moriyoshi.akita.jp", +"nikaho.akita.jp", +"noshiro.akita.jp", +"odate.akita.jp", +"oga.akita.jp", +"ogata.akita.jp", +"semboku.akita.jp", +"yokote.akita.jp", +"yurihonjo.akita.jp", +"aomori.aomori.jp", +"gonohe.aomori.jp", +"hachinohe.aomori.jp", +"hashikami.aomori.jp", +"hiranai.aomori.jp", +"hirosaki.aomori.jp", +"itayanagi.aomori.jp", +"kuroishi.aomori.jp", +"misawa.aomori.jp", +"mutsu.aomori.jp", +"nakadomari.aomori.jp", +"noheji.aomori.jp", +"oirase.aomori.jp", +"owani.aomori.jp", +"rokunohe.aomori.jp", +"sannohe.aomori.jp", +"shichinohe.aomori.jp", +"shingo.aomori.jp", +"takko.aomori.jp", +"towada.aomori.jp", +"tsugaru.aomori.jp", +"tsuruta.aomori.jp", +"abiko.chiba.jp", +"asahi.chiba.jp", +"chonan.chiba.jp", +"chosei.chiba.jp", +"choshi.chiba.jp", +"chuo.chiba.jp", +"funabashi.chiba.jp", +"futtsu.chiba.jp", +"hanamigawa.chiba.jp", +"ichihara.chiba.jp", +"ichikawa.chiba.jp", +"ichinomiya.chiba.jp", +"inzai.chiba.jp", +"isumi.chiba.jp", +"kamagaya.chiba.jp", +"kamogawa.chiba.jp", +"kashiwa.chiba.jp", +"katori.chiba.jp", +"katsuura.chiba.jp", +"kimitsu.chiba.jp", +"kisarazu.chiba.jp", +"kozaki.chiba.jp", +"kujukuri.chiba.jp", +"kyonan.chiba.jp", +"matsudo.chiba.jp", +"midori.chiba.jp", +"mihama.chiba.jp", +"minamiboso.chiba.jp", +"mobara.chiba.jp", +"mutsuzawa.chiba.jp", +"nagara.chiba.jp", +"nagareyama.chiba.jp", +"narashino.chiba.jp", +"narita.chiba.jp", +"noda.chiba.jp", +"oamishirasato.chiba.jp", +"omigawa.chiba.jp", +"onjuku.chiba.jp", +"otaki.chiba.jp", +"sakae.chiba.jp", +"sakura.chiba.jp", +"shimofusa.chiba.jp", +"shirako.chiba.jp", +"shiroi.chiba.jp", +"shisui.chiba.jp", +"sodegaura.chiba.jp", +"sosa.chiba.jp", +"tako.chiba.jp", +"tateyama.chiba.jp", +"togane.chiba.jp", +"tohnosho.chiba.jp", +"tomisato.chiba.jp", +"urayasu.chiba.jp", +"yachimata.chiba.jp", +"yachiyo.chiba.jp", +"yokaichiba.chiba.jp", +"yokoshibahikari.chiba.jp", +"yotsukaido.chiba.jp", +"ainan.ehime.jp", +"honai.ehime.jp", +"ikata.ehime.jp", +"imabari.ehime.jp", +"iyo.ehime.jp", +"kamijima.ehime.jp", +"kihoku.ehime.jp", +"kumakogen.ehime.jp", +"masaki.ehime.jp", +"matsuno.ehime.jp", +"matsuyama.ehime.jp", +"namikata.ehime.jp", +"niihama.ehime.jp", +"ozu.ehime.jp", +"saijo.ehime.jp", +"seiyo.ehime.jp", +"shikokuchuo.ehime.jp", +"tobe.ehime.jp", +"toon.ehime.jp", +"uchiko.ehime.jp", +"uwajima.ehime.jp", +"yawatahama.ehime.jp", +"echizen.fukui.jp", +"eiheiji.fukui.jp", +"fukui.fukui.jp", +"ikeda.fukui.jp", +"katsuyama.fukui.jp", 
+"mihama.fukui.jp", +"minamiechizen.fukui.jp", +"obama.fukui.jp", +"ohi.fukui.jp", +"ono.fukui.jp", +"sabae.fukui.jp", +"sakai.fukui.jp", +"takahama.fukui.jp", +"tsuruga.fukui.jp", +"wakasa.fukui.jp", +"ashiya.fukuoka.jp", +"buzen.fukuoka.jp", +"chikugo.fukuoka.jp", +"chikuho.fukuoka.jp", +"chikujo.fukuoka.jp", +"chikushino.fukuoka.jp", +"chikuzen.fukuoka.jp", +"chuo.fukuoka.jp", +"dazaifu.fukuoka.jp", +"fukuchi.fukuoka.jp", +"hakata.fukuoka.jp", +"higashi.fukuoka.jp", +"hirokawa.fukuoka.jp", +"hisayama.fukuoka.jp", +"iizuka.fukuoka.jp", +"inatsuki.fukuoka.jp", +"kaho.fukuoka.jp", +"kasuga.fukuoka.jp", +"kasuya.fukuoka.jp", +"kawara.fukuoka.jp", +"keisen.fukuoka.jp", +"koga.fukuoka.jp", +"kurate.fukuoka.jp", +"kurogi.fukuoka.jp", +"kurume.fukuoka.jp", +"minami.fukuoka.jp", +"miyako.fukuoka.jp", +"miyama.fukuoka.jp", +"miyawaka.fukuoka.jp", +"mizumaki.fukuoka.jp", +"munakata.fukuoka.jp", +"nakagawa.fukuoka.jp", +"nakama.fukuoka.jp", +"nishi.fukuoka.jp", +"nogata.fukuoka.jp", +"ogori.fukuoka.jp", +"okagaki.fukuoka.jp", +"okawa.fukuoka.jp", +"oki.fukuoka.jp", +"omuta.fukuoka.jp", +"onga.fukuoka.jp", +"onojo.fukuoka.jp", +"oto.fukuoka.jp", +"saigawa.fukuoka.jp", +"sasaguri.fukuoka.jp", +"shingu.fukuoka.jp", +"shinyoshitomi.fukuoka.jp", +"shonai.fukuoka.jp", +"soeda.fukuoka.jp", +"sue.fukuoka.jp", +"tachiarai.fukuoka.jp", +"tagawa.fukuoka.jp", +"takata.fukuoka.jp", +"toho.fukuoka.jp", +"toyotsu.fukuoka.jp", +"tsuiki.fukuoka.jp", +"ukiha.fukuoka.jp", +"umi.fukuoka.jp", +"usui.fukuoka.jp", +"yamada.fukuoka.jp", +"yame.fukuoka.jp", +"yanagawa.fukuoka.jp", +"yukuhashi.fukuoka.jp", +"aizubange.fukushima.jp", +"aizumisato.fukushima.jp", +"aizuwakamatsu.fukushima.jp", +"asakawa.fukushima.jp", +"bandai.fukushima.jp", +"date.fukushima.jp", +"fukushima.fukushima.jp", +"furudono.fukushima.jp", +"futaba.fukushima.jp", +"hanawa.fukushima.jp", +"higashi.fukushima.jp", +"hirata.fukushima.jp", +"hirono.fukushima.jp", +"iitate.fukushima.jp", +"inawashiro.fukushima.jp", +"ishikawa.fukushima.jp", +"iwaki.fukushima.jp", +"izumizaki.fukushima.jp", +"kagamiishi.fukushima.jp", +"kaneyama.fukushima.jp", +"kawamata.fukushima.jp", +"kitakata.fukushima.jp", +"kitashiobara.fukushima.jp", +"koori.fukushima.jp", +"koriyama.fukushima.jp", +"kunimi.fukushima.jp", +"miharu.fukushima.jp", +"mishima.fukushima.jp", +"namie.fukushima.jp", +"nango.fukushima.jp", +"nishiaizu.fukushima.jp", +"nishigo.fukushima.jp", +"okuma.fukushima.jp", +"omotego.fukushima.jp", +"ono.fukushima.jp", +"otama.fukushima.jp", +"samegawa.fukushima.jp", +"shimogo.fukushima.jp", +"shirakawa.fukushima.jp", +"showa.fukushima.jp", +"soma.fukushima.jp", +"sukagawa.fukushima.jp", +"taishin.fukushima.jp", +"tamakawa.fukushima.jp", +"tanagura.fukushima.jp", +"tenei.fukushima.jp", +"yabuki.fukushima.jp", +"yamato.fukushima.jp", +"yamatsuri.fukushima.jp", +"yanaizu.fukushima.jp", +"yugawa.fukushima.jp", +"anpachi.gifu.jp", +"ena.gifu.jp", +"gifu.gifu.jp", +"ginan.gifu.jp", +"godo.gifu.jp", +"gujo.gifu.jp", +"hashima.gifu.jp", +"hichiso.gifu.jp", +"hida.gifu.jp", +"higashishirakawa.gifu.jp", +"ibigawa.gifu.jp", +"ikeda.gifu.jp", +"kakamigahara.gifu.jp", +"kani.gifu.jp", +"kasahara.gifu.jp", +"kasamatsu.gifu.jp", +"kawaue.gifu.jp", +"kitagata.gifu.jp", +"mino.gifu.jp", +"minokamo.gifu.jp", +"mitake.gifu.jp", +"mizunami.gifu.jp", +"motosu.gifu.jp", +"nakatsugawa.gifu.jp", +"ogaki.gifu.jp", +"sakahogi.gifu.jp", +"seki.gifu.jp", +"sekigahara.gifu.jp", +"shirakawa.gifu.jp", +"tajimi.gifu.jp", +"takayama.gifu.jp", +"tarui.gifu.jp", +"toki.gifu.jp", +"tomika.gifu.jp", 
+"wanouchi.gifu.jp", +"yamagata.gifu.jp", +"yaotsu.gifu.jp", +"yoro.gifu.jp", +"annaka.gunma.jp", +"chiyoda.gunma.jp", +"fujioka.gunma.jp", +"higashiagatsuma.gunma.jp", +"isesaki.gunma.jp", +"itakura.gunma.jp", +"kanna.gunma.jp", +"kanra.gunma.jp", +"katashina.gunma.jp", +"kawaba.gunma.jp", +"kiryu.gunma.jp", +"kusatsu.gunma.jp", +"maebashi.gunma.jp", +"meiwa.gunma.jp", +"midori.gunma.jp", +"minakami.gunma.jp", +"naganohara.gunma.jp", +"nakanojo.gunma.jp", +"nanmoku.gunma.jp", +"numata.gunma.jp", +"oizumi.gunma.jp", +"ora.gunma.jp", +"ota.gunma.jp", +"shibukawa.gunma.jp", +"shimonita.gunma.jp", +"shinto.gunma.jp", +"showa.gunma.jp", +"takasaki.gunma.jp", +"takayama.gunma.jp", +"tamamura.gunma.jp", +"tatebayashi.gunma.jp", +"tomioka.gunma.jp", +"tsukiyono.gunma.jp", +"tsumagoi.gunma.jp", +"ueno.gunma.jp", +"yoshioka.gunma.jp", +"asaminami.hiroshima.jp", +"daiwa.hiroshima.jp", +"etajima.hiroshima.jp", +"fuchu.hiroshima.jp", +"fukuyama.hiroshima.jp", +"hatsukaichi.hiroshima.jp", +"higashihiroshima.hiroshima.jp", +"hongo.hiroshima.jp", +"jinsekikogen.hiroshima.jp", +"kaita.hiroshima.jp", +"kui.hiroshima.jp", +"kumano.hiroshima.jp", +"kure.hiroshima.jp", +"mihara.hiroshima.jp", +"miyoshi.hiroshima.jp", +"naka.hiroshima.jp", +"onomichi.hiroshima.jp", +"osakikamijima.hiroshima.jp", +"otake.hiroshima.jp", +"saka.hiroshima.jp", +"sera.hiroshima.jp", +"seranishi.hiroshima.jp", +"shinichi.hiroshima.jp", +"shobara.hiroshima.jp", +"takehara.hiroshima.jp", +"abashiri.hokkaido.jp", +"abira.hokkaido.jp", +"aibetsu.hokkaido.jp", +"akabira.hokkaido.jp", +"akkeshi.hokkaido.jp", +"asahikawa.hokkaido.jp", +"ashibetsu.hokkaido.jp", +"ashoro.hokkaido.jp", +"assabu.hokkaido.jp", +"atsuma.hokkaido.jp", +"bibai.hokkaido.jp", +"biei.hokkaido.jp", +"bifuka.hokkaido.jp", +"bihoro.hokkaido.jp", +"biratori.hokkaido.jp", +"chippubetsu.hokkaido.jp", +"chitose.hokkaido.jp", +"date.hokkaido.jp", +"ebetsu.hokkaido.jp", +"embetsu.hokkaido.jp", +"eniwa.hokkaido.jp", +"erimo.hokkaido.jp", +"esan.hokkaido.jp", +"esashi.hokkaido.jp", +"fukagawa.hokkaido.jp", +"fukushima.hokkaido.jp", +"furano.hokkaido.jp", +"furubira.hokkaido.jp", +"haboro.hokkaido.jp", +"hakodate.hokkaido.jp", +"hamatonbetsu.hokkaido.jp", +"hidaka.hokkaido.jp", +"higashikagura.hokkaido.jp", +"higashikawa.hokkaido.jp", +"hiroo.hokkaido.jp", +"hokuryu.hokkaido.jp", +"hokuto.hokkaido.jp", +"honbetsu.hokkaido.jp", +"horokanai.hokkaido.jp", +"horonobe.hokkaido.jp", +"ikeda.hokkaido.jp", +"imakane.hokkaido.jp", +"ishikari.hokkaido.jp", +"iwamizawa.hokkaido.jp", +"iwanai.hokkaido.jp", +"kamifurano.hokkaido.jp", +"kamikawa.hokkaido.jp", +"kamishihoro.hokkaido.jp", +"kamisunagawa.hokkaido.jp", +"kamoenai.hokkaido.jp", +"kayabe.hokkaido.jp", +"kembuchi.hokkaido.jp", +"kikonai.hokkaido.jp", +"kimobetsu.hokkaido.jp", +"kitahiroshima.hokkaido.jp", +"kitami.hokkaido.jp", +"kiyosato.hokkaido.jp", +"koshimizu.hokkaido.jp", +"kunneppu.hokkaido.jp", +"kuriyama.hokkaido.jp", +"kuromatsunai.hokkaido.jp", +"kushiro.hokkaido.jp", +"kutchan.hokkaido.jp", +"kyowa.hokkaido.jp", +"mashike.hokkaido.jp", +"matsumae.hokkaido.jp", +"mikasa.hokkaido.jp", +"minamifurano.hokkaido.jp", +"mombetsu.hokkaido.jp", +"moseushi.hokkaido.jp", +"mukawa.hokkaido.jp", +"muroran.hokkaido.jp", +"naie.hokkaido.jp", +"nakagawa.hokkaido.jp", +"nakasatsunai.hokkaido.jp", +"nakatombetsu.hokkaido.jp", +"nanae.hokkaido.jp", +"nanporo.hokkaido.jp", +"nayoro.hokkaido.jp", +"nemuro.hokkaido.jp", +"niikappu.hokkaido.jp", +"niki.hokkaido.jp", +"nishiokoppe.hokkaido.jp", +"noboribetsu.hokkaido.jp", +"numata.hokkaido.jp", 
+"obihiro.hokkaido.jp", +"obira.hokkaido.jp", +"oketo.hokkaido.jp", +"okoppe.hokkaido.jp", +"otaru.hokkaido.jp", +"otobe.hokkaido.jp", +"otofuke.hokkaido.jp", +"otoineppu.hokkaido.jp", +"oumu.hokkaido.jp", +"ozora.hokkaido.jp", +"pippu.hokkaido.jp", +"rankoshi.hokkaido.jp", +"rebun.hokkaido.jp", +"rikubetsu.hokkaido.jp", +"rishiri.hokkaido.jp", +"rishirifuji.hokkaido.jp", +"saroma.hokkaido.jp", +"sarufutsu.hokkaido.jp", +"shakotan.hokkaido.jp", +"shari.hokkaido.jp", +"shibecha.hokkaido.jp", +"shibetsu.hokkaido.jp", +"shikabe.hokkaido.jp", +"shikaoi.hokkaido.jp", +"shimamaki.hokkaido.jp", +"shimizu.hokkaido.jp", +"shimokawa.hokkaido.jp", +"shinshinotsu.hokkaido.jp", +"shintoku.hokkaido.jp", +"shiranuka.hokkaido.jp", +"shiraoi.hokkaido.jp", +"shiriuchi.hokkaido.jp", +"sobetsu.hokkaido.jp", +"sunagawa.hokkaido.jp", +"taiki.hokkaido.jp", +"takasu.hokkaido.jp", +"takikawa.hokkaido.jp", +"takinoue.hokkaido.jp", +"teshikaga.hokkaido.jp", +"tobetsu.hokkaido.jp", +"tohma.hokkaido.jp", +"tomakomai.hokkaido.jp", +"tomari.hokkaido.jp", +"toya.hokkaido.jp", +"toyako.hokkaido.jp", +"toyotomi.hokkaido.jp", +"toyoura.hokkaido.jp", +"tsubetsu.hokkaido.jp", +"tsukigata.hokkaido.jp", +"urakawa.hokkaido.jp", +"urausu.hokkaido.jp", +"uryu.hokkaido.jp", +"utashinai.hokkaido.jp", +"wakkanai.hokkaido.jp", +"wassamu.hokkaido.jp", +"yakumo.hokkaido.jp", +"yoichi.hokkaido.jp", +"aioi.hyogo.jp", +"akashi.hyogo.jp", +"ako.hyogo.jp", +"amagasaki.hyogo.jp", +"aogaki.hyogo.jp", +"asago.hyogo.jp", +"ashiya.hyogo.jp", +"awaji.hyogo.jp", +"fukusaki.hyogo.jp", +"goshiki.hyogo.jp", +"harima.hyogo.jp", +"himeji.hyogo.jp", +"ichikawa.hyogo.jp", +"inagawa.hyogo.jp", +"itami.hyogo.jp", +"kakogawa.hyogo.jp", +"kamigori.hyogo.jp", +"kamikawa.hyogo.jp", +"kasai.hyogo.jp", +"kasuga.hyogo.jp", +"kawanishi.hyogo.jp", +"miki.hyogo.jp", +"minamiawaji.hyogo.jp", +"nishinomiya.hyogo.jp", +"nishiwaki.hyogo.jp", +"ono.hyogo.jp", +"sanda.hyogo.jp", +"sannan.hyogo.jp", +"sasayama.hyogo.jp", +"sayo.hyogo.jp", +"shingu.hyogo.jp", +"shinonsen.hyogo.jp", +"shiso.hyogo.jp", +"sumoto.hyogo.jp", +"taishi.hyogo.jp", +"taka.hyogo.jp", +"takarazuka.hyogo.jp", +"takasago.hyogo.jp", +"takino.hyogo.jp", +"tamba.hyogo.jp", +"tatsuno.hyogo.jp", +"toyooka.hyogo.jp", +"yabu.hyogo.jp", +"yashiro.hyogo.jp", +"yoka.hyogo.jp", +"yokawa.hyogo.jp", +"ami.ibaraki.jp", +"asahi.ibaraki.jp", +"bando.ibaraki.jp", +"chikusei.ibaraki.jp", +"daigo.ibaraki.jp", +"fujishiro.ibaraki.jp", +"hitachi.ibaraki.jp", +"hitachinaka.ibaraki.jp", +"hitachiomiya.ibaraki.jp", +"hitachiota.ibaraki.jp", +"ibaraki.ibaraki.jp", +"ina.ibaraki.jp", +"inashiki.ibaraki.jp", +"itako.ibaraki.jp", +"iwama.ibaraki.jp", +"joso.ibaraki.jp", +"kamisu.ibaraki.jp", +"kasama.ibaraki.jp", +"kashima.ibaraki.jp", +"kasumigaura.ibaraki.jp", +"koga.ibaraki.jp", +"miho.ibaraki.jp", +"mito.ibaraki.jp", +"moriya.ibaraki.jp", +"naka.ibaraki.jp", +"namegata.ibaraki.jp", +"oarai.ibaraki.jp", +"ogawa.ibaraki.jp", +"omitama.ibaraki.jp", +"ryugasaki.ibaraki.jp", +"sakai.ibaraki.jp", +"sakuragawa.ibaraki.jp", +"shimodate.ibaraki.jp", +"shimotsuma.ibaraki.jp", +"shirosato.ibaraki.jp", +"sowa.ibaraki.jp", +"suifu.ibaraki.jp", +"takahagi.ibaraki.jp", +"tamatsukuri.ibaraki.jp", +"tokai.ibaraki.jp", +"tomobe.ibaraki.jp", +"tone.ibaraki.jp", +"toride.ibaraki.jp", +"tsuchiura.ibaraki.jp", +"tsukuba.ibaraki.jp", +"uchihara.ibaraki.jp", +"ushiku.ibaraki.jp", +"yachiyo.ibaraki.jp", +"yamagata.ibaraki.jp", +"yawara.ibaraki.jp", +"yuki.ibaraki.jp", +"anamizu.ishikawa.jp", +"hakui.ishikawa.jp", +"hakusan.ishikawa.jp", 
+"kaga.ishikawa.jp", +"kahoku.ishikawa.jp", +"kanazawa.ishikawa.jp", +"kawakita.ishikawa.jp", +"komatsu.ishikawa.jp", +"nakanoto.ishikawa.jp", +"nanao.ishikawa.jp", +"nomi.ishikawa.jp", +"nonoichi.ishikawa.jp", +"noto.ishikawa.jp", +"shika.ishikawa.jp", +"suzu.ishikawa.jp", +"tsubata.ishikawa.jp", +"tsurugi.ishikawa.jp", +"uchinada.ishikawa.jp", +"wajima.ishikawa.jp", +"fudai.iwate.jp", +"fujisawa.iwate.jp", +"hanamaki.iwate.jp", +"hiraizumi.iwate.jp", +"hirono.iwate.jp", +"ichinohe.iwate.jp", +"ichinoseki.iwate.jp", +"iwaizumi.iwate.jp", +"iwate.iwate.jp", +"joboji.iwate.jp", +"kamaishi.iwate.jp", +"kanegasaki.iwate.jp", +"karumai.iwate.jp", +"kawai.iwate.jp", +"kitakami.iwate.jp", +"kuji.iwate.jp", +"kunohe.iwate.jp", +"kuzumaki.iwate.jp", +"miyako.iwate.jp", +"mizusawa.iwate.jp", +"morioka.iwate.jp", +"ninohe.iwate.jp", +"noda.iwate.jp", +"ofunato.iwate.jp", +"oshu.iwate.jp", +"otsuchi.iwate.jp", +"rikuzentakata.iwate.jp", +"shiwa.iwate.jp", +"shizukuishi.iwate.jp", +"sumita.iwate.jp", +"tanohata.iwate.jp", +"tono.iwate.jp", +"yahaba.iwate.jp", +"yamada.iwate.jp", +"ayagawa.kagawa.jp", +"higashikagawa.kagawa.jp", +"kanonji.kagawa.jp", +"kotohira.kagawa.jp", +"manno.kagawa.jp", +"marugame.kagawa.jp", +"mitoyo.kagawa.jp", +"naoshima.kagawa.jp", +"sanuki.kagawa.jp", +"tadotsu.kagawa.jp", +"takamatsu.kagawa.jp", +"tonosho.kagawa.jp", +"uchinomi.kagawa.jp", +"utazu.kagawa.jp", +"zentsuji.kagawa.jp", +"akune.kagoshima.jp", +"amami.kagoshima.jp", +"hioki.kagoshima.jp", +"isa.kagoshima.jp", +"isen.kagoshima.jp", +"izumi.kagoshima.jp", +"kagoshima.kagoshima.jp", +"kanoya.kagoshima.jp", +"kawanabe.kagoshima.jp", +"kinko.kagoshima.jp", +"kouyama.kagoshima.jp", +"makurazaki.kagoshima.jp", +"matsumoto.kagoshima.jp", +"minamitane.kagoshima.jp", +"nakatane.kagoshima.jp", +"nishinoomote.kagoshima.jp", +"satsumasendai.kagoshima.jp", +"soo.kagoshima.jp", +"tarumizu.kagoshima.jp", +"yusui.kagoshima.jp", +"aikawa.kanagawa.jp", +"atsugi.kanagawa.jp", +"ayase.kanagawa.jp", +"chigasaki.kanagawa.jp", +"ebina.kanagawa.jp", +"fujisawa.kanagawa.jp", +"hadano.kanagawa.jp", +"hakone.kanagawa.jp", +"hiratsuka.kanagawa.jp", +"isehara.kanagawa.jp", +"kaisei.kanagawa.jp", +"kamakura.kanagawa.jp", +"kiyokawa.kanagawa.jp", +"matsuda.kanagawa.jp", +"minamiashigara.kanagawa.jp", +"miura.kanagawa.jp", +"nakai.kanagawa.jp", +"ninomiya.kanagawa.jp", +"odawara.kanagawa.jp", +"oi.kanagawa.jp", +"oiso.kanagawa.jp", +"sagamihara.kanagawa.jp", +"samukawa.kanagawa.jp", +"tsukui.kanagawa.jp", +"yamakita.kanagawa.jp", +"yamato.kanagawa.jp", +"yokosuka.kanagawa.jp", +"yugawara.kanagawa.jp", +"zama.kanagawa.jp", +"zushi.kanagawa.jp", +"aki.kochi.jp", +"geisei.kochi.jp", +"hidaka.kochi.jp", +"higashitsuno.kochi.jp", +"ino.kochi.jp", +"kagami.kochi.jp", +"kami.kochi.jp", +"kitagawa.kochi.jp", +"kochi.kochi.jp", +"mihara.kochi.jp", +"motoyama.kochi.jp", +"muroto.kochi.jp", +"nahari.kochi.jp", +"nakamura.kochi.jp", +"nankoku.kochi.jp", +"nishitosa.kochi.jp", +"niyodogawa.kochi.jp", +"ochi.kochi.jp", +"okawa.kochi.jp", +"otoyo.kochi.jp", +"otsuki.kochi.jp", +"sakawa.kochi.jp", +"sukumo.kochi.jp", +"susaki.kochi.jp", +"tosa.kochi.jp", +"tosashimizu.kochi.jp", +"toyo.kochi.jp", +"tsuno.kochi.jp", +"umaji.kochi.jp", +"yasuda.kochi.jp", +"yusuhara.kochi.jp", +"amakusa.kumamoto.jp", +"arao.kumamoto.jp", +"aso.kumamoto.jp", +"choyo.kumamoto.jp", +"gyokuto.kumamoto.jp", +"kamiamakusa.kumamoto.jp", +"kikuchi.kumamoto.jp", +"kumamoto.kumamoto.jp", +"mashiki.kumamoto.jp", +"mifune.kumamoto.jp", +"minamata.kumamoto.jp", +"minamioguni.kumamoto.jp", 
+"nagasu.kumamoto.jp", +"nishihara.kumamoto.jp", +"oguni.kumamoto.jp", +"ozu.kumamoto.jp", +"sumoto.kumamoto.jp", +"takamori.kumamoto.jp", +"uki.kumamoto.jp", +"uto.kumamoto.jp", +"yamaga.kumamoto.jp", +"yamato.kumamoto.jp", +"yatsushiro.kumamoto.jp", +"ayabe.kyoto.jp", +"fukuchiyama.kyoto.jp", +"higashiyama.kyoto.jp", +"ide.kyoto.jp", +"ine.kyoto.jp", +"joyo.kyoto.jp", +"kameoka.kyoto.jp", +"kamo.kyoto.jp", +"kita.kyoto.jp", +"kizu.kyoto.jp", +"kumiyama.kyoto.jp", +"kyotamba.kyoto.jp", +"kyotanabe.kyoto.jp", +"kyotango.kyoto.jp", +"maizuru.kyoto.jp", +"minami.kyoto.jp", +"minamiyamashiro.kyoto.jp", +"miyazu.kyoto.jp", +"muko.kyoto.jp", +"nagaokakyo.kyoto.jp", +"nakagyo.kyoto.jp", +"nantan.kyoto.jp", +"oyamazaki.kyoto.jp", +"sakyo.kyoto.jp", +"seika.kyoto.jp", +"tanabe.kyoto.jp", +"uji.kyoto.jp", +"ujitawara.kyoto.jp", +"wazuka.kyoto.jp", +"yamashina.kyoto.jp", +"yawata.kyoto.jp", +"asahi.mie.jp", +"inabe.mie.jp", +"ise.mie.jp", +"kameyama.mie.jp", +"kawagoe.mie.jp", +"kiho.mie.jp", +"kisosaki.mie.jp", +"kiwa.mie.jp", +"komono.mie.jp", +"kumano.mie.jp", +"kuwana.mie.jp", +"matsusaka.mie.jp", +"meiwa.mie.jp", +"mihama.mie.jp", +"minamiise.mie.jp", +"misugi.mie.jp", +"miyama.mie.jp", +"nabari.mie.jp", +"shima.mie.jp", +"suzuka.mie.jp", +"tado.mie.jp", +"taiki.mie.jp", +"taki.mie.jp", +"tamaki.mie.jp", +"toba.mie.jp", +"tsu.mie.jp", +"udono.mie.jp", +"ureshino.mie.jp", +"watarai.mie.jp", +"yokkaichi.mie.jp", +"furukawa.miyagi.jp", +"higashimatsushima.miyagi.jp", +"ishinomaki.miyagi.jp", +"iwanuma.miyagi.jp", +"kakuda.miyagi.jp", +"kami.miyagi.jp", +"kawasaki.miyagi.jp", +"marumori.miyagi.jp", +"matsushima.miyagi.jp", +"minamisanriku.miyagi.jp", +"misato.miyagi.jp", +"murata.miyagi.jp", +"natori.miyagi.jp", +"ogawara.miyagi.jp", +"ohira.miyagi.jp", +"onagawa.miyagi.jp", +"osaki.miyagi.jp", +"rifu.miyagi.jp", +"semine.miyagi.jp", +"shibata.miyagi.jp", +"shichikashuku.miyagi.jp", +"shikama.miyagi.jp", +"shiogama.miyagi.jp", +"shiroishi.miyagi.jp", +"tagajo.miyagi.jp", +"taiwa.miyagi.jp", +"tome.miyagi.jp", +"tomiya.miyagi.jp", +"wakuya.miyagi.jp", +"watari.miyagi.jp", +"yamamoto.miyagi.jp", +"zao.miyagi.jp", +"aya.miyazaki.jp", +"ebino.miyazaki.jp", +"gokase.miyazaki.jp", +"hyuga.miyazaki.jp", +"kadogawa.miyazaki.jp", +"kawaminami.miyazaki.jp", +"kijo.miyazaki.jp", +"kitagawa.miyazaki.jp", +"kitakata.miyazaki.jp", +"kitaura.miyazaki.jp", +"kobayashi.miyazaki.jp", +"kunitomi.miyazaki.jp", +"kushima.miyazaki.jp", +"mimata.miyazaki.jp", +"miyakonojo.miyazaki.jp", +"miyazaki.miyazaki.jp", +"morotsuka.miyazaki.jp", +"nichinan.miyazaki.jp", +"nishimera.miyazaki.jp", +"nobeoka.miyazaki.jp", +"saito.miyazaki.jp", +"shiiba.miyazaki.jp", +"shintomi.miyazaki.jp", +"takaharu.miyazaki.jp", +"takanabe.miyazaki.jp", +"takazaki.miyazaki.jp", +"tsuno.miyazaki.jp", +"achi.nagano.jp", +"agematsu.nagano.jp", +"anan.nagano.jp", +"aoki.nagano.jp", +"asahi.nagano.jp", +"azumino.nagano.jp", +"chikuhoku.nagano.jp", +"chikuma.nagano.jp", +"chino.nagano.jp", +"fujimi.nagano.jp", +"hakuba.nagano.jp", +"hara.nagano.jp", +"hiraya.nagano.jp", +"iida.nagano.jp", +"iijima.nagano.jp", +"iiyama.nagano.jp", +"iizuna.nagano.jp", +"ikeda.nagano.jp", +"ikusaka.nagano.jp", +"ina.nagano.jp", +"karuizawa.nagano.jp", +"kawakami.nagano.jp", +"kiso.nagano.jp", +"kisofukushima.nagano.jp", +"kitaaiki.nagano.jp", +"komagane.nagano.jp", +"komoro.nagano.jp", +"matsukawa.nagano.jp", +"matsumoto.nagano.jp", +"miasa.nagano.jp", +"minamiaiki.nagano.jp", +"minamimaki.nagano.jp", +"minamiminowa.nagano.jp", +"minowa.nagano.jp", +"miyada.nagano.jp", 
+"miyota.nagano.jp", +"mochizuki.nagano.jp", +"nagano.nagano.jp", +"nagawa.nagano.jp", +"nagiso.nagano.jp", +"nakagawa.nagano.jp", +"nakano.nagano.jp", +"nozawaonsen.nagano.jp", +"obuse.nagano.jp", +"ogawa.nagano.jp", +"okaya.nagano.jp", +"omachi.nagano.jp", +"omi.nagano.jp", +"ookuwa.nagano.jp", +"ooshika.nagano.jp", +"otaki.nagano.jp", +"otari.nagano.jp", +"sakae.nagano.jp", +"sakaki.nagano.jp", +"saku.nagano.jp", +"sakuho.nagano.jp", +"shimosuwa.nagano.jp", +"shinanomachi.nagano.jp", +"shiojiri.nagano.jp", +"suwa.nagano.jp", +"suzaka.nagano.jp", +"takagi.nagano.jp", +"takamori.nagano.jp", +"takayama.nagano.jp", +"tateshina.nagano.jp", +"tatsuno.nagano.jp", +"togakushi.nagano.jp", +"togura.nagano.jp", +"tomi.nagano.jp", +"ueda.nagano.jp", +"wada.nagano.jp", +"yamagata.nagano.jp", +"yamanouchi.nagano.jp", +"yasaka.nagano.jp", +"yasuoka.nagano.jp", +"chijiwa.nagasaki.jp", +"futsu.nagasaki.jp", +"goto.nagasaki.jp", +"hasami.nagasaki.jp", +"hirado.nagasaki.jp", +"iki.nagasaki.jp", +"isahaya.nagasaki.jp", +"kawatana.nagasaki.jp", +"kuchinotsu.nagasaki.jp", +"matsuura.nagasaki.jp", +"nagasaki.nagasaki.jp", +"obama.nagasaki.jp", +"omura.nagasaki.jp", +"oseto.nagasaki.jp", +"saikai.nagasaki.jp", +"sasebo.nagasaki.jp", +"seihi.nagasaki.jp", +"shimabara.nagasaki.jp", +"shinkamigoto.nagasaki.jp", +"togitsu.nagasaki.jp", +"tsushima.nagasaki.jp", +"unzen.nagasaki.jp", +"ando.nara.jp", +"gose.nara.jp", +"heguri.nara.jp", +"higashiyoshino.nara.jp", +"ikaruga.nara.jp", +"ikoma.nara.jp", +"kamikitayama.nara.jp", +"kanmaki.nara.jp", +"kashiba.nara.jp", +"kashihara.nara.jp", +"katsuragi.nara.jp", +"kawai.nara.jp", +"kawakami.nara.jp", +"kawanishi.nara.jp", +"koryo.nara.jp", +"kurotaki.nara.jp", +"mitsue.nara.jp", +"miyake.nara.jp", +"nara.nara.jp", +"nosegawa.nara.jp", +"oji.nara.jp", +"ouda.nara.jp", +"oyodo.nara.jp", +"sakurai.nara.jp", +"sango.nara.jp", +"shimoichi.nara.jp", +"shimokitayama.nara.jp", +"shinjo.nara.jp", +"soni.nara.jp", +"takatori.nara.jp", +"tawaramoto.nara.jp", +"tenkawa.nara.jp", +"tenri.nara.jp", +"uda.nara.jp", +"yamatokoriyama.nara.jp", +"yamatotakada.nara.jp", +"yamazoe.nara.jp", +"yoshino.nara.jp", +"aga.niigata.jp", +"agano.niigata.jp", +"gosen.niigata.jp", +"itoigawa.niigata.jp", +"izumozaki.niigata.jp", +"joetsu.niigata.jp", +"kamo.niigata.jp", +"kariwa.niigata.jp", +"kashiwazaki.niigata.jp", +"minamiuonuma.niigata.jp", +"mitsuke.niigata.jp", +"muika.niigata.jp", +"murakami.niigata.jp", +"myoko.niigata.jp", +"nagaoka.niigata.jp", +"niigata.niigata.jp", +"ojiya.niigata.jp", +"omi.niigata.jp", +"sado.niigata.jp", +"sanjo.niigata.jp", +"seiro.niigata.jp", +"seirou.niigata.jp", +"sekikawa.niigata.jp", +"shibata.niigata.jp", +"tagami.niigata.jp", +"tainai.niigata.jp", +"tochio.niigata.jp", +"tokamachi.niigata.jp", +"tsubame.niigata.jp", +"tsunan.niigata.jp", +"uonuma.niigata.jp", +"yahiko.niigata.jp", +"yoita.niigata.jp", +"yuzawa.niigata.jp", +"beppu.oita.jp", +"bungoono.oita.jp", +"bungotakada.oita.jp", +"hasama.oita.jp", +"hiji.oita.jp", +"himeshima.oita.jp", +"hita.oita.jp", +"kamitsue.oita.jp", +"kokonoe.oita.jp", +"kuju.oita.jp", +"kunisaki.oita.jp", +"kusu.oita.jp", +"oita.oita.jp", +"saiki.oita.jp", +"taketa.oita.jp", +"tsukumi.oita.jp", +"usa.oita.jp", +"usuki.oita.jp", +"yufu.oita.jp", +"akaiwa.okayama.jp", +"asakuchi.okayama.jp", +"bizen.okayama.jp", +"hayashima.okayama.jp", +"ibara.okayama.jp", +"kagamino.okayama.jp", +"kasaoka.okayama.jp", +"kibichuo.okayama.jp", +"kumenan.okayama.jp", +"kurashiki.okayama.jp", +"maniwa.okayama.jp", +"misaki.okayama.jp", 
+"nagi.okayama.jp", +"niimi.okayama.jp", +"nishiawakura.okayama.jp", +"okayama.okayama.jp", +"satosho.okayama.jp", +"setouchi.okayama.jp", +"shinjo.okayama.jp", +"shoo.okayama.jp", +"soja.okayama.jp", +"takahashi.okayama.jp", +"tamano.okayama.jp", +"tsuyama.okayama.jp", +"wake.okayama.jp", +"yakage.okayama.jp", +"aguni.okinawa.jp", +"ginowan.okinawa.jp", +"ginoza.okinawa.jp", +"gushikami.okinawa.jp", +"haebaru.okinawa.jp", +"higashi.okinawa.jp", +"hirara.okinawa.jp", +"iheya.okinawa.jp", +"ishigaki.okinawa.jp", +"ishikawa.okinawa.jp", +"itoman.okinawa.jp", +"izena.okinawa.jp", +"kadena.okinawa.jp", +"kin.okinawa.jp", +"kitadaito.okinawa.jp", +"kitanakagusuku.okinawa.jp", +"kumejima.okinawa.jp", +"kunigami.okinawa.jp", +"minamidaito.okinawa.jp", +"motobu.okinawa.jp", +"nago.okinawa.jp", +"naha.okinawa.jp", +"nakagusuku.okinawa.jp", +"nakijin.okinawa.jp", +"nanjo.okinawa.jp", +"nishihara.okinawa.jp", +"ogimi.okinawa.jp", +"okinawa.okinawa.jp", +"onna.okinawa.jp", +"shimoji.okinawa.jp", +"taketomi.okinawa.jp", +"tarama.okinawa.jp", +"tokashiki.okinawa.jp", +"tomigusuku.okinawa.jp", +"tonaki.okinawa.jp", +"urasoe.okinawa.jp", +"uruma.okinawa.jp", +"yaese.okinawa.jp", +"yomitan.okinawa.jp", +"yonabaru.okinawa.jp", +"yonaguni.okinawa.jp", +"zamami.okinawa.jp", +"abeno.osaka.jp", +"chihayaakasaka.osaka.jp", +"chuo.osaka.jp", +"daito.osaka.jp", +"fujiidera.osaka.jp", +"habikino.osaka.jp", +"hannan.osaka.jp", +"higashiosaka.osaka.jp", +"higashisumiyoshi.osaka.jp", +"higashiyodogawa.osaka.jp", +"hirakata.osaka.jp", +"ibaraki.osaka.jp", +"ikeda.osaka.jp", +"izumi.osaka.jp", +"izumiotsu.osaka.jp", +"izumisano.osaka.jp", +"kadoma.osaka.jp", +"kaizuka.osaka.jp", +"kanan.osaka.jp", +"kashiwara.osaka.jp", +"katano.osaka.jp", +"kawachinagano.osaka.jp", +"kishiwada.osaka.jp", +"kita.osaka.jp", +"kumatori.osaka.jp", +"matsubara.osaka.jp", +"minato.osaka.jp", +"minoh.osaka.jp", +"misaki.osaka.jp", +"moriguchi.osaka.jp", +"neyagawa.osaka.jp", +"nishi.osaka.jp", +"nose.osaka.jp", +"osakasayama.osaka.jp", +"sakai.osaka.jp", +"sayama.osaka.jp", +"sennan.osaka.jp", +"settsu.osaka.jp", +"shijonawate.osaka.jp", +"shimamoto.osaka.jp", +"suita.osaka.jp", +"tadaoka.osaka.jp", +"taishi.osaka.jp", +"tajiri.osaka.jp", +"takaishi.osaka.jp", +"takatsuki.osaka.jp", +"tondabayashi.osaka.jp", +"toyonaka.osaka.jp", +"toyono.osaka.jp", +"yao.osaka.jp", +"ariake.saga.jp", +"arita.saga.jp", +"fukudomi.saga.jp", +"genkai.saga.jp", +"hamatama.saga.jp", +"hizen.saga.jp", +"imari.saga.jp", +"kamimine.saga.jp", +"kanzaki.saga.jp", +"karatsu.saga.jp", +"kashima.saga.jp", +"kitagata.saga.jp", +"kitahata.saga.jp", +"kiyama.saga.jp", +"kouhoku.saga.jp", +"kyuragi.saga.jp", +"nishiarita.saga.jp", +"ogi.saga.jp", +"omachi.saga.jp", +"ouchi.saga.jp", +"saga.saga.jp", +"shiroishi.saga.jp", +"taku.saga.jp", +"tara.saga.jp", +"tosu.saga.jp", +"yoshinogari.saga.jp", +"arakawa.saitama.jp", +"asaka.saitama.jp", +"chichibu.saitama.jp", +"fujimi.saitama.jp", +"fujimino.saitama.jp", +"fukaya.saitama.jp", +"hanno.saitama.jp", +"hanyu.saitama.jp", +"hasuda.saitama.jp", +"hatogaya.saitama.jp", +"hatoyama.saitama.jp", +"hidaka.saitama.jp", +"higashichichibu.saitama.jp", +"higashimatsuyama.saitama.jp", +"honjo.saitama.jp", +"ina.saitama.jp", +"iruma.saitama.jp", +"iwatsuki.saitama.jp", +"kamiizumi.saitama.jp", +"kamikawa.saitama.jp", +"kamisato.saitama.jp", +"kasukabe.saitama.jp", +"kawagoe.saitama.jp", +"kawaguchi.saitama.jp", +"kawajima.saitama.jp", +"kazo.saitama.jp", +"kitamoto.saitama.jp", +"koshigaya.saitama.jp", +"kounosu.saitama.jp", 
+"kuki.saitama.jp", +"kumagaya.saitama.jp", +"matsubushi.saitama.jp", +"minano.saitama.jp", +"misato.saitama.jp", +"miyashiro.saitama.jp", +"miyoshi.saitama.jp", +"moroyama.saitama.jp", +"nagatoro.saitama.jp", +"namegawa.saitama.jp", +"niiza.saitama.jp", +"ogano.saitama.jp", +"ogawa.saitama.jp", +"ogose.saitama.jp", +"okegawa.saitama.jp", +"omiya.saitama.jp", +"otaki.saitama.jp", +"ranzan.saitama.jp", +"ryokami.saitama.jp", +"saitama.saitama.jp", +"sakado.saitama.jp", +"satte.saitama.jp", +"sayama.saitama.jp", +"shiki.saitama.jp", +"shiraoka.saitama.jp", +"soka.saitama.jp", +"sugito.saitama.jp", +"toda.saitama.jp", +"tokigawa.saitama.jp", +"tokorozawa.saitama.jp", +"tsurugashima.saitama.jp", +"urawa.saitama.jp", +"warabi.saitama.jp", +"yashio.saitama.jp", +"yokoze.saitama.jp", +"yono.saitama.jp", +"yorii.saitama.jp", +"yoshida.saitama.jp", +"yoshikawa.saitama.jp", +"yoshimi.saitama.jp", +"aisho.shiga.jp", +"gamo.shiga.jp", +"higashiomi.shiga.jp", +"hikone.shiga.jp", +"koka.shiga.jp", +"konan.shiga.jp", +"kosei.shiga.jp", +"koto.shiga.jp", +"kusatsu.shiga.jp", +"maibara.shiga.jp", +"moriyama.shiga.jp", +"nagahama.shiga.jp", +"nishiazai.shiga.jp", +"notogawa.shiga.jp", +"omihachiman.shiga.jp", +"otsu.shiga.jp", +"ritto.shiga.jp", +"ryuoh.shiga.jp", +"takashima.shiga.jp", +"takatsuki.shiga.jp", +"torahime.shiga.jp", +"toyosato.shiga.jp", +"yasu.shiga.jp", +"akagi.shimane.jp", +"ama.shimane.jp", +"gotsu.shimane.jp", +"hamada.shimane.jp", +"higashiizumo.shimane.jp", +"hikawa.shimane.jp", +"hikimi.shimane.jp", +"izumo.shimane.jp", +"kakinoki.shimane.jp", +"masuda.shimane.jp", +"matsue.shimane.jp", +"misato.shimane.jp", +"nishinoshima.shimane.jp", +"ohda.shimane.jp", +"okinoshima.shimane.jp", +"okuizumo.shimane.jp", +"shimane.shimane.jp", +"tamayu.shimane.jp", +"tsuwano.shimane.jp", +"unnan.shimane.jp", +"yakumo.shimane.jp", +"yasugi.shimane.jp", +"yatsuka.shimane.jp", +"arai.shizuoka.jp", +"atami.shizuoka.jp", +"fuji.shizuoka.jp", +"fujieda.shizuoka.jp", +"fujikawa.shizuoka.jp", +"fujinomiya.shizuoka.jp", +"fukuroi.shizuoka.jp", +"gotemba.shizuoka.jp", +"haibara.shizuoka.jp", +"hamamatsu.shizuoka.jp", +"higashiizu.shizuoka.jp", +"ito.shizuoka.jp", +"iwata.shizuoka.jp", +"izu.shizuoka.jp", +"izunokuni.shizuoka.jp", +"kakegawa.shizuoka.jp", +"kannami.shizuoka.jp", +"kawanehon.shizuoka.jp", +"kawazu.shizuoka.jp", +"kikugawa.shizuoka.jp", +"kosai.shizuoka.jp", +"makinohara.shizuoka.jp", +"matsuzaki.shizuoka.jp", +"minamiizu.shizuoka.jp", +"mishima.shizuoka.jp", +"morimachi.shizuoka.jp", +"nishiizu.shizuoka.jp", +"numazu.shizuoka.jp", +"omaezaki.shizuoka.jp", +"shimada.shizuoka.jp", +"shimizu.shizuoka.jp", +"shimoda.shizuoka.jp", +"shizuoka.shizuoka.jp", +"susono.shizuoka.jp", +"yaizu.shizuoka.jp", +"yoshida.shizuoka.jp", +"ashikaga.tochigi.jp", +"bato.tochigi.jp", +"haga.tochigi.jp", +"ichikai.tochigi.jp", +"iwafune.tochigi.jp", +"kaminokawa.tochigi.jp", +"kanuma.tochigi.jp", +"karasuyama.tochigi.jp", +"kuroiso.tochigi.jp", +"mashiko.tochigi.jp", +"mibu.tochigi.jp", +"moka.tochigi.jp", +"motegi.tochigi.jp", +"nasu.tochigi.jp", +"nasushiobara.tochigi.jp", +"nikko.tochigi.jp", +"nishikata.tochigi.jp", +"nogi.tochigi.jp", +"ohira.tochigi.jp", +"ohtawara.tochigi.jp", +"oyama.tochigi.jp", +"sakura.tochigi.jp", +"sano.tochigi.jp", +"shimotsuke.tochigi.jp", +"shioya.tochigi.jp", +"takanezawa.tochigi.jp", +"tochigi.tochigi.jp", +"tsuga.tochigi.jp", +"ujiie.tochigi.jp", +"utsunomiya.tochigi.jp", +"yaita.tochigi.jp", +"aizumi.tokushima.jp", +"anan.tokushima.jp", +"ichiba.tokushima.jp", +"itano.tokushima.jp", 
+"kainan.tokushima.jp", +"komatsushima.tokushima.jp", +"matsushige.tokushima.jp", +"mima.tokushima.jp", +"minami.tokushima.jp", +"miyoshi.tokushima.jp", +"mugi.tokushima.jp", +"nakagawa.tokushima.jp", +"naruto.tokushima.jp", +"sanagochi.tokushima.jp", +"shishikui.tokushima.jp", +"tokushima.tokushima.jp", +"wajiki.tokushima.jp", +"adachi.tokyo.jp", +"akiruno.tokyo.jp", +"akishima.tokyo.jp", +"aogashima.tokyo.jp", +"arakawa.tokyo.jp", +"bunkyo.tokyo.jp", +"chiyoda.tokyo.jp", +"chofu.tokyo.jp", +"chuo.tokyo.jp", +"edogawa.tokyo.jp", +"fuchu.tokyo.jp", +"fussa.tokyo.jp", +"hachijo.tokyo.jp", +"hachioji.tokyo.jp", +"hamura.tokyo.jp", +"higashikurume.tokyo.jp", +"higashimurayama.tokyo.jp", +"higashiyamato.tokyo.jp", +"hino.tokyo.jp", +"hinode.tokyo.jp", +"hinohara.tokyo.jp", +"inagi.tokyo.jp", +"itabashi.tokyo.jp", +"katsushika.tokyo.jp", +"kita.tokyo.jp", +"kiyose.tokyo.jp", +"kodaira.tokyo.jp", +"koganei.tokyo.jp", +"kokubunji.tokyo.jp", +"komae.tokyo.jp", +"koto.tokyo.jp", +"kouzushima.tokyo.jp", +"kunitachi.tokyo.jp", +"machida.tokyo.jp", +"meguro.tokyo.jp", +"minato.tokyo.jp", +"mitaka.tokyo.jp", +"mizuho.tokyo.jp", +"musashimurayama.tokyo.jp", +"musashino.tokyo.jp", +"nakano.tokyo.jp", +"nerima.tokyo.jp", +"ogasawara.tokyo.jp", +"okutama.tokyo.jp", +"ome.tokyo.jp", +"oshima.tokyo.jp", +"ota.tokyo.jp", +"setagaya.tokyo.jp", +"shibuya.tokyo.jp", +"shinagawa.tokyo.jp", +"shinjuku.tokyo.jp", +"suginami.tokyo.jp", +"sumida.tokyo.jp", +"tachikawa.tokyo.jp", +"taito.tokyo.jp", +"tama.tokyo.jp", +"toshima.tokyo.jp", +"chizu.tottori.jp", +"hino.tottori.jp", +"kawahara.tottori.jp", +"koge.tottori.jp", +"kotoura.tottori.jp", +"misasa.tottori.jp", +"nanbu.tottori.jp", +"nichinan.tottori.jp", +"sakaiminato.tottori.jp", +"tottori.tottori.jp", +"wakasa.tottori.jp", +"yazu.tottori.jp", +"yonago.tottori.jp", +"asahi.toyama.jp", +"fuchu.toyama.jp", +"fukumitsu.toyama.jp", +"funahashi.toyama.jp", +"himi.toyama.jp", +"imizu.toyama.jp", +"inami.toyama.jp", +"johana.toyama.jp", +"kamiichi.toyama.jp", +"kurobe.toyama.jp", +"nakaniikawa.toyama.jp", +"namerikawa.toyama.jp", +"nanto.toyama.jp", +"nyuzen.toyama.jp", +"oyabe.toyama.jp", +"taira.toyama.jp", +"takaoka.toyama.jp", +"tateyama.toyama.jp", +"toga.toyama.jp", +"tonami.toyama.jp", +"toyama.toyama.jp", +"unazuki.toyama.jp", +"uozu.toyama.jp", +"yamada.toyama.jp", +"arida.wakayama.jp", +"aridagawa.wakayama.jp", +"gobo.wakayama.jp", +"hashimoto.wakayama.jp", +"hidaka.wakayama.jp", +"hirogawa.wakayama.jp", +"inami.wakayama.jp", +"iwade.wakayama.jp", +"kainan.wakayama.jp", +"kamitonda.wakayama.jp", +"katsuragi.wakayama.jp", +"kimino.wakayama.jp", +"kinokawa.wakayama.jp", +"kitayama.wakayama.jp", +"koya.wakayama.jp", +"koza.wakayama.jp", +"kozagawa.wakayama.jp", +"kudoyama.wakayama.jp", +"kushimoto.wakayama.jp", +"mihama.wakayama.jp", +"misato.wakayama.jp", +"nachikatsuura.wakayama.jp", +"shingu.wakayama.jp", +"shirahama.wakayama.jp", +"taiji.wakayama.jp", +"tanabe.wakayama.jp", +"wakayama.wakayama.jp", +"yuasa.wakayama.jp", +"yura.wakayama.jp", +"asahi.yamagata.jp", +"funagata.yamagata.jp", +"higashine.yamagata.jp", +"iide.yamagata.jp", +"kahoku.yamagata.jp", +"kaminoyama.yamagata.jp", +"kaneyama.yamagata.jp", +"kawanishi.yamagata.jp", +"mamurogawa.yamagata.jp", +"mikawa.yamagata.jp", +"murayama.yamagata.jp", +"nagai.yamagata.jp", +"nakayama.yamagata.jp", +"nanyo.yamagata.jp", +"nishikawa.yamagata.jp", +"obanazawa.yamagata.jp", +"oe.yamagata.jp", +"oguni.yamagata.jp", +"ohkura.yamagata.jp", +"oishida.yamagata.jp", +"sagae.yamagata.jp", +"sakata.yamagata.jp", 
+"sakegawa.yamagata.jp", +"shinjo.yamagata.jp", +"shirataka.yamagata.jp", +"shonai.yamagata.jp", +"takahata.yamagata.jp", +"tendo.yamagata.jp", +"tozawa.yamagata.jp", +"tsuruoka.yamagata.jp", +"yamagata.yamagata.jp", +"yamanobe.yamagata.jp", +"yonezawa.yamagata.jp", +"yuza.yamagata.jp", +"abu.yamaguchi.jp", +"hagi.yamaguchi.jp", +"hikari.yamaguchi.jp", +"hofu.yamaguchi.jp", +"iwakuni.yamaguchi.jp", +"kudamatsu.yamaguchi.jp", +"mitou.yamaguchi.jp", +"nagato.yamaguchi.jp", +"oshima.yamaguchi.jp", +"shimonoseki.yamaguchi.jp", +"shunan.yamaguchi.jp", +"tabuse.yamaguchi.jp", +"tokuyama.yamaguchi.jp", +"toyota.yamaguchi.jp", +"ube.yamaguchi.jp", +"yuu.yamaguchi.jp", +"chuo.yamanashi.jp", +"doshi.yamanashi.jp", +"fuefuki.yamanashi.jp", +"fujikawa.yamanashi.jp", +"fujikawaguchiko.yamanashi.jp", +"fujiyoshida.yamanashi.jp", +"hayakawa.yamanashi.jp", +"hokuto.yamanashi.jp", +"ichikawamisato.yamanashi.jp", +"kai.yamanashi.jp", +"kofu.yamanashi.jp", +"koshu.yamanashi.jp", +"kosuge.yamanashi.jp", +"minami-alps.yamanashi.jp", +"minobu.yamanashi.jp", +"nakamichi.yamanashi.jp", +"nanbu.yamanashi.jp", +"narusawa.yamanashi.jp", +"nirasaki.yamanashi.jp", +"nishikatsura.yamanashi.jp", +"oshino.yamanashi.jp", +"otsuki.yamanashi.jp", +"showa.yamanashi.jp", +"tabayama.yamanashi.jp", +"tsuru.yamanashi.jp", +"uenohara.yamanashi.jp", +"yamanakako.yamanashi.jp", +"yamanashi.yamanashi.jp", +"ke", +"ac.ke", +"co.ke", +"go.ke", +"info.ke", +"me.ke", +"mobi.ke", +"ne.ke", +"or.ke", +"sc.ke", +"kg", +"org.kg", +"net.kg", +"com.kg", +"edu.kg", +"gov.kg", +"mil.kg", +"*.kh", +"ki", +"edu.ki", +"biz.ki", +"net.ki", +"org.ki", +"gov.ki", +"info.ki", +"com.ki", +"km", +"org.km", +"nom.km", +"gov.km", +"prd.km", +"tm.km", +"edu.km", +"mil.km", +"ass.km", +"com.km", +"coop.km", +"asso.km", +"presse.km", +"medecin.km", +"notaires.km", +"pharmaciens.km", +"veterinaire.km", +"gouv.km", +"kn", +"net.kn", +"org.kn", +"edu.kn", +"gov.kn", +"kp", +"com.kp", +"edu.kp", +"gov.kp", +"org.kp", +"rep.kp", +"tra.kp", +"kr", +"ac.kr", +"co.kr", +"es.kr", +"go.kr", +"hs.kr", +"kg.kr", +"mil.kr", +"ms.kr", +"ne.kr", +"or.kr", +"pe.kr", +"re.kr", +"sc.kr", +"busan.kr", +"chungbuk.kr", +"chungnam.kr", +"daegu.kr", +"daejeon.kr", +"gangwon.kr", +"gwangju.kr", +"gyeongbuk.kr", +"gyeonggi.kr", +"gyeongnam.kr", +"incheon.kr", +"jeju.kr", +"jeonbuk.kr", +"jeonnam.kr", +"seoul.kr", +"ulsan.kr", +"kw", +"com.kw", +"edu.kw", +"emb.kw", +"gov.kw", +"ind.kw", +"net.kw", +"org.kw", +"ky", +"edu.ky", +"gov.ky", +"com.ky", +"org.ky", +"net.ky", +"kz", +"org.kz", +"edu.kz", +"net.kz", +"gov.kz", +"mil.kz", +"com.kz", +"la", +"int.la", +"net.la", +"info.la", +"edu.la", +"gov.la", +"per.la", +"com.la", +"org.la", +"lb", +"com.lb", +"edu.lb", +"gov.lb", +"net.lb", +"org.lb", +"lc", +"com.lc", +"net.lc", +"co.lc", +"org.lc", +"edu.lc", +"gov.lc", +"li", +"lk", +"gov.lk", +"sch.lk", +"net.lk", +"int.lk", +"com.lk", +"org.lk", +"edu.lk", +"ngo.lk", +"soc.lk", +"web.lk", +"ltd.lk", +"assn.lk", +"grp.lk", +"hotel.lk", +"ac.lk", +"lr", +"com.lr", +"edu.lr", +"gov.lr", +"org.lr", +"net.lr", +"ls", +"ac.ls", +"biz.ls", +"co.ls", +"edu.ls", +"gov.ls", +"info.ls", +"net.ls", +"org.ls", +"sc.ls", +"lt", +"gov.lt", +"lu", +"lv", +"com.lv", +"edu.lv", +"gov.lv", +"org.lv", +"mil.lv", +"id.lv", +"net.lv", +"asn.lv", +"conf.lv", +"ly", +"com.ly", +"net.ly", +"gov.ly", +"plc.ly", +"edu.ly", +"sch.ly", +"med.ly", +"org.ly", +"id.ly", +"ma", +"co.ma", +"net.ma", +"gov.ma", +"org.ma", +"ac.ma", +"press.ma", +"mc", +"tm.mc", +"asso.mc", +"md", +"me", +"co.me", +"net.me", +"org.me", 
+"edu.me", +"ac.me", +"gov.me", +"its.me", +"priv.me", +"mg", +"org.mg", +"nom.mg", +"gov.mg", +"prd.mg", +"tm.mg", +"edu.mg", +"mil.mg", +"com.mg", +"co.mg", +"mh", +"mil", +"mk", +"com.mk", +"org.mk", +"net.mk", +"edu.mk", +"gov.mk", +"inf.mk", +"name.mk", +"ml", +"com.ml", +"edu.ml", +"gouv.ml", +"gov.ml", +"net.ml", +"org.ml", +"presse.ml", +"*.mm", +"mn", +"gov.mn", +"edu.mn", +"org.mn", +"mo", +"com.mo", +"net.mo", +"org.mo", +"edu.mo", +"gov.mo", +"mobi", +"mp", +"mq", +"mr", +"gov.mr", +"ms", +"com.ms", +"edu.ms", +"gov.ms", +"net.ms", +"org.ms", +"mt", +"com.mt", +"edu.mt", +"net.mt", +"org.mt", +"mu", +"com.mu", +"net.mu", +"org.mu", +"gov.mu", +"ac.mu", +"co.mu", +"or.mu", +"museum", +"academy.museum", +"agriculture.museum", +"air.museum", +"airguard.museum", +"alabama.museum", +"alaska.museum", +"amber.museum", +"ambulance.museum", +"american.museum", +"americana.museum", +"americanantiques.museum", +"americanart.museum", +"amsterdam.museum", +"and.museum", +"annefrank.museum", +"anthro.museum", +"anthropology.museum", +"antiques.museum", +"aquarium.museum", +"arboretum.museum", +"archaeological.museum", +"archaeology.museum", +"architecture.museum", +"art.museum", +"artanddesign.museum", +"artcenter.museum", +"artdeco.museum", +"arteducation.museum", +"artgallery.museum", +"arts.museum", +"artsandcrafts.museum", +"asmatart.museum", +"assassination.museum", +"assisi.museum", +"association.museum", +"astronomy.museum", +"atlanta.museum", +"austin.museum", +"australia.museum", +"automotive.museum", +"aviation.museum", +"axis.museum", +"badajoz.museum", +"baghdad.museum", +"bahn.museum", +"bale.museum", +"baltimore.museum", +"barcelona.museum", +"baseball.museum", +"basel.museum", +"baths.museum", +"bauern.museum", +"beauxarts.museum", +"beeldengeluid.museum", +"bellevue.museum", +"bergbau.museum", +"berkeley.museum", +"berlin.museum", +"bern.museum", +"bible.museum", +"bilbao.museum", +"bill.museum", +"birdart.museum", +"birthplace.museum", +"bonn.museum", +"boston.museum", +"botanical.museum", +"botanicalgarden.museum", +"botanicgarden.museum", +"botany.museum", +"brandywinevalley.museum", +"brasil.museum", +"bristol.museum", +"british.museum", +"britishcolumbia.museum", +"broadcast.museum", +"brunel.museum", +"brussel.museum", +"brussels.museum", +"bruxelles.museum", +"building.museum", +"burghof.museum", +"bus.museum", +"bushey.museum", +"cadaques.museum", +"california.museum", +"cambridge.museum", +"can.museum", +"canada.museum", +"capebreton.museum", +"carrier.museum", +"cartoonart.museum", +"casadelamoneda.museum", +"castle.museum", +"castres.museum", +"celtic.museum", +"center.museum", +"chattanooga.museum", +"cheltenham.museum", +"chesapeakebay.museum", +"chicago.museum", +"children.museum", +"childrens.museum", +"childrensgarden.museum", +"chiropractic.museum", +"chocolate.museum", +"christiansburg.museum", +"cincinnati.museum", +"cinema.museum", +"circus.museum", +"civilisation.museum", +"civilization.museum", +"civilwar.museum", +"clinton.museum", +"clock.museum", +"coal.museum", +"coastaldefence.museum", +"cody.museum", +"coldwar.museum", +"collection.museum", +"colonialwilliamsburg.museum", +"coloradoplateau.museum", +"columbia.museum", +"columbus.museum", +"communication.museum", +"communications.museum", +"community.museum", +"computer.museum", +"computerhistory.museum", +"comunicações.museum", +"contemporary.museum", +"contemporaryart.museum", +"convent.museum", +"copenhagen.museum", +"corporation.museum", +"correios-e-telecomunicações.museum", +"corvette.museum", 
+"costume.museum", +"countryestate.museum", +"county.museum", +"crafts.museum", +"cranbrook.museum", +"creation.museum", +"cultural.museum", +"culturalcenter.museum", +"culture.museum", +"cyber.museum", +"cymru.museum", +"dali.museum", +"dallas.museum", +"database.museum", +"ddr.museum", +"decorativearts.museum", +"delaware.museum", +"delmenhorst.museum", +"denmark.museum", +"depot.museum", +"design.museum", +"detroit.museum", +"dinosaur.museum", +"discovery.museum", +"dolls.museum", +"donostia.museum", +"durham.museum", +"eastafrica.museum", +"eastcoast.museum", +"education.museum", +"educational.museum", +"egyptian.museum", +"eisenbahn.museum", +"elburg.museum", +"elvendrell.museum", +"embroidery.museum", +"encyclopedic.museum", +"england.museum", +"entomology.museum", +"environment.museum", +"environmentalconservation.museum", +"epilepsy.museum", +"essex.museum", +"estate.museum", +"ethnology.museum", +"exeter.museum", +"exhibition.museum", +"family.museum", +"farm.museum", +"farmequipment.museum", +"farmers.museum", +"farmstead.museum", +"field.museum", +"figueres.museum", +"filatelia.museum", +"film.museum", +"fineart.museum", +"finearts.museum", +"finland.museum", +"flanders.museum", +"florida.museum", +"force.museum", +"fortmissoula.museum", +"fortworth.museum", +"foundation.museum", +"francaise.museum", +"frankfurt.museum", +"franziskaner.museum", +"freemasonry.museum", +"freiburg.museum", +"fribourg.museum", +"frog.museum", +"fundacio.museum", +"furniture.museum", +"gallery.museum", +"garden.museum", +"gateway.museum", +"geelvinck.museum", +"gemological.museum", +"geology.museum", +"georgia.museum", +"giessen.museum", +"glas.museum", +"glass.museum", +"gorge.museum", +"grandrapids.museum", +"graz.museum", +"guernsey.museum", +"halloffame.museum", +"hamburg.museum", +"handson.museum", +"harvestcelebration.museum", +"hawaii.museum", +"health.museum", +"heimatunduhren.museum", +"hellas.museum", +"helsinki.museum", +"hembygdsforbund.museum", +"heritage.museum", +"histoire.museum", +"historical.museum", +"historicalsociety.museum", +"historichouses.museum", +"historisch.museum", +"historisches.museum", +"history.museum", +"historyofscience.museum", +"horology.museum", +"house.museum", +"humanities.museum", +"illustration.museum", +"imageandsound.museum", +"indian.museum", +"indiana.museum", +"indianapolis.museum", +"indianmarket.museum", +"intelligence.museum", +"interactive.museum", +"iraq.museum", +"iron.museum", +"isleofman.museum", +"jamison.museum", +"jefferson.museum", +"jerusalem.museum", +"jewelry.museum", +"jewish.museum", +"jewishart.museum", +"jfk.museum", +"journalism.museum", +"judaica.museum", +"judygarland.museum", +"juedisches.museum", +"juif.museum", +"karate.museum", +"karikatur.museum", +"kids.museum", +"koebenhavn.museum", +"koeln.museum", +"kunst.museum", +"kunstsammlung.museum", +"kunstunddesign.museum", +"labor.museum", +"labour.museum", +"lajolla.museum", +"lancashire.museum", +"landes.museum", +"lans.museum", +"läns.museum", +"larsson.museum", +"lewismiller.museum", +"lincoln.museum", +"linz.museum", +"living.museum", +"livinghistory.museum", +"localhistory.museum", +"london.museum", +"losangeles.museum", +"louvre.museum", +"loyalist.museum", +"lucerne.museum", +"luxembourg.museum", +"luzern.museum", +"mad.museum", +"madrid.museum", +"mallorca.museum", +"manchester.museum", +"mansion.museum", +"mansions.museum", +"manx.museum", +"marburg.museum", +"maritime.museum", +"maritimo.museum", +"maryland.museum", +"marylhurst.museum", +"media.museum", 
+"medical.museum", +"medizinhistorisches.museum", +"meeres.museum", +"memorial.museum", +"mesaverde.museum", +"michigan.museum", +"midatlantic.museum", +"military.museum", +"mill.museum", +"miners.museum", +"mining.museum", +"minnesota.museum", +"missile.museum", +"missoula.museum", +"modern.museum", +"moma.museum", +"money.museum", +"monmouth.museum", +"monticello.museum", +"montreal.museum", +"moscow.museum", +"motorcycle.museum", +"muenchen.museum", +"muenster.museum", +"mulhouse.museum", +"muncie.museum", +"museet.museum", +"museumcenter.museum", +"museumvereniging.museum", +"music.museum", +"national.museum", +"nationalfirearms.museum", +"nationalheritage.museum", +"nativeamerican.museum", +"naturalhistory.museum", +"naturalhistorymuseum.museum", +"naturalsciences.museum", +"nature.museum", +"naturhistorisches.museum", +"natuurwetenschappen.museum", +"naumburg.museum", +"naval.museum", +"nebraska.museum", +"neues.museum", +"newhampshire.museum", +"newjersey.museum", +"newmexico.museum", +"newport.museum", +"newspaper.museum", +"newyork.museum", +"niepce.museum", +"norfolk.museum", +"north.museum", +"nrw.museum", +"nyc.museum", +"nyny.museum", +"oceanographic.museum", +"oceanographique.museum", +"omaha.museum", +"online.museum", +"ontario.museum", +"openair.museum", +"oregon.museum", +"oregontrail.museum", +"otago.museum", +"oxford.museum", +"pacific.museum", +"paderborn.museum", +"palace.museum", +"paleo.museum", +"palmsprings.museum", +"panama.museum", +"paris.museum", +"pasadena.museum", +"pharmacy.museum", +"philadelphia.museum", +"philadelphiaarea.museum", +"philately.museum", +"phoenix.museum", +"photography.museum", +"pilots.museum", +"pittsburgh.museum", +"planetarium.museum", +"plantation.museum", +"plants.museum", +"plaza.museum", +"portal.museum", +"portland.museum", +"portlligat.museum", +"posts-and-telecommunications.museum", +"preservation.museum", +"presidio.museum", +"press.museum", +"project.museum", +"public.museum", +"pubol.museum", +"quebec.museum", +"railroad.museum", +"railway.museum", +"research.museum", +"resistance.museum", +"riodejaneiro.museum", +"rochester.museum", +"rockart.museum", +"roma.museum", +"russia.museum", +"saintlouis.museum", +"salem.museum", +"salvadordali.museum", +"salzburg.museum", +"sandiego.museum", +"sanfrancisco.museum", +"santabarbara.museum", +"santacruz.museum", +"santafe.museum", +"saskatchewan.museum", +"satx.museum", +"savannahga.museum", +"schlesisches.museum", +"schoenbrunn.museum", +"schokoladen.museum", +"school.museum", +"schweiz.museum", +"science.museum", +"scienceandhistory.museum", +"scienceandindustry.museum", +"sciencecenter.museum", +"sciencecenters.museum", +"science-fiction.museum", +"sciencehistory.museum", +"sciences.museum", +"sciencesnaturelles.museum", +"scotland.museum", +"seaport.museum", +"settlement.museum", +"settlers.museum", +"shell.museum", +"sherbrooke.museum", +"sibenik.museum", +"silk.museum", +"ski.museum", +"skole.museum", +"society.museum", +"sologne.museum", +"soundandvision.museum", +"southcarolina.museum", +"southwest.museum", +"space.museum", +"spy.museum", +"square.museum", +"stadt.museum", +"stalbans.museum", +"starnberg.museum", +"state.museum", +"stateofdelaware.museum", +"station.museum", +"steam.museum", +"steiermark.museum", +"stjohn.museum", +"stockholm.museum", +"stpetersburg.museum", +"stuttgart.museum", +"suisse.museum", +"surgeonshall.museum", +"surrey.museum", +"svizzera.museum", +"sweden.museum", +"sydney.museum", +"tank.museum", +"tcm.museum", +"technology.museum", 
+"telekommunikation.museum", +"television.museum", +"texas.museum", +"textile.museum", +"theater.museum", +"time.museum", +"timekeeping.museum", +"topology.museum", +"torino.museum", +"touch.museum", +"town.museum", +"transport.museum", +"tree.museum", +"trolley.museum", +"trust.museum", +"trustee.museum", +"uhren.museum", +"ulm.museum", +"undersea.museum", +"university.museum", +"usa.museum", +"usantiques.museum", +"usarts.museum", +"uscountryestate.museum", +"usculture.museum", +"usdecorativearts.museum", +"usgarden.museum", +"ushistory.museum", +"ushuaia.museum", +"uslivinghistory.museum", +"utah.museum", +"uvic.museum", +"valley.museum", +"vantaa.museum", +"versailles.museum", +"viking.museum", +"village.museum", +"virginia.museum", +"virtual.museum", +"virtuel.museum", +"vlaanderen.museum", +"volkenkunde.museum", +"wales.museum", +"wallonie.museum", +"war.museum", +"washingtondc.museum", +"watchandclock.museum", +"watch-and-clock.museum", +"western.museum", +"westfalen.museum", +"whaling.museum", +"wildlife.museum", +"williamsburg.museum", +"windmill.museum", +"workshop.museum", +"york.museum", +"yorkshire.museum", +"yosemite.museum", +"youth.museum", +"zoological.museum", +"zoology.museum", +"ירושלים.museum", +"иком.museum", +"mv", +"aero.mv", +"biz.mv", +"com.mv", +"coop.mv", +"edu.mv", +"gov.mv", +"info.mv", +"int.mv", +"mil.mv", +"museum.mv", +"name.mv", +"net.mv", +"org.mv", +"pro.mv", +"mw", +"ac.mw", +"biz.mw", +"co.mw", +"com.mw", +"coop.mw", +"edu.mw", +"gov.mw", +"int.mw", +"museum.mw", +"net.mw", +"org.mw", +"mx", +"com.mx", +"org.mx", +"gob.mx", +"edu.mx", +"net.mx", +"my", +"com.my", +"net.my", +"org.my", +"gov.my", +"edu.my", +"mil.my", +"name.my", +"mz", +"ac.mz", +"adv.mz", +"co.mz", +"edu.mz", +"gov.mz", +"mil.mz", +"net.mz", +"org.mz", +"na", +"info.na", +"pro.na", +"name.na", +"school.na", +"or.na", +"dr.na", +"us.na", +"mx.na", +"ca.na", +"in.na", +"cc.na", +"tv.na", +"ws.na", +"mobi.na", +"co.na", +"com.na", +"org.na", +"name", +"nc", +"asso.nc", +"nom.nc", +"ne", +"net", +"nf", +"com.nf", +"net.nf", +"per.nf", +"rec.nf", +"web.nf", +"arts.nf", +"firm.nf", +"info.nf", +"other.nf", +"store.nf", +"ng", +"com.ng", +"edu.ng", +"gov.ng", +"i.ng", +"mil.ng", +"mobi.ng", +"name.ng", +"net.ng", +"org.ng", +"sch.ng", +"ni", +"ac.ni", +"biz.ni", +"co.ni", +"com.ni", +"edu.ni", +"gob.ni", +"in.ni", +"info.ni", +"int.ni", +"mil.ni", +"net.ni", +"nom.ni", +"org.ni", +"web.ni", +"nl", +"no", +"fhs.no", +"vgs.no", +"fylkesbibl.no", +"folkebibl.no", +"museum.no", +"idrett.no", +"priv.no", +"mil.no", +"stat.no", +"dep.no", +"kommune.no", +"herad.no", +"aa.no", +"ah.no", +"bu.no", +"fm.no", +"hl.no", +"hm.no", +"jan-mayen.no", +"mr.no", +"nl.no", +"nt.no", +"of.no", +"ol.no", +"oslo.no", +"rl.no", +"sf.no", +"st.no", +"svalbard.no", +"tm.no", +"tr.no", +"va.no", +"vf.no", +"gs.aa.no", +"gs.ah.no", +"gs.bu.no", +"gs.fm.no", +"gs.hl.no", +"gs.hm.no", +"gs.jan-mayen.no", +"gs.mr.no", +"gs.nl.no", +"gs.nt.no", +"gs.of.no", +"gs.ol.no", +"gs.oslo.no", +"gs.rl.no", +"gs.sf.no", +"gs.st.no", +"gs.svalbard.no", +"gs.tm.no", +"gs.tr.no", +"gs.va.no", +"gs.vf.no", +"akrehamn.no", +"åkrehamn.no", +"algard.no", +"ålgård.no", +"arna.no", +"brumunddal.no", +"bryne.no", +"bronnoysund.no", +"brønnøysund.no", +"drobak.no", +"drøbak.no", +"egersund.no", +"fetsund.no", +"floro.no", +"florø.no", +"fredrikstad.no", +"hokksund.no", +"honefoss.no", +"hønefoss.no", +"jessheim.no", +"jorpeland.no", +"jørpeland.no", +"kirkenes.no", +"kopervik.no", +"krokstadelva.no", +"langevag.no", +"langevåg.no", 
+"leirvik.no", +"mjondalen.no", +"mjøndalen.no", +"mo-i-rana.no", +"mosjoen.no", +"mosjøen.no", +"nesoddtangen.no", +"orkanger.no", +"osoyro.no", +"osøyro.no", +"raholt.no", +"råholt.no", +"sandnessjoen.no", +"sandnessjøen.no", +"skedsmokorset.no", +"slattum.no", +"spjelkavik.no", +"stathelle.no", +"stavern.no", +"stjordalshalsen.no", +"stjørdalshalsen.no", +"tananger.no", +"tranby.no", +"vossevangen.no", +"afjord.no", +"åfjord.no", +"agdenes.no", +"al.no", +"ål.no", +"alesund.no", +"ålesund.no", +"alstahaug.no", +"alta.no", +"áltá.no", +"alaheadju.no", +"álaheadju.no", +"alvdal.no", +"amli.no", +"åmli.no", +"amot.no", +"åmot.no", +"andebu.no", +"andoy.no", +"andøy.no", +"andasuolo.no", +"ardal.no", +"årdal.no", +"aremark.no", +"arendal.no", +"ås.no", +"aseral.no", +"åseral.no", +"asker.no", +"askim.no", +"askvoll.no", +"askoy.no", +"askøy.no", +"asnes.no", +"åsnes.no", +"audnedaln.no", +"aukra.no", +"aure.no", +"aurland.no", +"aurskog-holand.no", +"aurskog-høland.no", +"austevoll.no", +"austrheim.no", +"averoy.no", +"averøy.no", +"balestrand.no", +"ballangen.no", +"balat.no", +"bálát.no", +"balsfjord.no", +"bahccavuotna.no", +"báhccavuotna.no", +"bamble.no", +"bardu.no", +"beardu.no", +"beiarn.no", +"bajddar.no", +"bájddar.no", +"baidar.no", +"báidár.no", +"berg.no", +"bergen.no", +"berlevag.no", +"berlevåg.no", +"bearalvahki.no", +"bearalváhki.no", +"bindal.no", +"birkenes.no", +"bjarkoy.no", +"bjarkøy.no", +"bjerkreim.no", +"bjugn.no", +"bodo.no", +"bodø.no", +"badaddja.no", +"bådåddjå.no", +"budejju.no", +"bokn.no", +"bremanger.no", +"bronnoy.no", +"brønnøy.no", +"bygland.no", +"bykle.no", +"barum.no", +"bærum.no", +"bo.telemark.no", +"bø.telemark.no", +"bo.nordland.no", +"bø.nordland.no", +"bievat.no", +"bievát.no", +"bomlo.no", +"bømlo.no", +"batsfjord.no", +"båtsfjord.no", +"bahcavuotna.no", +"báhcavuotna.no", +"dovre.no", +"drammen.no", +"drangedal.no", +"dyroy.no", +"dyrøy.no", +"donna.no", +"dønna.no", +"eid.no", +"eidfjord.no", +"eidsberg.no", +"eidskog.no", +"eidsvoll.no", +"eigersund.no", +"elverum.no", +"enebakk.no", +"engerdal.no", +"etne.no", +"etnedal.no", +"evenes.no", +"evenassi.no", +"evenášši.no", +"evje-og-hornnes.no", +"farsund.no", +"fauske.no", +"fuossko.no", +"fuoisku.no", +"fedje.no", +"fet.no", +"finnoy.no", +"finnøy.no", +"fitjar.no", +"fjaler.no", +"fjell.no", +"flakstad.no", +"flatanger.no", +"flekkefjord.no", +"flesberg.no", +"flora.no", +"fla.no", +"flå.no", +"folldal.no", +"forsand.no", +"fosnes.no", +"frei.no", +"frogn.no", +"froland.no", +"frosta.no", +"frana.no", +"fræna.no", +"froya.no", +"frøya.no", +"fusa.no", +"fyresdal.no", +"forde.no", +"førde.no", +"gamvik.no", +"gangaviika.no", +"gáŋgaviika.no", +"gaular.no", +"gausdal.no", +"gildeskal.no", +"gildeskål.no", +"giske.no", +"gjemnes.no", +"gjerdrum.no", +"gjerstad.no", +"gjesdal.no", +"gjovik.no", +"gjøvik.no", +"gloppen.no", +"gol.no", +"gran.no", +"grane.no", +"granvin.no", +"gratangen.no", +"grimstad.no", +"grong.no", +"kraanghke.no", +"kråanghke.no", +"grue.no", +"gulen.no", +"hadsel.no", +"halden.no", +"halsa.no", +"hamar.no", +"hamaroy.no", +"habmer.no", +"hábmer.no", +"hapmir.no", +"hápmir.no", +"hammerfest.no", +"hammarfeasta.no", +"hámmárfeasta.no", +"haram.no", +"hareid.no", +"harstad.no", +"hasvik.no", +"aknoluokta.no", +"ákŋoluokta.no", +"hattfjelldal.no", +"aarborte.no", +"haugesund.no", +"hemne.no", +"hemnes.no", +"hemsedal.no", +"heroy.more-og-romsdal.no", +"herøy.møre-og-romsdal.no", +"heroy.nordland.no", +"herøy.nordland.no", +"hitra.no", +"hjartdal.no", +"hjelmeland.no", 
+"hobol.no", +"hobøl.no", +"hof.no", +"hol.no", +"hole.no", +"holmestrand.no", +"holtalen.no", +"holtålen.no", +"hornindal.no", +"horten.no", +"hurdal.no", +"hurum.no", +"hvaler.no", +"hyllestad.no", +"hagebostad.no", +"hægebostad.no", +"hoyanger.no", +"høyanger.no", +"hoylandet.no", +"høylandet.no", +"ha.no", +"hå.no", +"ibestad.no", +"inderoy.no", +"inderøy.no", +"iveland.no", +"jevnaker.no", +"jondal.no", +"jolster.no", +"jølster.no", +"karasjok.no", +"karasjohka.no", +"kárášjohka.no", +"karlsoy.no", +"galsa.no", +"gálsá.no", +"karmoy.no", +"karmøy.no", +"kautokeino.no", +"guovdageaidnu.no", +"klepp.no", +"klabu.no", +"klæbu.no", +"kongsberg.no", +"kongsvinger.no", +"kragero.no", +"kragerø.no", +"kristiansand.no", +"kristiansund.no", +"krodsherad.no", +"krødsherad.no", +"kvalsund.no", +"rahkkeravju.no", +"ráhkkerávju.no", +"kvam.no", +"kvinesdal.no", +"kvinnherad.no", +"kviteseid.no", +"kvitsoy.no", +"kvitsøy.no", +"kvafjord.no", +"kvæfjord.no", +"giehtavuoatna.no", +"kvanangen.no", +"kvænangen.no", +"navuotna.no", +"návuotna.no", +"kafjord.no", +"kåfjord.no", +"gaivuotna.no", +"gáivuotna.no", +"larvik.no", +"lavangen.no", +"lavagis.no", +"loabat.no", +"loabát.no", +"lebesby.no", +"davvesiida.no", +"leikanger.no", +"leirfjord.no", +"leka.no", +"leksvik.no", +"lenvik.no", +"leangaviika.no", +"leaŋgaviika.no", +"lesja.no", +"levanger.no", +"lier.no", +"lierne.no", +"lillehammer.no", +"lillesand.no", +"lindesnes.no", +"lindas.no", +"lindås.no", +"lom.no", +"loppa.no", +"lahppi.no", +"láhppi.no", +"lund.no", +"lunner.no", +"luroy.no", +"lurøy.no", +"luster.no", +"lyngdal.no", +"lyngen.no", +"ivgu.no", +"lardal.no", +"lerdal.no", +"lærdal.no", +"lodingen.no", +"lødingen.no", +"lorenskog.no", +"lørenskog.no", +"loten.no", +"løten.no", +"malvik.no", +"masoy.no", +"måsøy.no", +"muosat.no", +"muosát.no", +"mandal.no", +"marker.no", +"marnardal.no", +"masfjorden.no", +"meland.no", +"meldal.no", +"melhus.no", +"meloy.no", +"meløy.no", +"meraker.no", +"meråker.no", +"moareke.no", +"moåreke.no", +"midsund.no", +"midtre-gauldal.no", +"modalen.no", +"modum.no", +"molde.no", +"moskenes.no", +"moss.no", +"mosvik.no", +"malselv.no", +"målselv.no", +"malatvuopmi.no", +"málatvuopmi.no", +"namdalseid.no", +"aejrie.no", +"namsos.no", +"namsskogan.no", +"naamesjevuemie.no", +"nååmesjevuemie.no", +"laakesvuemie.no", +"nannestad.no", +"narvik.no", +"narviika.no", +"naustdal.no", +"nedre-eiker.no", +"nes.akershus.no", +"nes.buskerud.no", +"nesna.no", +"nesodden.no", +"nesseby.no", +"unjarga.no", +"unjárga.no", +"nesset.no", +"nissedal.no", +"nittedal.no", +"nord-aurdal.no", +"nord-fron.no", +"nord-odal.no", +"norddal.no", +"nordkapp.no", +"davvenjarga.no", +"davvenjárga.no", +"nordre-land.no", +"nordreisa.no", +"raisa.no", +"ráisa.no", +"nore-og-uvdal.no", +"notodden.no", +"naroy.no", +"nærøy.no", +"notteroy.no", +"nøtterøy.no", +"odda.no", +"oksnes.no", +"øksnes.no", +"oppdal.no", +"oppegard.no", +"oppegård.no", +"orkdal.no", +"orland.no", +"ørland.no", +"orskog.no", +"ørskog.no", +"orsta.no", +"ørsta.no", +"os.hedmark.no", +"os.hordaland.no", +"osen.no", +"osteroy.no", +"osterøy.no", +"ostre-toten.no", +"østre-toten.no", +"overhalla.no", +"ovre-eiker.no", +"øvre-eiker.no", +"oyer.no", +"øyer.no", +"oygarden.no", +"øygarden.no", +"oystre-slidre.no", +"øystre-slidre.no", +"porsanger.no", +"porsangu.no", +"porsáŋgu.no", +"porsgrunn.no", +"radoy.no", +"radøy.no", +"rakkestad.no", +"rana.no", +"ruovat.no", +"randaberg.no", +"rauma.no", +"rendalen.no", +"rennebu.no", +"rennesoy.no", +"rennesøy.no", +"rindal.no", 
+"ringebu.no", +"ringerike.no", +"ringsaker.no", +"rissa.no", +"risor.no", +"risør.no", +"roan.no", +"rollag.no", +"rygge.no", +"ralingen.no", +"rælingen.no", +"rodoy.no", +"rødøy.no", +"romskog.no", +"rømskog.no", +"roros.no", +"røros.no", +"rost.no", +"røst.no", +"royken.no", +"røyken.no", +"royrvik.no", +"røyrvik.no", +"rade.no", +"råde.no", +"salangen.no", +"siellak.no", +"saltdal.no", +"salat.no", +"sálát.no", +"sálat.no", +"samnanger.no", +"sande.more-og-romsdal.no", +"sande.møre-og-romsdal.no", +"sande.vestfold.no", +"sandefjord.no", +"sandnes.no", +"sandoy.no", +"sandøy.no", +"sarpsborg.no", +"sauda.no", +"sauherad.no", +"sel.no", +"selbu.no", +"selje.no", +"seljord.no", +"sigdal.no", +"siljan.no", +"sirdal.no", +"skaun.no", +"skedsmo.no", +"ski.no", +"skien.no", +"skiptvet.no", +"skjervoy.no", +"skjervøy.no", +"skierva.no", +"skiervá.no", +"skjak.no", +"skjåk.no", +"skodje.no", +"skanland.no", +"skånland.no", +"skanit.no", +"skánit.no", +"smola.no", +"smøla.no", +"snillfjord.no", +"snasa.no", +"snåsa.no", +"snoasa.no", +"snaase.no", +"snåase.no", +"sogndal.no", +"sokndal.no", +"sola.no", +"solund.no", +"songdalen.no", +"sortland.no", +"spydeberg.no", +"stange.no", +"stavanger.no", +"steigen.no", +"steinkjer.no", +"stjordal.no", +"stjørdal.no", +"stokke.no", +"stor-elvdal.no", +"stord.no", +"stordal.no", +"storfjord.no", +"omasvuotna.no", +"strand.no", +"stranda.no", +"stryn.no", +"sula.no", +"suldal.no", +"sund.no", +"sunndal.no", +"surnadal.no", +"sveio.no", +"svelvik.no", +"sykkylven.no", +"sogne.no", +"søgne.no", +"somna.no", +"sømna.no", +"sondre-land.no", +"søndre-land.no", +"sor-aurdal.no", +"sør-aurdal.no", +"sor-fron.no", +"sør-fron.no", +"sor-odal.no", +"sør-odal.no", +"sor-varanger.no", +"sør-varanger.no", +"matta-varjjat.no", +"mátta-várjjat.no", +"sorfold.no", +"sørfold.no", +"sorreisa.no", +"sørreisa.no", +"sorum.no", +"sørum.no", +"tana.no", +"deatnu.no", +"time.no", +"tingvoll.no", +"tinn.no", +"tjeldsund.no", +"dielddanuorri.no", +"tjome.no", +"tjøme.no", +"tokke.no", +"tolga.no", +"torsken.no", +"tranoy.no", +"tranøy.no", +"tromso.no", +"tromsø.no", +"tromsa.no", +"romsa.no", +"trondheim.no", +"troandin.no", +"trysil.no", +"trana.no", +"træna.no", +"trogstad.no", +"trøgstad.no", +"tvedestrand.no", +"tydal.no", +"tynset.no", +"tysfjord.no", +"divtasvuodna.no", +"divttasvuotna.no", +"tysnes.no", +"tysvar.no", +"tysvær.no", +"tonsberg.no", +"tønsberg.no", +"ullensaker.no", +"ullensvang.no", +"ulvik.no", +"utsira.no", +"vadso.no", +"vadsø.no", +"cahcesuolo.no", +"čáhcesuolo.no", +"vaksdal.no", +"valle.no", +"vang.no", +"vanylven.no", +"vardo.no", +"vardø.no", +"varggat.no", +"várggát.no", +"vefsn.no", +"vaapste.no", +"vega.no", +"vegarshei.no", +"vegårshei.no", +"vennesla.no", +"verdal.no", +"verran.no", +"vestby.no", +"vestnes.no", +"vestre-slidre.no", +"vestre-toten.no", +"vestvagoy.no", +"vestvågøy.no", +"vevelstad.no", +"vik.no", +"vikna.no", +"vindafjord.no", +"volda.no", +"voss.no", +"varoy.no", +"værøy.no", +"vagan.no", +"vågan.no", +"voagat.no", +"vagsoy.no", +"vågsøy.no", +"vaga.no", +"vågå.no", +"valer.ostfold.no", +"våler.østfold.no", +"valer.hedmark.no", +"våler.hedmark.no", +"*.np", +"nr", +"biz.nr", +"info.nr", +"gov.nr", +"edu.nr", +"org.nr", +"net.nr", +"com.nr", +"nu", +"nz", +"ac.nz", +"co.nz", +"cri.nz", +"geek.nz", +"gen.nz", +"govt.nz", +"health.nz", +"iwi.nz", +"kiwi.nz", +"maori.nz", +"mil.nz", +"māori.nz", +"net.nz", +"org.nz", +"parliament.nz", +"school.nz", +"om", +"co.om", +"com.om", +"edu.om", +"gov.om", +"med.om", +"museum.om", +"net.om", 
+"org.om", +"pro.om", +"onion", +"org", +"pa", +"ac.pa", +"gob.pa", +"com.pa", +"org.pa", +"sld.pa", +"edu.pa", +"net.pa", +"ing.pa", +"abo.pa", +"med.pa", +"nom.pa", +"pe", +"edu.pe", +"gob.pe", +"nom.pe", +"mil.pe", +"org.pe", +"com.pe", +"net.pe", +"pf", +"com.pf", +"org.pf", +"edu.pf", +"*.pg", +"ph", +"com.ph", +"net.ph", +"org.ph", +"gov.ph", +"edu.ph", +"ngo.ph", +"mil.ph", +"i.ph", +"pk", +"com.pk", +"net.pk", +"edu.pk", +"org.pk", +"fam.pk", +"biz.pk", +"web.pk", +"gov.pk", +"gob.pk", +"gok.pk", +"gon.pk", +"gop.pk", +"gos.pk", +"info.pk", +"pl", +"com.pl", +"net.pl", +"org.pl", +"aid.pl", +"agro.pl", +"atm.pl", +"auto.pl", +"biz.pl", +"edu.pl", +"gmina.pl", +"gsm.pl", +"info.pl", +"mail.pl", +"miasta.pl", +"media.pl", +"mil.pl", +"nieruchomosci.pl", +"nom.pl", +"pc.pl", +"powiat.pl", +"priv.pl", +"realestate.pl", +"rel.pl", +"sex.pl", +"shop.pl", +"sklep.pl", +"sos.pl", +"szkola.pl", +"targi.pl", +"tm.pl", +"tourism.pl", +"travel.pl", +"turystyka.pl", +"gov.pl", +"ap.gov.pl", +"ic.gov.pl", +"is.gov.pl", +"us.gov.pl", +"kmpsp.gov.pl", +"kppsp.gov.pl", +"kwpsp.gov.pl", +"psp.gov.pl", +"wskr.gov.pl", +"kwp.gov.pl", +"mw.gov.pl", +"ug.gov.pl", +"um.gov.pl", +"umig.gov.pl", +"ugim.gov.pl", +"upow.gov.pl", +"uw.gov.pl", +"starostwo.gov.pl", +"pa.gov.pl", +"po.gov.pl", +"psse.gov.pl", +"pup.gov.pl", +"rzgw.gov.pl", +"sa.gov.pl", +"so.gov.pl", +"sr.gov.pl", +"wsa.gov.pl", +"sko.gov.pl", +"uzs.gov.pl", +"wiih.gov.pl", +"winb.gov.pl", +"pinb.gov.pl", +"wios.gov.pl", +"witd.gov.pl", +"wzmiuw.gov.pl", +"piw.gov.pl", +"wiw.gov.pl", +"griw.gov.pl", +"wif.gov.pl", +"oum.gov.pl", +"sdn.gov.pl", +"zp.gov.pl", +"uppo.gov.pl", +"mup.gov.pl", +"wuoz.gov.pl", +"konsulat.gov.pl", +"oirm.gov.pl", +"augustow.pl", +"babia-gora.pl", +"bedzin.pl", +"beskidy.pl", +"bialowieza.pl", +"bialystok.pl", +"bielawa.pl", +"bieszczady.pl", +"boleslawiec.pl", +"bydgoszcz.pl", +"bytom.pl", +"cieszyn.pl", +"czeladz.pl", +"czest.pl", +"dlugoleka.pl", +"elblag.pl", +"elk.pl", +"glogow.pl", +"gniezno.pl", +"gorlice.pl", +"grajewo.pl", +"ilawa.pl", +"jaworzno.pl", +"jelenia-gora.pl", +"jgora.pl", +"kalisz.pl", +"kazimierz-dolny.pl", +"karpacz.pl", +"kartuzy.pl", +"kaszuby.pl", +"katowice.pl", +"kepno.pl", +"ketrzyn.pl", +"klodzko.pl", +"kobierzyce.pl", +"kolobrzeg.pl", +"konin.pl", +"konskowola.pl", +"kutno.pl", +"lapy.pl", +"lebork.pl", +"legnica.pl", +"lezajsk.pl", +"limanowa.pl", +"lomza.pl", +"lowicz.pl", +"lubin.pl", +"lukow.pl", +"malbork.pl", +"malopolska.pl", +"mazowsze.pl", +"mazury.pl", +"mielec.pl", +"mielno.pl", +"mragowo.pl", +"naklo.pl", +"nowaruda.pl", +"nysa.pl", +"olawa.pl", +"olecko.pl", +"olkusz.pl", +"olsztyn.pl", +"opoczno.pl", +"opole.pl", +"ostroda.pl", +"ostroleka.pl", +"ostrowiec.pl", +"ostrowwlkp.pl", +"pila.pl", +"pisz.pl", +"podhale.pl", +"podlasie.pl", +"polkowice.pl", +"pomorze.pl", +"pomorskie.pl", +"prochowice.pl", +"pruszkow.pl", +"przeworsk.pl", +"pulawy.pl", +"radom.pl", +"rawa-maz.pl", +"rybnik.pl", +"rzeszow.pl", +"sanok.pl", +"sejny.pl", +"slask.pl", +"slupsk.pl", +"sosnowiec.pl", +"stalowa-wola.pl", +"skoczow.pl", +"starachowice.pl", +"stargard.pl", +"suwalki.pl", +"swidnica.pl", +"swiebodzin.pl", +"swinoujscie.pl", +"szczecin.pl", +"szczytno.pl", +"tarnobrzeg.pl", +"tgory.pl", +"turek.pl", +"tychy.pl", +"ustka.pl", +"walbrzych.pl", +"warmia.pl", +"warszawa.pl", +"waw.pl", +"wegrow.pl", +"wielun.pl", +"wlocl.pl", +"wloclawek.pl", +"wodzislaw.pl", +"wolomin.pl", +"wroclaw.pl", +"zachpomor.pl", +"zagan.pl", +"zarow.pl", +"zgora.pl", +"zgorzelec.pl", +"pm", +"pn", +"gov.pn", +"co.pn", 
+"org.pn", +"edu.pn", +"net.pn", +"post", +"pr", +"com.pr", +"net.pr", +"org.pr", +"gov.pr", +"edu.pr", +"isla.pr", +"pro.pr", +"biz.pr", +"info.pr", +"name.pr", +"est.pr", +"prof.pr", +"ac.pr", +"pro", +"aaa.pro", +"aca.pro", +"acct.pro", +"avocat.pro", +"bar.pro", +"cpa.pro", +"eng.pro", +"jur.pro", +"law.pro", +"med.pro", +"recht.pro", +"ps", +"edu.ps", +"gov.ps", +"sec.ps", +"plo.ps", +"com.ps", +"org.ps", +"net.ps", +"pt", +"net.pt", +"gov.pt", +"org.pt", +"edu.pt", +"int.pt", +"publ.pt", +"com.pt", +"nome.pt", +"pw", +"co.pw", +"ne.pw", +"or.pw", +"ed.pw", +"go.pw", +"belau.pw", +"py", +"com.py", +"coop.py", +"edu.py", +"gov.py", +"mil.py", +"net.py", +"org.py", +"qa", +"com.qa", +"edu.qa", +"gov.qa", +"mil.qa", +"name.qa", +"net.qa", +"org.qa", +"sch.qa", +"re", +"asso.re", +"com.re", +"nom.re", +"ro", +"arts.ro", +"com.ro", +"firm.ro", +"info.ro", +"nom.ro", +"nt.ro", +"org.ro", +"rec.ro", +"store.ro", +"tm.ro", +"www.ro", +"rs", +"ac.rs", +"co.rs", +"edu.rs", +"gov.rs", +"in.rs", +"org.rs", +"ru", +"rw", +"ac.rw", +"co.rw", +"coop.rw", +"gov.rw", +"mil.rw", +"net.rw", +"org.rw", +"sa", +"com.sa", +"net.sa", +"org.sa", +"gov.sa", +"med.sa", +"pub.sa", +"edu.sa", +"sch.sa", +"sb", +"com.sb", +"edu.sb", +"gov.sb", +"net.sb", +"org.sb", +"sc", +"com.sc", +"gov.sc", +"net.sc", +"org.sc", +"edu.sc", +"sd", +"com.sd", +"net.sd", +"org.sd", +"edu.sd", +"med.sd", +"tv.sd", +"gov.sd", +"info.sd", +"se", +"a.se", +"ac.se", +"b.se", +"bd.se", +"brand.se", +"c.se", +"d.se", +"e.se", +"f.se", +"fh.se", +"fhsk.se", +"fhv.se", +"g.se", +"h.se", +"i.se", +"k.se", +"komforb.se", +"kommunalforbund.se", +"komvux.se", +"l.se", +"lanbib.se", +"m.se", +"n.se", +"naturbruksgymn.se", +"o.se", +"org.se", +"p.se", +"parti.se", +"pp.se", +"press.se", +"r.se", +"s.se", +"t.se", +"tm.se", +"u.se", +"w.se", +"x.se", +"y.se", +"z.se", +"sg", +"com.sg", +"net.sg", +"org.sg", +"gov.sg", +"edu.sg", +"per.sg", +"sh", +"com.sh", +"net.sh", +"gov.sh", +"org.sh", +"mil.sh", +"si", +"sj", +"sk", +"sl", +"com.sl", +"net.sl", +"edu.sl", +"gov.sl", +"org.sl", +"sm", +"sn", +"art.sn", +"com.sn", +"edu.sn", +"gouv.sn", +"org.sn", +"perso.sn", +"univ.sn", +"so", +"com.so", +"edu.so", +"gov.so", +"me.so", +"net.so", +"org.so", +"sr", +"ss", +"biz.ss", +"com.ss", +"edu.ss", +"gov.ss", +"net.ss", +"org.ss", +"st", +"co.st", +"com.st", +"consulado.st", +"edu.st", +"embaixada.st", +"gov.st", +"mil.st", +"net.st", +"org.st", +"principe.st", +"saotome.st", +"store.st", +"su", +"sv", +"com.sv", +"edu.sv", +"gob.sv", +"org.sv", +"red.sv", +"sx", +"gov.sx", +"sy", +"edu.sy", +"gov.sy", +"net.sy", +"mil.sy", +"com.sy", +"org.sy", +"sz", +"co.sz", +"ac.sz", +"org.sz", +"tc", +"td", +"tel", +"tf", +"tg", +"th", +"ac.th", +"co.th", +"go.th", +"in.th", +"mi.th", +"net.th", +"or.th", +"tj", +"ac.tj", +"biz.tj", +"co.tj", +"com.tj", +"edu.tj", +"go.tj", +"gov.tj", +"int.tj", +"mil.tj", +"name.tj", +"net.tj", +"nic.tj", +"org.tj", +"test.tj", +"web.tj", +"tk", +"tl", +"gov.tl", +"tm", +"com.tm", +"co.tm", +"org.tm", +"net.tm", +"nom.tm", +"gov.tm", +"mil.tm", +"edu.tm", +"tn", +"com.tn", +"ens.tn", +"fin.tn", +"gov.tn", +"ind.tn", +"intl.tn", +"nat.tn", +"net.tn", +"org.tn", +"info.tn", +"perso.tn", +"tourism.tn", +"edunet.tn", +"rnrt.tn", +"rns.tn", +"rnu.tn", +"mincom.tn", +"agrinet.tn", +"defense.tn", +"turen.tn", +"to", +"com.to", +"gov.to", +"net.to", +"org.to", +"edu.to", +"mil.to", +"tr", +"av.tr", +"bbs.tr", +"bel.tr", +"biz.tr", +"com.tr", +"dr.tr", +"edu.tr", +"gen.tr", +"gov.tr", +"info.tr", +"mil.tr", +"k12.tr", +"kep.tr", 
+"name.tr", +"net.tr", +"org.tr", +"pol.tr", +"tel.tr", +"tsk.tr", +"tv.tr", +"web.tr", +"nc.tr", +"gov.nc.tr", +"tt", +"co.tt", +"com.tt", +"org.tt", +"net.tt", +"biz.tt", +"info.tt", +"pro.tt", +"int.tt", +"coop.tt", +"jobs.tt", +"mobi.tt", +"travel.tt", +"museum.tt", +"aero.tt", +"name.tt", +"gov.tt", +"edu.tt", +"tv", +"tw", +"edu.tw", +"gov.tw", +"mil.tw", +"com.tw", +"net.tw", +"org.tw", +"idv.tw", +"game.tw", +"ebiz.tw", +"club.tw", +"網路.tw", +"組織.tw", +"商業.tw", +"tz", +"ac.tz", +"co.tz", +"go.tz", +"hotel.tz", +"info.tz", +"me.tz", +"mil.tz", +"mobi.tz", +"ne.tz", +"or.tz", +"sc.tz", +"tv.tz", +"ua", +"com.ua", +"edu.ua", +"gov.ua", +"in.ua", +"net.ua", +"org.ua", +"cherkassy.ua", +"cherkasy.ua", +"chernigov.ua", +"chernihiv.ua", +"chernivtsi.ua", +"chernovtsy.ua", +"ck.ua", +"cn.ua", +"cr.ua", +"crimea.ua", +"cv.ua", +"dn.ua", +"dnepropetrovsk.ua", +"dnipropetrovsk.ua", +"dominic.ua", +"donetsk.ua", +"dp.ua", +"if.ua", +"ivano-frankivsk.ua", +"kh.ua", +"kharkiv.ua", +"kharkov.ua", +"kherson.ua", +"khmelnitskiy.ua", +"khmelnytskyi.ua", +"kiev.ua", +"kirovograd.ua", +"km.ua", +"kr.ua", +"krym.ua", +"ks.ua", +"kv.ua", +"kyiv.ua", +"lg.ua", +"lt.ua", +"lugansk.ua", +"lutsk.ua", +"lv.ua", +"lviv.ua", +"mk.ua", +"mykolaiv.ua", +"nikolaev.ua", +"od.ua", +"odesa.ua", +"odessa.ua", +"pl.ua", +"poltava.ua", +"rivne.ua", +"rovno.ua", +"rv.ua", +"sb.ua", +"sebastopol.ua", +"sevastopol.ua", +"sm.ua", +"sumy.ua", +"te.ua", +"ternopil.ua", +"uz.ua", +"uzhgorod.ua", +"vinnica.ua", +"vinnytsia.ua", +"vn.ua", +"volyn.ua", +"yalta.ua", +"zaporizhzhe.ua", +"zaporizhzhia.ua", +"zhitomir.ua", +"zhytomyr.ua", +"zp.ua", +"zt.ua", +"ug", +"co.ug", +"or.ug", +"ac.ug", +"sc.ug", +"go.ug", +"ne.ug", +"com.ug", +"org.ug", +"uk", +"ac.uk", +"co.uk", +"gov.uk", +"ltd.uk", +"me.uk", +"net.uk", +"nhs.uk", +"org.uk", +"plc.uk", +"police.uk", +"*.sch.uk", +"us", +"dni.us", +"fed.us", +"isa.us", +"kids.us", +"nsn.us", +"ak.us", +"al.us", +"ar.us", +"as.us", +"az.us", +"ca.us", +"co.us", +"ct.us", +"dc.us", +"de.us", +"fl.us", +"ga.us", +"gu.us", +"hi.us", +"ia.us", +"id.us", +"il.us", +"in.us", +"ks.us", +"ky.us", +"la.us", +"ma.us", +"md.us", +"me.us", +"mi.us", +"mn.us", +"mo.us", +"ms.us", +"mt.us", +"nc.us", +"nd.us", +"ne.us", +"nh.us", +"nj.us", +"nm.us", +"nv.us", +"ny.us", +"oh.us", +"ok.us", +"or.us", +"pa.us", +"pr.us", +"ri.us", +"sc.us", +"sd.us", +"tn.us", +"tx.us", +"ut.us", +"vi.us", +"vt.us", +"va.us", +"wa.us", +"wi.us", +"wv.us", +"wy.us", +"k12.ak.us", +"k12.al.us", +"k12.ar.us", +"k12.as.us", +"k12.az.us", +"k12.ca.us", +"k12.co.us", +"k12.ct.us", +"k12.dc.us", +"k12.de.us", +"k12.fl.us", +"k12.ga.us", +"k12.gu.us", +"k12.ia.us", +"k12.id.us", +"k12.il.us", +"k12.in.us", +"k12.ks.us", +"k12.ky.us", +"k12.la.us", +"k12.ma.us", +"k12.md.us", +"k12.me.us", +"k12.mi.us", +"k12.mn.us", +"k12.mo.us", +"k12.ms.us", +"k12.mt.us", +"k12.nc.us", +"k12.ne.us", +"k12.nh.us", +"k12.nj.us", +"k12.nm.us", +"k12.nv.us", +"k12.ny.us", +"k12.oh.us", +"k12.ok.us", +"k12.or.us", +"k12.pa.us", +"k12.pr.us", +"k12.ri.us", +"k12.sc.us", +"k12.tn.us", +"k12.tx.us", +"k12.ut.us", +"k12.vi.us", +"k12.vt.us", +"k12.va.us", +"k12.wa.us", +"k12.wi.us", +"k12.wy.us", +"cc.ak.us", +"cc.al.us", +"cc.ar.us", +"cc.as.us", +"cc.az.us", +"cc.ca.us", +"cc.co.us", +"cc.ct.us", +"cc.dc.us", +"cc.de.us", +"cc.fl.us", +"cc.ga.us", +"cc.gu.us", +"cc.hi.us", +"cc.ia.us", +"cc.id.us", +"cc.il.us", +"cc.in.us", +"cc.ks.us", +"cc.ky.us", +"cc.la.us", +"cc.ma.us", +"cc.md.us", +"cc.me.us", +"cc.mi.us", +"cc.mn.us", +"cc.mo.us", +"cc.ms.us", 
+"cc.mt.us", +"cc.nc.us", +"cc.nd.us", +"cc.ne.us", +"cc.nh.us", +"cc.nj.us", +"cc.nm.us", +"cc.nv.us", +"cc.ny.us", +"cc.oh.us", +"cc.ok.us", +"cc.or.us", +"cc.pa.us", +"cc.pr.us", +"cc.ri.us", +"cc.sc.us", +"cc.sd.us", +"cc.tn.us", +"cc.tx.us", +"cc.ut.us", +"cc.vi.us", +"cc.vt.us", +"cc.va.us", +"cc.wa.us", +"cc.wi.us", +"cc.wv.us", +"cc.wy.us", +"lib.ak.us", +"lib.al.us", +"lib.ar.us", +"lib.as.us", +"lib.az.us", +"lib.ca.us", +"lib.co.us", +"lib.ct.us", +"lib.dc.us", +"lib.fl.us", +"lib.ga.us", +"lib.gu.us", +"lib.hi.us", +"lib.ia.us", +"lib.id.us", +"lib.il.us", +"lib.in.us", +"lib.ks.us", +"lib.ky.us", +"lib.la.us", +"lib.ma.us", +"lib.md.us", +"lib.me.us", +"lib.mi.us", +"lib.mn.us", +"lib.mo.us", +"lib.ms.us", +"lib.mt.us", +"lib.nc.us", +"lib.nd.us", +"lib.ne.us", +"lib.nh.us", +"lib.nj.us", +"lib.nm.us", +"lib.nv.us", +"lib.ny.us", +"lib.oh.us", +"lib.ok.us", +"lib.or.us", +"lib.pa.us", +"lib.pr.us", +"lib.ri.us", +"lib.sc.us", +"lib.sd.us", +"lib.tn.us", +"lib.tx.us", +"lib.ut.us", +"lib.vi.us", +"lib.vt.us", +"lib.va.us", +"lib.wa.us", +"lib.wi.us", +"lib.wy.us", +"pvt.k12.ma.us", +"chtr.k12.ma.us", +"paroch.k12.ma.us", +"ann-arbor.mi.us", +"cog.mi.us", +"dst.mi.us", +"eaton.mi.us", +"gen.mi.us", +"mus.mi.us", +"tec.mi.us", +"washtenaw.mi.us", +"uy", +"com.uy", +"edu.uy", +"gub.uy", +"mil.uy", +"net.uy", +"org.uy", +"uz", +"co.uz", +"com.uz", +"net.uz", +"org.uz", +"va", +"vc", +"com.vc", +"net.vc", +"org.vc", +"gov.vc", +"mil.vc", +"edu.vc", +"ve", +"arts.ve", +"co.ve", +"com.ve", +"e12.ve", +"edu.ve", +"firm.ve", +"gob.ve", +"gov.ve", +"info.ve", +"int.ve", +"mil.ve", +"net.ve", +"org.ve", +"rec.ve", +"store.ve", +"tec.ve", +"web.ve", +"vg", +"vi", +"co.vi", +"com.vi", +"k12.vi", +"net.vi", +"org.vi", +"vn", +"com.vn", +"net.vn", +"org.vn", +"edu.vn", +"gov.vn", +"int.vn", +"ac.vn", +"biz.vn", +"info.vn", +"name.vn", +"pro.vn", +"health.vn", +"vu", +"com.vu", +"edu.vu", +"net.vu", +"org.vu", +"wf", +"ws", +"com.ws", +"net.ws", +"org.ws", +"gov.ws", +"edu.ws", +"yt", +"امارات", +"հայ", +"বাংলা", +"бг", +"бел", +"中国", +"中國", +"الجزائر", +"مصر", +"ею", +"ευ", +"موريتانيا", +"გე", +"ελ", +"香港", +"公司.香港", +"教育.香港", +"政府.香港", +"個人.香港", +"網絡.香港", +"組織.香港", +"ಭಾರತ", +"ଭାରତ", +"ভাৰত", +"भारतम्", +"भारोत", +"ڀارت", +"ഭാരതം", +"भारत", +"بارت", +"بھارت", +"భారత్", +"ભારત", +"ਭਾਰਤ", +"ভারত", +"இந்தியா", +"ایران", +"ايران", +"عراق", +"الاردن", +"한국", +"қаз", +"ලංකා", +"இலங்கை", +"المغرب", +"мкд", +"мон", +"澳門", +"澳门", +"مليسيا", +"عمان", +"پاکستان", +"پاكستان", +"فلسطين", +"срб", +"пр.срб", +"орг.срб", +"обр.срб", +"од.срб", +"упр.срб", +"ак.срб", +"рф", +"قطر", +"السعودية", +"السعودیة", +"السعودیۃ", +"السعوديه", +"سودان", +"新加坡", +"சிங்கப்பூர்", +"سورية", +"سوريا", +"ไทย", +"ศึกษา.ไทย", +"ธุรกิจ.ไทย", +"รัฐบาล.ไทย", +"ทหาร.ไทย", +"เน็ต.ไทย", +"องค์กร.ไทย", +"تونس", +"台灣", +"台湾", +"臺灣", +"укр", +"اليمن", +"xxx", +"*.ye", +"ac.za", +"agric.za", +"alt.za", +"co.za", +"edu.za", +"gov.za", +"grondar.za", +"law.za", +"mil.za", +"net.za", +"ngo.za", +"nic.za", +"nis.za", +"nom.za", +"org.za", +"school.za", +"tm.za", +"web.za", +"zm", +"ac.zm", +"biz.zm", +"co.zm", +"com.zm", +"edu.zm", +"gov.zm", +"info.zm", +"mil.zm", +"net.zm", +"org.zm", +"sch.zm", +"zw", +"ac.zw", +"co.zw", +"gov.zw", +"mil.zw", +"org.zw", +"aaa", +"aarp", +"abarth", +"abb", +"abbott", +"abbvie", +"abc", +"able", +"abogado", +"abudhabi", +"academy", +"accenture", +"accountant", +"accountants", +"aco", +"actor", +"adac", +"ads", +"adult", +"aeg", +"aetna", +"afamilycompany", +"afl", +"africa", +"agakhan", +"agency", +"aig", 
+"aigo", +"airbus", +"airforce", +"airtel", +"akdn", +"alfaromeo", +"alibaba", +"alipay", +"allfinanz", +"allstate", +"ally", +"alsace", +"alstom", +"amazon", +"americanexpress", +"americanfamily", +"amex", +"amfam", +"amica", +"amsterdam", +"analytics", +"android", +"anquan", +"anz", +"aol", +"apartments", +"app", +"apple", +"aquarelle", +"arab", +"aramco", +"archi", +"army", +"art", +"arte", +"asda", +"associates", +"athleta", +"attorney", +"auction", +"audi", +"audible", +"audio", +"auspost", +"author", +"auto", +"autos", +"avianca", +"aws", +"axa", +"azure", +"baby", +"baidu", +"banamex", +"bananarepublic", +"band", +"bank", +"bar", +"barcelona", +"barclaycard", +"barclays", +"barefoot", +"bargains", +"baseball", +"basketball", +"bauhaus", +"bayern", +"bbc", +"bbt", +"bbva", +"bcg", +"bcn", +"beats", +"beauty", +"beer", +"bentley", +"berlin", +"best", +"bestbuy", +"bet", +"bharti", +"bible", +"bid", +"bike", +"bing", +"bingo", +"bio", +"black", +"blackfriday", +"blockbuster", +"blog", +"bloomberg", +"blue", +"bms", +"bmw", +"bnpparibas", +"boats", +"boehringer", +"bofa", +"bom", +"bond", +"boo", +"book", +"booking", +"bosch", +"bostik", +"boston", +"bot", +"boutique", +"box", +"bradesco", +"bridgestone", +"broadway", +"broker", +"brother", +"brussels", +"budapest", +"bugatti", +"build", +"builders", +"business", +"buy", +"buzz", +"bzh", +"cab", +"cafe", +"cal", +"call", +"calvinklein", +"cam", +"camera", +"camp", +"cancerresearch", +"canon", +"capetown", +"capital", +"capitalone", +"car", +"caravan", +"cards", +"care", +"career", +"careers", +"cars", +"casa", +"case", +"caseih", +"cash", +"casino", +"catering", +"catholic", +"cba", +"cbn", +"cbre", +"cbs", +"ceb", +"center", +"ceo", +"cern", +"cfa", +"cfd", +"chanel", +"channel", +"charity", +"chase", +"chat", +"cheap", +"chintai", +"christmas", +"chrome", +"church", +"cipriani", +"circle", +"cisco", +"citadel", +"citi", +"citic", +"city", +"cityeats", +"claims", +"cleaning", +"click", +"clinic", +"clinique", +"clothing", +"cloud", +"club", +"clubmed", +"coach", +"codes", +"coffee", +"college", +"cologne", +"comcast", +"commbank", +"community", +"company", +"compare", +"computer", +"comsec", +"condos", +"construction", +"consulting", +"contact", +"contractors", +"cooking", +"cookingchannel", +"cool", +"corsica", +"country", +"coupon", +"coupons", +"courses", +"cpa", +"credit", +"creditcard", +"creditunion", +"cricket", +"crown", +"crs", +"cruise", +"cruises", +"csc", +"cuisinella", +"cymru", +"cyou", +"dabur", +"dad", +"dance", +"data", +"date", +"dating", +"datsun", +"day", +"dclk", +"dds", +"deal", +"dealer", +"deals", +"degree", +"delivery", +"dell", +"deloitte", +"delta", +"democrat", +"dental", +"dentist", +"desi", +"design", +"dev", +"dhl", +"diamonds", +"diet", +"digital", +"direct", +"directory", +"discount", +"discover", +"dish", +"diy", +"dnp", +"docs", +"doctor", +"dog", +"domains", +"dot", +"download", +"drive", +"dtv", +"dubai", +"duck", +"dunlop", +"dupont", +"durban", +"dvag", +"dvr", +"earth", +"eat", +"eco", +"edeka", +"education", +"email", +"emerck", +"energy", +"engineer", +"engineering", +"enterprises", +"epson", +"equipment", +"ericsson", +"erni", +"esq", +"estate", +"esurance", +"etisalat", +"eurovision", +"eus", +"events", +"exchange", +"expert", +"exposed", +"express", +"extraspace", +"fage", +"fail", +"fairwinds", +"faith", +"family", +"fan", +"fans", +"farm", +"farmers", +"fashion", +"fast", +"fedex", +"feedback", +"ferrari", +"ferrero", +"fiat", +"fidelity", +"fido", +"film", +"final", +"finance", 
+"financial", +"fire", +"firestone", +"firmdale", +"fish", +"fishing", +"fit", +"fitness", +"flickr", +"flights", +"flir", +"florist", +"flowers", +"fly", +"foo", +"food", +"foodnetwork", +"football", +"ford", +"forex", +"forsale", +"forum", +"foundation", +"fox", +"free", +"fresenius", +"frl", +"frogans", +"frontdoor", +"frontier", +"ftr", +"fujitsu", +"fujixerox", +"fun", +"fund", +"furniture", +"futbol", +"fyi", +"gal", +"gallery", +"gallo", +"gallup", +"game", +"games", +"gap", +"garden", +"gay", +"gbiz", +"gdn", +"gea", +"gent", +"genting", +"george", +"ggee", +"gift", +"gifts", +"gives", +"giving", +"glade", +"glass", +"gle", +"global", +"globo", +"gmail", +"gmbh", +"gmo", +"gmx", +"godaddy", +"gold", +"goldpoint", +"golf", +"goo", +"goodyear", +"goog", +"google", +"gop", +"got", +"grainger", +"graphics", +"gratis", +"green", +"gripe", +"grocery", +"group", +"guardian", +"gucci", +"guge", +"guide", +"guitars", +"guru", +"hair", +"hamburg", +"hangout", +"haus", +"hbo", +"hdfc", +"hdfcbank", +"health", +"healthcare", +"help", +"helsinki", +"here", +"hermes", +"hgtv", +"hiphop", +"hisamitsu", +"hitachi", +"hiv", +"hkt", +"hockey", +"holdings", +"holiday", +"homedepot", +"homegoods", +"homes", +"homesense", +"honda", +"horse", +"hospital", +"host", +"hosting", +"hot", +"hoteles", +"hotels", +"hotmail", +"house", +"how", +"hsbc", +"hughes", +"hyatt", +"hyundai", +"ibm", +"icbc", +"ice", +"icu", +"ieee", +"ifm", +"ikano", +"imamat", +"imdb", +"immo", +"immobilien", +"inc", +"industries", +"infiniti", +"ing", +"ink", +"institute", +"insurance", +"insure", +"intel", +"international", +"intuit", +"investments", +"ipiranga", +"irish", +"ismaili", +"ist", +"istanbul", +"itau", +"itv", +"iveco", +"jaguar", +"java", +"jcb", +"jcp", +"jeep", +"jetzt", +"jewelry", +"jio", +"jll", +"jmp", +"jnj", +"joburg", +"jot", +"joy", +"jpmorgan", +"jprs", +"juegos", +"juniper", +"kaufen", +"kddi", +"kerryhotels", +"kerrylogistics", +"kerryproperties", +"kfh", +"kia", +"kim", +"kinder", +"kindle", +"kitchen", +"kiwi", +"koeln", +"komatsu", +"kosher", +"kpmg", +"kpn", +"krd", +"kred", +"kuokgroup", +"kyoto", +"lacaixa", +"lamborghini", +"lamer", +"lancaster", +"lancia", +"land", +"landrover", +"lanxess", +"lasalle", +"lat", +"latino", +"latrobe", +"law", +"lawyer", +"lds", +"lease", +"leclerc", +"lefrak", +"legal", +"lego", +"lexus", +"lgbt", +"lidl", +"life", +"lifeinsurance", +"lifestyle", +"lighting", +"like", +"lilly", +"limited", +"limo", +"lincoln", +"linde", +"link", +"lipsy", +"live", +"living", +"lixil", +"llc", +"llp", +"loan", +"loans", +"locker", +"locus", +"loft", +"lol", +"london", +"lotte", +"lotto", +"love", +"lpl", +"lplfinancial", +"ltd", +"ltda", +"lundbeck", +"lupin", +"luxe", +"luxury", +"macys", +"madrid", +"maif", +"maison", +"makeup", +"man", +"management", +"mango", +"map", +"market", +"marketing", +"markets", +"marriott", +"marshalls", +"maserati", +"mattel", +"mba", +"mckinsey", +"med", +"media", +"meet", +"melbourne", +"meme", +"memorial", +"men", +"menu", +"merckmsd", +"metlife", +"miami", +"microsoft", +"mini", +"mint", +"mit", +"mitsubishi", +"mlb", +"mls", +"mma", +"mobile", +"moda", +"moe", +"moi", +"mom", +"monash", +"money", +"monster", +"mormon", +"mortgage", +"moscow", +"moto", +"motorcycles", +"mov", +"movie", +"msd", +"mtn", +"mtr", +"mutual", +"nab", +"nadex", +"nagoya", +"nationwide", +"natura", +"navy", +"nba", +"nec", +"netbank", +"netflix", +"network", +"neustar", +"new", +"newholland", +"news", +"next", +"nextdirect", +"nexus", +"nfl", +"ngo", +"nhk", +"nico", 
+"nike", +"nikon", +"ninja", +"nissan", +"nissay", +"nokia", +"northwesternmutual", +"norton", +"now", +"nowruz", +"nowtv", +"nra", +"nrw", +"ntt", +"nyc", +"obi", +"observer", +"off", +"office", +"okinawa", +"olayan", +"olayangroup", +"oldnavy", +"ollo", +"omega", +"one", +"ong", +"onl", +"online", +"onyourside", +"ooo", +"open", +"oracle", +"orange", +"organic", +"origins", +"osaka", +"otsuka", +"ott", +"ovh", +"page", +"panasonic", +"paris", +"pars", +"partners", +"parts", +"party", +"passagens", +"pay", +"pccw", +"pet", +"pfizer", +"pharmacy", +"phd", +"philips", +"phone", +"photo", +"photography", +"photos", +"physio", +"pics", +"pictet", +"pictures", +"pid", +"pin", +"ping", +"pink", +"pioneer", +"pizza", +"place", +"play", +"playstation", +"plumbing", +"plus", +"pnc", +"pohl", +"poker", +"politie", +"porn", +"pramerica", +"praxi", +"press", +"prime", +"prod", +"productions", +"prof", +"progressive", +"promo", +"properties", +"property", +"protection", +"pru", +"prudential", +"pub", +"pwc", +"qpon", +"quebec", +"quest", +"qvc", +"racing", +"radio", +"raid", +"read", +"realestate", +"realtor", +"realty", +"recipes", +"red", +"redstone", +"redumbrella", +"rehab", +"reise", +"reisen", +"reit", +"reliance", +"ren", +"rent", +"rentals", +"repair", +"report", +"republican", +"rest", +"restaurant", +"review", +"reviews", +"rexroth", +"rich", +"richardli", +"ricoh", +"rightathome", +"ril", +"rio", +"rip", +"rmit", +"rocher", +"rocks", +"rodeo", +"rogers", +"room", +"rsvp", +"rugby", +"ruhr", +"run", +"rwe", +"ryukyu", +"saarland", +"safe", +"safety", +"sakura", +"sale", +"salon", +"samsclub", +"samsung", +"sandvik", +"sandvikcoromant", +"sanofi", +"sap", +"sarl", +"sas", +"save", +"saxo", +"sbi", +"sbs", +"sca", +"scb", +"schaeffler", +"schmidt", +"scholarships", +"school", +"schule", +"schwarz", +"science", +"scjohnson", +"scor", +"scot", +"search", +"seat", +"secure", +"security", +"seek", +"select", +"sener", +"services", +"ses", +"seven", +"sew", +"sex", +"sexy", +"sfr", +"shangrila", +"sharp", +"shaw", +"shell", +"shia", +"shiksha", +"shoes", +"shop", +"shopping", +"shouji", +"show", +"showtime", +"shriram", +"silk", +"sina", +"singles", +"site", +"ski", +"skin", +"sky", +"skype", +"sling", +"smart", +"smile", +"sncf", +"soccer", +"social", +"softbank", +"software", +"sohu", +"solar", +"solutions", +"song", +"sony", +"soy", +"spa", +"space", +"sport", +"spot", +"spreadbetting", +"srl", +"stada", +"staples", +"star", +"statebank", +"statefarm", +"stc", +"stcgroup", +"stockholm", +"storage", +"store", +"stream", +"studio", +"study", +"style", +"sucks", +"supplies", +"supply", +"support", +"surf", +"surgery", +"suzuki", +"swatch", +"swiftcover", +"swiss", +"sydney", +"symantec", +"systems", +"tab", +"taipei", +"talk", +"taobao", +"target", +"tatamotors", +"tatar", +"tattoo", +"tax", +"taxi", +"tci", +"tdk", +"team", +"tech", +"technology", +"temasek", +"tennis", +"teva", +"thd", +"theater", +"theatre", +"tiaa", +"tickets", +"tienda", +"tiffany", +"tips", +"tires", +"tirol", +"tjmaxx", +"tjx", +"tkmaxx", +"tmall", +"today", +"tokyo", +"tools", +"top", +"toray", +"toshiba", +"total", +"tours", +"town", +"toyota", +"toys", +"trade", +"trading", +"training", +"travel", +"travelchannel", +"travelers", +"travelersinsurance", +"trust", +"trv", +"tube", +"tui", +"tunes", +"tushu", +"tvs", +"ubank", +"ubs", +"unicom", +"university", +"uno", +"uol", +"ups", +"vacations", +"vana", +"vanguard", +"vegas", +"ventures", +"verisign", +"versicherung", +"vet", +"viajes", +"video", +"vig", +"viking", 
+"villas", +"vin", +"vip", +"virgin", +"visa", +"vision", +"viva", +"vivo", +"vlaanderen", +"vodka", +"volkswagen", +"volvo", +"vote", +"voting", +"voto", +"voyage", +"vuelos", +"wales", +"walmart", +"walter", +"wang", +"wanggou", +"watch", +"watches", +"weather", +"weatherchannel", +"webcam", +"weber", +"website", +"wed", +"wedding", +"weibo", +"weir", +"whoswho", +"wien", +"wiki", +"williamhill", +"win", +"windows", +"wine", +"winners", +"wme", +"wolterskluwer", +"woodside", +"work", +"works", +"world", +"wow", +"wtc", +"wtf", +"xbox", +"xerox", +"xfinity", +"xihuan", +"xin", +"कॉम", +"セール", +"佛山", +"慈善", +"集团", +"在线", +"大众汽车", +"点看", +"คอม", +"八卦", +"موقع", +"公益", +"公司", +"香格里拉", +"网站", +"移动", +"我爱你", +"москва", +"католик", +"онлайн", +"сайт", +"联通", +"קום", +"时尚", +"微博", +"淡马锡", +"ファッション", +"орг", +"नेट", +"ストア", +"アマゾン", +"삼성", +"商标", +"商店", +"商城", +"дети", +"ポイント", +"新闻", +"工行", +"家電", +"كوم", +"中文网", +"中信", +"娱乐", +"谷歌", +"電訊盈科", +"购物", +"クラウド", +"通販", +"网店", +"संगठन", +"餐厅", +"网络", +"ком", +"亚马逊", +"诺基亚", +"食品", +"飞利浦", +"手表", +"手机", +"ارامكو", +"العليان", +"اتصالات", +"بازار", +"ابوظبي", +"كاثوليك", +"همراه", +"닷컴", +"政府", +"شبكة", +"بيتك", +"عرب", +"机构", +"组织机构", +"健康", +"招聘", +"рус", +"珠宝", +"大拿", +"みんな", +"グーグル", +"世界", +"書籍", +"网址", +"닷넷", +"コム", +"天主教", +"游戏", +"vermögensberater", +"vermögensberatung", +"企业", +"信息", +"嘉里大酒店", +"嘉里", +"广东", +"政务", +"xyz", +"yachts", +"yahoo", +"yamaxun", +"yandex", +"yodobashi", +"yoga", +"yokohama", +"you", +"youtube", +"yun", +"zappos", +"zara", +"zero", +"zip", +"zone", +"zuerich", +"cc.ua", +"inf.ua", +"ltd.ua", +"adobeaemcloud.com", +"adobeaemcloud.net", +"*.dev.adobeaemcloud.com", +"beep.pl", +"barsy.ca", +"*.compute.estate", +"*.alces.network", +"altervista.org", +"alwaysdata.net", +"cloudfront.net", +"*.compute.amazonaws.com", +"*.compute-1.amazonaws.com", +"*.compute.amazonaws.com.cn", +"us-east-1.amazonaws.com", +"cn-north-1.eb.amazonaws.com.cn", +"cn-northwest-1.eb.amazonaws.com.cn", +"elasticbeanstalk.com", +"ap-northeast-1.elasticbeanstalk.com", +"ap-northeast-2.elasticbeanstalk.com", +"ap-northeast-3.elasticbeanstalk.com", +"ap-south-1.elasticbeanstalk.com", +"ap-southeast-1.elasticbeanstalk.com", +"ap-southeast-2.elasticbeanstalk.com", +"ca-central-1.elasticbeanstalk.com", +"eu-central-1.elasticbeanstalk.com", +"eu-west-1.elasticbeanstalk.com", +"eu-west-2.elasticbeanstalk.com", +"eu-west-3.elasticbeanstalk.com", +"sa-east-1.elasticbeanstalk.com", +"us-east-1.elasticbeanstalk.com", +"us-east-2.elasticbeanstalk.com", +"us-gov-west-1.elasticbeanstalk.com", +"us-west-1.elasticbeanstalk.com", +"us-west-2.elasticbeanstalk.com", +"*.elb.amazonaws.com", +"*.elb.amazonaws.com.cn", +"s3.amazonaws.com", +"s3-ap-northeast-1.amazonaws.com", +"s3-ap-northeast-2.amazonaws.com", +"s3-ap-south-1.amazonaws.com", +"s3-ap-southeast-1.amazonaws.com", +"s3-ap-southeast-2.amazonaws.com", +"s3-ca-central-1.amazonaws.com", +"s3-eu-central-1.amazonaws.com", +"s3-eu-west-1.amazonaws.com", +"s3-eu-west-2.amazonaws.com", +"s3-eu-west-3.amazonaws.com", +"s3-external-1.amazonaws.com", +"s3-fips-us-gov-west-1.amazonaws.com", +"s3-sa-east-1.amazonaws.com", +"s3-us-gov-west-1.amazonaws.com", +"s3-us-east-2.amazonaws.com", +"s3-us-west-1.amazonaws.com", +"s3-us-west-2.amazonaws.com", +"s3.ap-northeast-2.amazonaws.com", +"s3.ap-south-1.amazonaws.com", +"s3.cn-north-1.amazonaws.com.cn", +"s3.ca-central-1.amazonaws.com", +"s3.eu-central-1.amazonaws.com", +"s3.eu-west-2.amazonaws.com", +"s3.eu-west-3.amazonaws.com", +"s3.us-east-2.amazonaws.com", 
+"s3.dualstack.ap-northeast-1.amazonaws.com", +"s3.dualstack.ap-northeast-2.amazonaws.com", +"s3.dualstack.ap-south-1.amazonaws.com", +"s3.dualstack.ap-southeast-1.amazonaws.com", +"s3.dualstack.ap-southeast-2.amazonaws.com", +"s3.dualstack.ca-central-1.amazonaws.com", +"s3.dualstack.eu-central-1.amazonaws.com", +"s3.dualstack.eu-west-1.amazonaws.com", +"s3.dualstack.eu-west-2.amazonaws.com", +"s3.dualstack.eu-west-3.amazonaws.com", +"s3.dualstack.sa-east-1.amazonaws.com", +"s3.dualstack.us-east-1.amazonaws.com", +"s3.dualstack.us-east-2.amazonaws.com", +"s3-website-us-east-1.amazonaws.com", +"s3-website-us-west-1.amazonaws.com", +"s3-website-us-west-2.amazonaws.com", +"s3-website-ap-northeast-1.amazonaws.com", +"s3-website-ap-southeast-1.amazonaws.com", +"s3-website-ap-southeast-2.amazonaws.com", +"s3-website-eu-west-1.amazonaws.com", +"s3-website-sa-east-1.amazonaws.com", +"s3-website.ap-northeast-2.amazonaws.com", +"s3-website.ap-south-1.amazonaws.com", +"s3-website.ca-central-1.amazonaws.com", +"s3-website.eu-central-1.amazonaws.com", +"s3-website.eu-west-2.amazonaws.com", +"s3-website.eu-west-3.amazonaws.com", +"s3-website.us-east-2.amazonaws.com", +"amsw.nl", +"t3l3p0rt.net", +"tele.amune.org", +"apigee.io", +"on-aptible.com", +"user.aseinet.ne.jp", +"gv.vc", +"d.gv.vc", +"user.party.eus", +"pimienta.org", +"poivron.org", +"potager.org", +"sweetpepper.org", +"myasustor.com", +"myfritz.net", +"*.awdev.ca", +"*.advisor.ws", +"b-data.io", +"backplaneapp.io", +"balena-devices.com", +"app.banzaicloud.io", +"betainabox.com", +"bnr.la", +"blackbaudcdn.net", +"boomla.net", +"boxfuse.io", +"square7.ch", +"bplaced.com", +"bplaced.de", +"square7.de", +"bplaced.net", +"square7.net", +"browsersafetymark.io", +"uk0.bigv.io", +"dh.bytemark.co.uk", +"vm.bytemark.co.uk", +"mycd.eu", +"carrd.co", +"crd.co", +"uwu.ai", +"ae.org", +"ar.com", +"br.com", +"cn.com", +"com.de", +"com.se", +"de.com", +"eu.com", +"gb.com", +"gb.net", +"hu.com", +"hu.net", +"jp.net", +"jpn.com", +"kr.com", +"mex.com", +"no.com", +"qc.com", +"ru.com", +"sa.com", +"se.net", +"uk.com", +"uk.net", +"us.com", +"uy.com", +"za.bz", +"za.com", +"africa.com", +"gr.com", +"in.net", +"us.org", +"co.com", +"c.la", +"certmgr.org", +"xenapponazure.com", +"discourse.group", +"discourse.team", +"virtueeldomein.nl", +"cleverapps.io", +"*.lcl.dev", +"*.stg.dev", +"c66.me", +"cloud66.ws", +"cloud66.zone", +"jdevcloud.com", +"wpdevcloud.com", +"cloudaccess.host", +"freesite.host", +"cloudaccess.net", +"cloudcontrolled.com", +"cloudcontrolapp.com", +"cloudera.site", +"trycloudflare.com", +"workers.dev", +"wnext.app", +"co.ca", +"*.otap.co", +"co.cz", +"c.cdn77.org", +"cdn77-ssl.net", +"r.cdn77.net", +"rsc.cdn77.org", +"ssl.origin.cdn77-secure.org", +"cloudns.asia", +"cloudns.biz", +"cloudns.club", +"cloudns.cc", +"cloudns.eu", +"cloudns.in", +"cloudns.info", +"cloudns.org", +"cloudns.pro", +"cloudns.pw", +"cloudns.us", +"cloudeity.net", +"cnpy.gdn", +"co.nl", +"co.no", +"webhosting.be", +"hosting-cluster.nl", +"ac.ru", +"edu.ru", +"gov.ru", +"int.ru", +"mil.ru", +"test.ru", +"dyn.cosidns.de", +"dynamisches-dns.de", +"dnsupdater.de", +"internet-dns.de", +"l-o-g-i-n.de", +"dynamic-dns.info", +"feste-ip.net", +"knx-server.net", +"static-access.net", +"realm.cz", +"*.cryptonomic.net", +"cupcake.is", +"*.customer-oci.com", +"*.oci.customer-oci.com", +"*.ocp.customer-oci.com", +"*.ocs.customer-oci.com", +"cyon.link", +"cyon.site", +"daplie.me", +"localhost.daplie.me", +"dattolocal.com", +"dattorelay.com", +"dattoweb.com", +"mydatto.com", 
+"dattolocal.net", +"mydatto.net", +"biz.dk", +"co.dk", +"firm.dk", +"reg.dk", +"store.dk", +"*.dapps.earth", +"*.bzz.dapps.earth", +"builtwithdark.com", +"edgestack.me", +"debian.net", +"dedyn.io", +"dnshome.de", +"online.th", +"shop.th", +"drayddns.com", +"dreamhosters.com", +"mydrobo.com", +"drud.io", +"drud.us", +"duckdns.org", +"dy.fi", +"tunk.org", +"dyndns-at-home.com", +"dyndns-at-work.com", +"dyndns-blog.com", +"dyndns-free.com", +"dyndns-home.com", +"dyndns-ip.com", +"dyndns-mail.com", +"dyndns-office.com", +"dyndns-pics.com", +"dyndns-remote.com", +"dyndns-server.com", +"dyndns-web.com", +"dyndns-wiki.com", +"dyndns-work.com", +"dyndns.biz", +"dyndns.info", +"dyndns.org", +"dyndns.tv", +"at-band-camp.net", +"ath.cx", +"barrel-of-knowledge.info", +"barrell-of-knowledge.info", +"better-than.tv", +"blogdns.com", +"blogdns.net", +"blogdns.org", +"blogsite.org", +"boldlygoingnowhere.org", +"broke-it.net", +"buyshouses.net", +"cechire.com", +"dnsalias.com", +"dnsalias.net", +"dnsalias.org", +"dnsdojo.com", +"dnsdojo.net", +"dnsdojo.org", +"does-it.net", +"doesntexist.com", +"doesntexist.org", +"dontexist.com", +"dontexist.net", +"dontexist.org", +"doomdns.com", +"doomdns.org", +"dvrdns.org", +"dyn-o-saur.com", +"dynalias.com", +"dynalias.net", +"dynalias.org", +"dynathome.net", +"dyndns.ws", +"endofinternet.net", +"endofinternet.org", +"endoftheinternet.org", +"est-a-la-maison.com", +"est-a-la-masion.com", +"est-le-patron.com", +"est-mon-blogueur.com", +"for-better.biz", +"for-more.biz", +"for-our.info", +"for-some.biz", +"for-the.biz", +"forgot.her.name", +"forgot.his.name", +"from-ak.com", +"from-al.com", +"from-ar.com", +"from-az.net", +"from-ca.com", +"from-co.net", +"from-ct.com", +"from-dc.com", +"from-de.com", +"from-fl.com", +"from-ga.com", +"from-hi.com", +"from-ia.com", +"from-id.com", +"from-il.com", +"from-in.com", +"from-ks.com", +"from-ky.com", +"from-la.net", +"from-ma.com", +"from-md.com", +"from-me.org", +"from-mi.com", +"from-mn.com", +"from-mo.com", +"from-ms.com", +"from-mt.com", +"from-nc.com", +"from-nd.com", +"from-ne.com", +"from-nh.com", +"from-nj.com", +"from-nm.com", +"from-nv.com", +"from-ny.net", +"from-oh.com", +"from-ok.com", +"from-or.com", +"from-pa.com", +"from-pr.com", +"from-ri.com", +"from-sc.com", +"from-sd.com", +"from-tn.com", +"from-tx.com", +"from-ut.com", +"from-va.com", +"from-vt.com", +"from-wa.com", +"from-wi.com", +"from-wv.com", +"from-wy.com", +"ftpaccess.cc", +"fuettertdasnetz.de", +"game-host.org", +"game-server.cc", +"getmyip.com", +"gets-it.net", +"go.dyndns.org", +"gotdns.com", +"gotdns.org", +"groks-the.info", +"groks-this.info", +"ham-radio-op.net", +"here-for-more.info", +"hobby-site.com", +"hobby-site.org", +"home.dyndns.org", +"homedns.org", +"homeftp.net", +"homeftp.org", +"homeip.net", +"homelinux.com", +"homelinux.net", +"homelinux.org", +"homeunix.com", +"homeunix.net", +"homeunix.org", +"iamallama.com", +"in-the-band.net", +"is-a-anarchist.com", +"is-a-blogger.com", +"is-a-bookkeeper.com", +"is-a-bruinsfan.org", +"is-a-bulls-fan.com", +"is-a-candidate.org", +"is-a-caterer.com", +"is-a-celticsfan.org", +"is-a-chef.com", +"is-a-chef.net", +"is-a-chef.org", +"is-a-conservative.com", +"is-a-cpa.com", +"is-a-cubicle-slave.com", +"is-a-democrat.com", +"is-a-designer.com", +"is-a-doctor.com", +"is-a-financialadvisor.com", +"is-a-geek.com", +"is-a-geek.net", +"is-a-geek.org", +"is-a-green.com", +"is-a-guru.com", +"is-a-hard-worker.com", +"is-a-hunter.com", +"is-a-knight.org", +"is-a-landscaper.com", +"is-a-lawyer.com", 
+"is-a-liberal.com", +"is-a-libertarian.com", +"is-a-linux-user.org", +"is-a-llama.com", +"is-a-musician.com", +"is-a-nascarfan.com", +"is-a-nurse.com", +"is-a-painter.com", +"is-a-patsfan.org", +"is-a-personaltrainer.com", +"is-a-photographer.com", +"is-a-player.com", +"is-a-republican.com", +"is-a-rockstar.com", +"is-a-socialist.com", +"is-a-soxfan.org", +"is-a-student.com", +"is-a-teacher.com", +"is-a-techie.com", +"is-a-therapist.com", +"is-an-accountant.com", +"is-an-actor.com", +"is-an-actress.com", +"is-an-anarchist.com", +"is-an-artist.com", +"is-an-engineer.com", +"is-an-entertainer.com", +"is-by.us", +"is-certified.com", +"is-found.org", +"is-gone.com", +"is-into-anime.com", +"is-into-cars.com", +"is-into-cartoons.com", +"is-into-games.com", +"is-leet.com", +"is-lost.org", +"is-not-certified.com", +"is-saved.org", +"is-slick.com", +"is-uberleet.com", +"is-very-bad.org", +"is-very-evil.org", +"is-very-good.org", +"is-very-nice.org", +"is-very-sweet.org", +"is-with-theband.com", +"isa-geek.com", +"isa-geek.net", +"isa-geek.org", +"isa-hockeynut.com", +"issmarterthanyou.com", +"isteingeek.de", +"istmein.de", +"kicks-ass.net", +"kicks-ass.org", +"knowsitall.info", +"land-4-sale.us", +"lebtimnetz.de", +"leitungsen.de", +"likes-pie.com", +"likescandy.com", +"merseine.nu", +"mine.nu", +"misconfused.org", +"mypets.ws", +"myphotos.cc", +"neat-url.com", +"office-on-the.net", +"on-the-web.tv", +"podzone.net", +"podzone.org", +"readmyblog.org", +"saves-the-whales.com", +"scrapper-site.net", +"scrapping.cc", +"selfip.biz", +"selfip.com", +"selfip.info", +"selfip.net", +"selfip.org", +"sells-for-less.com", +"sells-for-u.com", +"sells-it.net", +"sellsyourhome.org", +"servebbs.com", +"servebbs.net", +"servebbs.org", +"serveftp.net", +"serveftp.org", +"servegame.org", +"shacknet.nu", +"simple-url.com", +"space-to-rent.com", +"stuff-4-sale.org", +"stuff-4-sale.us", +"teaches-yoga.com", +"thruhere.net", +"traeumtgerade.de", +"webhop.biz", +"webhop.info", +"webhop.net", +"webhop.org", +"worse-than.tv", +"writesthisblog.com", +"ddnss.de", +"dyn.ddnss.de", +"dyndns.ddnss.de", +"dyndns1.de", +"dyn-ip24.de", +"home-webserver.de", +"dyn.home-webserver.de", +"myhome-server.de", +"ddnss.org", +"definima.net", +"definima.io", +"bci.dnstrace.pro", +"ddnsfree.com", +"ddnsgeek.com", +"giize.com", +"gleeze.com", +"kozow.com", +"loseyourip.com", +"ooguy.com", +"theworkpc.com", +"casacam.net", +"dynu.net", +"accesscam.org", +"camdvr.org", +"freeddns.org", +"mywire.org", +"webredirect.org", +"myddns.rocks", +"blogsite.xyz", +"dynv6.net", +"e4.cz", +"en-root.fr", +"mytuleap.com", +"onred.one", +"staging.onred.one", +"enonic.io", +"customer.enonic.io", +"eu.org", +"al.eu.org", +"asso.eu.org", +"at.eu.org", +"au.eu.org", +"be.eu.org", +"bg.eu.org", +"ca.eu.org", +"cd.eu.org", +"ch.eu.org", +"cn.eu.org", +"cy.eu.org", +"cz.eu.org", +"de.eu.org", +"dk.eu.org", +"edu.eu.org", +"ee.eu.org", +"es.eu.org", +"fi.eu.org", +"fr.eu.org", +"gr.eu.org", +"hr.eu.org", +"hu.eu.org", +"ie.eu.org", +"il.eu.org", +"in.eu.org", +"int.eu.org", +"is.eu.org", +"it.eu.org", +"jp.eu.org", +"kr.eu.org", +"lt.eu.org", +"lu.eu.org", +"lv.eu.org", +"mc.eu.org", +"me.eu.org", +"mk.eu.org", +"mt.eu.org", +"my.eu.org", +"net.eu.org", +"ng.eu.org", +"nl.eu.org", +"no.eu.org", +"nz.eu.org", +"paris.eu.org", +"pl.eu.org", +"pt.eu.org", +"q-a.eu.org", +"ro.eu.org", +"ru.eu.org", +"se.eu.org", +"si.eu.org", +"sk.eu.org", +"tr.eu.org", +"uk.eu.org", +"us.eu.org", +"eu-1.evennode.com", +"eu-2.evennode.com", +"eu-3.evennode.com", +"eu-4.evennode.com", 
+"us-1.evennode.com", +"us-2.evennode.com", +"us-3.evennode.com", +"us-4.evennode.com", +"twmail.cc", +"twmail.net", +"twmail.org", +"mymailer.com.tw", +"url.tw", +"apps.fbsbx.com", +"ru.net", +"adygeya.ru", +"bashkiria.ru", +"bir.ru", +"cbg.ru", +"com.ru", +"dagestan.ru", +"grozny.ru", +"kalmykia.ru", +"kustanai.ru", +"marine.ru", +"mordovia.ru", +"msk.ru", +"mytis.ru", +"nalchik.ru", +"nov.ru", +"pyatigorsk.ru", +"spb.ru", +"vladikavkaz.ru", +"vladimir.ru", +"abkhazia.su", +"adygeya.su", +"aktyubinsk.su", +"arkhangelsk.su", +"armenia.su", +"ashgabad.su", +"azerbaijan.su", +"balashov.su", +"bashkiria.su", +"bryansk.su", +"bukhara.su", +"chimkent.su", +"dagestan.su", +"east-kazakhstan.su", +"exnet.su", +"georgia.su", +"grozny.su", +"ivanovo.su", +"jambyl.su", +"kalmykia.su", +"kaluga.su", +"karacol.su", +"karaganda.su", +"karelia.su", +"khakassia.su", +"krasnodar.su", +"kurgan.su", +"kustanai.su", +"lenug.su", +"mangyshlak.su", +"mordovia.su", +"msk.su", +"murmansk.su", +"nalchik.su", +"navoi.su", +"north-kazakhstan.su", +"nov.su", +"obninsk.su", +"penza.su", +"pokrovsk.su", +"sochi.su", +"spb.su", +"tashkent.su", +"termez.su", +"togliatti.su", +"troitsk.su", +"tselinograd.su", +"tula.su", +"tuva.su", +"vladikavkaz.su", +"vladimir.su", +"vologda.su", +"channelsdvr.net", +"u.channelsdvr.net", +"fastly-terrarium.com", +"fastlylb.net", +"map.fastlylb.net", +"freetls.fastly.net", +"map.fastly.net", +"a.prod.fastly.net", +"global.prod.fastly.net", +"a.ssl.fastly.net", +"b.ssl.fastly.net", +"global.ssl.fastly.net", +"fastpanel.direct", +"fastvps-server.com", +"fhapp.xyz", +"fedorainfracloud.org", +"fedorapeople.org", +"cloud.fedoraproject.org", +"app.os.fedoraproject.org", +"app.os.stg.fedoraproject.org", +"mydobiss.com", +"filegear.me", +"filegear-au.me", +"filegear-de.me", +"filegear-gb.me", +"filegear-ie.me", +"filegear-jp.me", +"filegear-sg.me", +"firebaseapp.com", +"flynnhub.com", +"flynnhosting.net", +"0e.vc", +"freebox-os.com", +"freeboxos.com", +"fbx-os.fr", +"fbxos.fr", +"freebox-os.fr", +"freeboxos.fr", +"freedesktop.org", +"*.futurecms.at", +"*.ex.futurecms.at", +"*.in.futurecms.at", +"futurehosting.at", +"futuremailing.at", +"*.ex.ortsinfo.at", +"*.kunden.ortsinfo.at", +"*.statics.cloud", +"service.gov.uk", +"gehirn.ne.jp", +"usercontent.jp", +"gentapps.com", +"lab.ms", +"github.io", +"githubusercontent.com", +"gitlab.io", +"glitch.me", +"lolipop.io", +"cloudapps.digital", +"london.cloudapps.digital", +"homeoffice.gov.uk", +"ro.im", +"shop.ro", +"goip.de", +"run.app", +"a.run.app", +"web.app", +"*.0emm.com", +"appspot.com", +"*.r.appspot.com", +"blogspot.ae", +"blogspot.al", +"blogspot.am", +"blogspot.ba", +"blogspot.be", +"blogspot.bg", +"blogspot.bj", +"blogspot.ca", +"blogspot.cf", +"blogspot.ch", +"blogspot.cl", +"blogspot.co.at", +"blogspot.co.id", +"blogspot.co.il", +"blogspot.co.ke", +"blogspot.co.nz", +"blogspot.co.uk", +"blogspot.co.za", +"blogspot.com", +"blogspot.com.ar", +"blogspot.com.au", +"blogspot.com.br", +"blogspot.com.by", +"blogspot.com.co", +"blogspot.com.cy", +"blogspot.com.ee", +"blogspot.com.eg", +"blogspot.com.es", +"blogspot.com.mt", +"blogspot.com.ng", +"blogspot.com.tr", +"blogspot.com.uy", +"blogspot.cv", +"blogspot.cz", +"blogspot.de", +"blogspot.dk", +"blogspot.fi", +"blogspot.fr", +"blogspot.gr", +"blogspot.hk", +"blogspot.hr", +"blogspot.hu", +"blogspot.ie", +"blogspot.in", +"blogspot.is", +"blogspot.it", +"blogspot.jp", +"blogspot.kr", +"blogspot.li", +"blogspot.lt", +"blogspot.lu", +"blogspot.md", +"blogspot.mk", +"blogspot.mr", +"blogspot.mx", 
+"blogspot.my", +"blogspot.nl", +"blogspot.no", +"blogspot.pe", +"blogspot.pt", +"blogspot.qa", +"blogspot.re", +"blogspot.ro", +"blogspot.rs", +"blogspot.ru", +"blogspot.se", +"blogspot.sg", +"blogspot.si", +"blogspot.sk", +"blogspot.sn", +"blogspot.td", +"blogspot.tw", +"blogspot.ug", +"blogspot.vn", +"cloudfunctions.net", +"cloud.goog", +"codespot.com", +"googleapis.com", +"googlecode.com", +"pagespeedmobilizer.com", +"publishproxy.com", +"withgoogle.com", +"withyoutube.com", +"awsmppl.com", +"fin.ci", +"free.hr", +"caa.li", +"ua.rs", +"conf.se", +"hs.zone", +"hs.run", +"hashbang.sh", +"hasura.app", +"hasura-app.io", +"hepforge.org", +"herokuapp.com", +"herokussl.com", +"myravendb.com", +"ravendb.community", +"ravendb.me", +"development.run", +"ravendb.run", +"bpl.biz", +"orx.biz", +"ng.city", +"biz.gl", +"ng.ink", +"col.ng", +"firm.ng", +"gen.ng", +"ltd.ng", +"ngo.ng", +"ng.school", +"sch.so", +"häkkinen.fi", +"*.moonscale.io", +"moonscale.net", +"iki.fi", +"dyn-berlin.de", +"in-berlin.de", +"in-brb.de", +"in-butter.de", +"in-dsl.de", +"in-dsl.net", +"in-dsl.org", +"in-vpn.de", +"in-vpn.net", +"in-vpn.org", +"biz.at", +"info.at", +"info.cx", +"ac.leg.br", +"al.leg.br", +"am.leg.br", +"ap.leg.br", +"ba.leg.br", +"ce.leg.br", +"df.leg.br", +"es.leg.br", +"go.leg.br", +"ma.leg.br", +"mg.leg.br", +"ms.leg.br", +"mt.leg.br", +"pa.leg.br", +"pb.leg.br", +"pe.leg.br", +"pi.leg.br", +"pr.leg.br", +"rj.leg.br", +"rn.leg.br", +"ro.leg.br", +"rr.leg.br", +"rs.leg.br", +"sc.leg.br", +"se.leg.br", +"sp.leg.br", +"to.leg.br", +"pixolino.com", +"ipifony.net", +"mein-iserv.de", +"test-iserv.de", +"iserv.dev", +"iobb.net", +"myjino.ru", +"*.hosting.myjino.ru", +"*.landing.myjino.ru", +"*.spectrum.myjino.ru", +"*.vps.myjino.ru", +"*.triton.zone", +"*.cns.joyent.com", +"js.org", +"kaas.gg", +"khplay.nl", +"keymachine.de", +"kinghost.net", +"uni5.net", +"knightpoint.systems", +"oya.to", +"co.krd", +"edu.krd", +"git-repos.de", +"lcube-server.de", +"svn-repos.de", +"leadpages.co", +"lpages.co", +"lpusercontent.com", +"lelux.site", +"co.business", +"co.education", +"co.events", +"co.financial", +"co.network", +"co.place", +"co.technology", +"app.lmpm.com", +"linkitools.space", +"linkyard.cloud", +"linkyard-cloud.ch", +"members.linode.com", +"nodebalancer.linode.com", +"we.bs", +"loginline.app", +"loginline.dev", +"loginline.io", +"loginline.services", +"loginline.site", +"krasnik.pl", +"leczna.pl", +"lubartow.pl", +"lublin.pl", +"poniatowa.pl", +"swidnik.pl", +"uklugs.org", +"glug.org.uk", +"lug.org.uk", +"lugs.org.uk", +"barsy.bg", +"barsy.co.uk", +"barsyonline.co.uk", +"barsycenter.com", +"barsyonline.com", +"barsy.club", +"barsy.de", +"barsy.eu", +"barsy.in", +"barsy.info", +"barsy.io", +"barsy.me", +"barsy.menu", +"barsy.mobi", +"barsy.net", +"barsy.online", +"barsy.org", +"barsy.pro", +"barsy.pub", +"barsy.shop", +"barsy.site", +"barsy.support", +"barsy.uk", +"*.magentosite.cloud", +"mayfirst.info", +"mayfirst.org", +"hb.cldmail.ru", +"miniserver.com", +"memset.net", +"cloud.metacentrum.cz", +"custom.metacentrum.cz", +"flt.cloud.muni.cz", +"usr.cloud.muni.cz", +"meteorapp.com", +"eu.meteorapp.com", +"co.pl", +"azurecontainer.io", +"azurewebsites.net", +"azure-mobile.net", +"cloudapp.net", +"mozilla-iot.org", +"bmoattachments.org", +"net.ru", +"org.ru", +"pp.ru", +"ui.nabu.casa", +"pony.club", +"of.fashion", +"on.fashion", +"of.football", +"in.london", +"of.london", +"for.men", +"and.mom", +"for.mom", +"for.one", +"for.sale", +"of.work", +"to.work", +"nctu.me", +"bitballoon.com", +"netlify.com", 
+"4u.com", +"ngrok.io", +"nh-serv.co.uk", +"nfshost.com", +"dnsking.ch", +"mypi.co", +"n4t.co", +"001www.com", +"ddnslive.com", +"myiphost.com", +"forumz.info", +"16-b.it", +"32-b.it", +"64-b.it", +"soundcast.me", +"tcp4.me", +"dnsup.net", +"hicam.net", +"now-dns.net", +"ownip.net", +"vpndns.net", +"dynserv.org", +"now-dns.org", +"x443.pw", +"now-dns.top", +"ntdll.top", +"freeddns.us", +"crafting.xyz", +"zapto.xyz", +"nsupdate.info", +"nerdpol.ovh", +"blogsyte.com", +"brasilia.me", +"cable-modem.org", +"ciscofreak.com", +"collegefan.org", +"couchpotatofries.org", +"damnserver.com", +"ddns.me", +"ditchyourip.com", +"dnsfor.me", +"dnsiskinky.com", +"dvrcam.info", +"dynns.com", +"eating-organic.net", +"fantasyleague.cc", +"geekgalaxy.com", +"golffan.us", +"health-carereform.com", +"homesecuritymac.com", +"homesecuritypc.com", +"hopto.me", +"ilovecollege.info", +"loginto.me", +"mlbfan.org", +"mmafan.biz", +"myactivedirectory.com", +"mydissent.net", +"myeffect.net", +"mymediapc.net", +"mypsx.net", +"mysecuritycamera.com", +"mysecuritycamera.net", +"mysecuritycamera.org", +"net-freaks.com", +"nflfan.org", +"nhlfan.net", +"no-ip.ca", +"no-ip.co.uk", +"no-ip.net", +"noip.us", +"onthewifi.com", +"pgafan.net", +"point2this.com", +"pointto.us", +"privatizehealthinsurance.net", +"quicksytes.com", +"read-books.org", +"securitytactics.com", +"serveexchange.com", +"servehumour.com", +"servep2p.com", +"servesarcasm.com", +"stufftoread.com", +"ufcfan.org", +"unusualperson.com", +"workisboring.com", +"3utilities.com", +"bounceme.net", +"ddns.net", +"ddnsking.com", +"gotdns.ch", +"hopto.org", +"myftp.biz", +"myftp.org", +"myvnc.com", +"no-ip.biz", +"no-ip.info", +"no-ip.org", +"noip.me", +"redirectme.net", +"servebeer.com", +"serveblog.net", +"servecounterstrike.com", +"serveftp.com", +"servegame.com", +"servehalflife.com", +"servehttp.com", +"serveirc.com", +"serveminecraft.net", +"servemp3.com", +"servepics.com", +"servequake.com", +"sytes.net", +"webhop.me", +"zapto.org", +"stage.nodeart.io", +"nodum.co", +"nodum.io", +"pcloud.host", +"nyc.mn", +"nom.ae", +"nom.af", +"nom.ai", +"nom.al", +"nym.by", +"nom.bz", +"nym.bz", +"nom.cl", +"nym.ec", +"nom.gd", +"nom.ge", +"nom.gl", +"nym.gr", +"nom.gt", +"nym.gy", +"nym.hk", +"nom.hn", +"nym.ie", +"nom.im", +"nom.ke", +"nym.kz", +"nym.la", +"nym.lc", +"nom.li", +"nym.li", +"nym.lt", +"nym.lu", +"nom.lv", +"nym.me", +"nom.mk", +"nym.mn", +"nym.mx", +"nom.nu", +"nym.nz", +"nym.pe", +"nym.pt", +"nom.pw", +"nom.qa", +"nym.ro", +"nom.rs", +"nom.si", +"nym.sk", +"nom.st", +"nym.su", +"nym.sx", +"nom.tj", +"nym.tw", +"nom.ug", +"nom.uy", +"nom.vc", +"nom.vg", +"static.observableusercontent.com", +"cya.gg", +"cloudycluster.net", +"nid.io", +"opencraft.hosting", +"operaunite.com", +"skygearapp.com", +"outsystemscloud.com", +"ownprovider.com", +"own.pm", +"ox.rs", +"oy.lc", +"pgfog.com", +"pagefrontapp.com", +"art.pl", +"gliwice.pl", +"krakow.pl", +"poznan.pl", +"wroc.pl", +"zakopane.pl", +"pantheonsite.io", +"gotpantheon.com", +"mypep.link", +"perspecta.cloud", +"on-web.fr", +"*.platform.sh", +"*.platformsh.site", +"dyn53.io", +"co.bn", +"xen.prgmr.com", +"priv.at", +"prvcy.page", +"*.dweb.link", +"protonet.io", +"chirurgiens-dentistes-en-france.fr", +"byen.site", +"pubtls.org", +"qualifioapp.com", +"qbuser.com", +"instantcloud.cn", +"ras.ru", +"qa2.com", +"qcx.io", +"*.sys.qcx.io", +"dev-myqnapcloud.com", +"alpha-myqnapcloud.com", +"myqnapcloud.com", +"*.quipelements.com", +"vapor.cloud", +"vaporcloud.io", +"rackmaze.com", +"rackmaze.net", +"*.on-k3s.io", 
+"*.on-rancher.cloud", +"*.on-rio.io", +"readthedocs.io", +"rhcloud.com", +"app.render.com", +"onrender.com", +"repl.co", +"repl.run", +"resindevice.io", +"devices.resinstaging.io", +"hzc.io", +"wellbeingzone.eu", +"ptplus.fit", +"wellbeingzone.co.uk", +"git-pages.rit.edu", +"sandcats.io", +"logoip.de", +"logoip.com", +"schokokeks.net", +"gov.scot", +"scrysec.com", +"firewall-gateway.com", +"firewall-gateway.de", +"my-gateway.de", +"my-router.de", +"spdns.de", +"spdns.eu", +"firewall-gateway.net", +"my-firewall.org", +"myfirewall.org", +"spdns.org", +"senseering.net", +"biz.ua", +"co.ua", +"pp.ua", +"shiftedit.io", +"myshopblocks.com", +"shopitsite.com", +"mo-siemens.io", +"1kapp.com", +"appchizi.com", +"applinzi.com", +"sinaapp.com", +"vipsinaapp.com", +"siteleaf.net", +"bounty-full.com", +"alpha.bounty-full.com", +"beta.bounty-full.com", +"stackhero-network.com", +"static.land", +"dev.static.land", +"sites.static.land", +"apps.lair.io", +"*.stolos.io", +"spacekit.io", +"customer.speedpartner.de", +"api.stdlib.com", +"storj.farm", +"utwente.io", +"soc.srcf.net", +"user.srcf.net", +"temp-dns.com", +"applicationcloud.io", +"scapp.io", +"*.s5y.io", +"*.sensiosite.cloud", +"syncloud.it", +"diskstation.me", +"dscloud.biz", +"dscloud.me", +"dscloud.mobi", +"dsmynas.com", +"dsmynas.net", +"dsmynas.org", +"familyds.com", +"familyds.net", +"familyds.org", +"i234.me", +"myds.me", +"synology.me", +"vpnplus.to", +"direct.quickconnect.to", +"taifun-dns.de", +"gda.pl", +"gdansk.pl", +"gdynia.pl", +"med.pl", +"sopot.pl", +"edugit.org", +"telebit.app", +"telebit.io", +"*.telebit.xyz", +"gwiddle.co.uk", +"thingdustdata.com", +"cust.dev.thingdust.io", +"cust.disrec.thingdust.io", +"cust.prod.thingdust.io", +"cust.testing.thingdust.io", +"arvo.network", +"azimuth.network", +"bloxcms.com", +"townnews-staging.com", +"12hp.at", +"2ix.at", +"4lima.at", +"lima-city.at", +"12hp.ch", +"2ix.ch", +"4lima.ch", +"lima-city.ch", +"trafficplex.cloud", +"de.cool", +"12hp.de", +"2ix.de", +"4lima.de", +"lima-city.de", +"1337.pictures", +"clan.rip", +"lima-city.rocks", +"webspace.rocks", +"lima.zone", +"*.transurl.be", +"*.transurl.eu", +"*.transurl.nl", +"tuxfamily.org", +"dd-dns.de", +"diskstation.eu", +"diskstation.org", +"dray-dns.de", +"draydns.de", +"dyn-vpn.de", +"dynvpn.de", +"mein-vigor.de", +"my-vigor.de", +"my-wan.de", +"syno-ds.de", +"synology-diskstation.de", +"synology-ds.de", +"uber.space", +"*.uberspace.de", +"hk.com", +"hk.org", +"ltd.hk", +"inc.hk", +"virtualuser.de", +"virtual-user.de", +"urown.cloud", +"dnsupdate.info", +"lib.de.us", +"2038.io", +"router.management", +"v-info.info", +"voorloper.cloud", +"v.ua", +"wafflecell.com", +"*.webhare.dev", +"wedeploy.io", +"wedeploy.me", +"wedeploy.sh", +"remotewd.com", +"wmflabs.org", +"myforum.community", +"community-pro.de", +"diskussionsbereich.de", +"community-pro.net", +"meinforum.net", +"half.host", +"xnbay.com", +"u2.xnbay.com", +"u2-local.xnbay.com", +"cistron.nl", +"demon.nl", +"xs4all.space", +"yandexcloud.net", +"storage.yandexcloud.net", +"website.yandexcloud.net", +"official.academy", +"yolasite.com", +"ybo.faith", +"yombo.me", +"homelink.one", +"ybo.party", +"ybo.review", +"ybo.science", +"ybo.trade", +"nohost.me", +"noho.st", +"za.net", +"za.org", +"now.sh", +"bss.design", +"basicserver.io", +"virtualserver.io", +"enterprisecloud.nu" +] \ No newline at end of file diff --git a/node_modules/psl/dist/psl.js b/node_modules/psl/dist/psl.js new file mode 100644 index 00000000..f4b9b892 --- /dev/null +++ b/node_modules/psl/dist/psl.js @@ -0,0 +1,9645 @@ 
+(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.psl = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i= punySuffix.length) { + // return memo; + // } + //} + return rule; + }, null); +}; + + +// +// Error codes and messages. +// +exports.errorCodes = { + DOMAIN_TOO_SHORT: 'Domain name too short.', + DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', + LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', + LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', + LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', + LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', + LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' +}; + + +// +// Validate domain name and throw if not valid. +// +// From wikipedia: +// +// Hostnames are composed of series of labels concatenated with dots, as are all +// domain names. Each label must be between 1 and 63 characters long, and the +// entire hostname (including the delimiting dots) has a maximum of 255 chars. +// +// Allowed chars: +// +// * `a-z` +// * `0-9` +// * `-` but not as a starting or ending character +// * `.` as a separator for the textual portions of a domain name +// +// * http://en.wikipedia.org/wiki/Domain_name +// * http://en.wikipedia.org/wiki/Hostname +// +internals.validate = function (input) { + + // Before we can validate we need to take care of IDNs with unicode chars. + var ascii = Punycode.toASCII(input); + + if (ascii.length < 1) { + return 'DOMAIN_TOO_SHORT'; + } + if (ascii.length > 255) { + return 'DOMAIN_TOO_LONG'; + } + + // Check each part's length and allowed chars. + var labels = ascii.split('.'); + var label; + + for (var i = 0; i < labels.length; ++i) { + label = labels[i]; + if (!label.length) { + return 'LABEL_TOO_SHORT'; + } + if (label.length > 63) { + return 'LABEL_TOO_LONG'; + } + if (label.charAt(0) === '-') { + return 'LABEL_STARTS_WITH_DASH'; + } + if (label.charAt(label.length - 1) === '-') { + return 'LABEL_ENDS_WITH_DASH'; + } + if (!/^[a-z0-9\-]+$/.test(label)) { + return 'LABEL_INVALID_CHARS'; + } + } +}; + + +// +// Public API +// + + +// +// Parse domain. +// +exports.parse = function (input) { + + if (typeof input !== 'string') { + throw new TypeError('Domain name must be a string.'); + } + + // Force domain to lowercase. + var domain = input.slice(0).toLowerCase(); + + // Handle FQDN. + // TODO: Simply remove trailing dot? + if (domain.charAt(domain.length - 1) === '.') { + domain = domain.slice(0, domain.length - 1); + } + + // Validate and sanitise input. 
+ var error = internals.validate(domain); + if (error) { + return { + input: input, + error: { + message: exports.errorCodes[error], + code: error + } + }; + } + + var parsed = { + input: input, + tld: null, + sld: null, + domain: null, + subdomain: null, + listed: false + }; + + var domainParts = domain.split('.'); + + // Non-Internet TLD + if (domainParts[domainParts.length - 1] === 'local') { + return parsed; + } + + var handlePunycode = function () { + + if (!/xn--/.test(domain)) { + return parsed; + } + if (parsed.domain) { + parsed.domain = Punycode.toASCII(parsed.domain); + } + if (parsed.subdomain) { + parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } + return parsed; + }; + + var rule = internals.findRule(domain); + + // Unlisted tld. + if (!rule) { + if (domainParts.length < 2) { + return parsed; + } + parsed.tld = domainParts.pop(); + parsed.sld = domainParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + if (domainParts.length) { + parsed.subdomain = domainParts.pop(); + } + return handlePunycode(); + } + + // At this point we know the public suffix is listed. + parsed.listed = true; + + var tldParts = rule.suffix.split('.'); + var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); + + if (rule.exception) { + privateParts.push(tldParts.shift()); + } + + parsed.tld = tldParts.join('.'); + + if (!privateParts.length) { + return handlePunycode(); + } + + if (rule.wildcard) { + tldParts.unshift(privateParts.pop()); + parsed.tld = tldParts.join('.'); + } + + if (!privateParts.length) { + return handlePunycode(); + } + + parsed.sld = privateParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + + if (privateParts.length) { + parsed.subdomain = privateParts.join('.'); + } + + return handlePunycode(); +}; + + +// +// Get domain. +// +exports.get = function (domain) { + + if (!domain) { + return null; + } + return exports.parse(domain).domain || null; +}; + + +// +// Check whether domain belongs to a known public suffix. +// +exports.isValid = function (domain) { + + var parsed = exports.parse(domain); + return Boolean(parsed.domain && parsed.listed); +}; + +},{"./data/rules.json":1,"punycode":3}],3:[function(require,module,exports){ +(function (global){ +/*! https://mths.be/punycode v1.4.1 by @mathias */ +;(function(root) { + + /** Detect free variables */ + var freeExports = typeof exports == 'object' && exports && + !exports.nodeType && exports; + var freeModule = typeof module == 'object' && module && + !module.nodeType && module; + var freeGlobal = typeof global == 'object' && global; + if ( + freeGlobal.global === freeGlobal || + freeGlobal.window === freeGlobal || + freeGlobal.self === freeGlobal + ) { + root = freeGlobal; + } + + /** + * The `punycode` object. + * @name punycode + * @type Object + */ + var punycode, + + /** Highest positive signed 32-bit float value */ + maxInt = 2147483647, // aka. 
0x7FFFFFFF or 2^31-1 + + /** Bootstring parameters */ + base = 36, + tMin = 1, + tMax = 26, + skew = 38, + damp = 700, + initialBias = 72, + initialN = 128, // 0x80 + delimiter = '-', // '\x2D' + + /** Regular expressions */ + regexPunycode = /^xn--/, + regexNonASCII = /[^\x20-\x7E]/, // unprintable ASCII chars + non-ASCII chars + regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g, // RFC 3490 separators + + /** Error messages */ + errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' + }, + + /** Convenience shortcuts */ + baseMinusTMin = base - tMin, + floor = Math.floor, + stringFromCharCode = String.fromCharCode, + + /** Temporary variable */ + key; + + /*--------------------------------------------------------------------------*/ + + /** + * A generic error utility function. + * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ + function error(type) { + throw new RangeError(errors[type]); + } + + /** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ + function map(array, fn) { + var length = array.length; + var result = []; + while (length--) { + result[length] = fn(array[length]); + } + return result; + } + + /** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {Array} A new string of characters returned by the callback + * function. + */ + function mapDomain(string, fn) { + var parts = string.split('@'); + var result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + string = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + string = string.replace(regexSeparators, '\x2E'); + var labels = string.split('.'); + var encoded = map(labels, fn).join('.'); + return result + encoded; + } + + /** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. 
+ */ + function ucs2decode(string) { + var output = [], + counter = 0, + length = string.length, + value, + extra; + while (counter < length) { + value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // high surrogate, and there is a next character + extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // low surrogate + output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // unmatched surrogate; only append this code unit, in case the next + // code unit is the high surrogate of a surrogate pair + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; + } + + /** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. + * @returns {String} The new Unicode string (UCS-2). + */ + function ucs2encode(array) { + return map(array, function(value) { + var output = ''; + if (value > 0xFFFF) { + value -= 0x10000; + output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800); + value = 0xDC00 | value & 0x3FF; + } + output += stringFromCharCode(value); + return output; + }).join(''); + } + + /** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. + * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. + */ + function basicToDigit(codePoint) { + if (codePoint - 48 < 10) { + return codePoint - 22; + } + if (codePoint - 65 < 26) { + return codePoint - 65; + } + if (codePoint - 97 < 26) { + return codePoint - 97; + } + return base; + } + + /** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ + function digitToBasic(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); + } + + /** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ + function adapt(delta, numPoints, firstTime) { + var k = 0; + delta = firstTime ? floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); + } + + /** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. 
+ */ + function decode(input) { + // Don't use UCS-2 + var output = [], + inputLength = input.length, + out, + i = 0, + n = initialN, + bias = initialBias, + basic, + j, + index, + oldi, + w, + k, + digit, + t, + /** Cached calculation results */ + baseMinusT; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. + for (oldi = i, w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base || digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + + if (digit < t) { + break; + } + + baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output + output.splice(i++, 0, n); + + } + + return ucs2encode(output); + } + + /** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ + function encode(input) { + var n, + delta, + handledCPCount, + basicLength, + bias, + j, + m, + q, + k, + t, + currentValue, + output = [], + /** `inputLength` will hold the number of code points in `input`. */ + inputLength, + /** Cached calculation results */ + handledCPCountPlusOne, + baseMinusT, + qMinusT; + + // Convert the input in UCS-2 to Unicode + input = ucs2decode(input); + + // Cache the length + inputLength = input.length; + + // Initialize the state + n = initialN; + delta = 0; + bias = initialBias; + + // Handle the basic code points + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + handledCPCount = basicLength = output.length; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. 
+ + // Finish the basic string - if it is not empty - with a delimiter + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + for (m = maxInt, j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow + handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + + if (currentValue == n) { + // Represent delta as a generalized variable-length integer + for (q = delta, k = base; /* no condition */; k += base) { + t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + if (q < t) { + break; + } + qMinusT = q - t; + baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); + } + + /** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. + * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. + */ + function toUnicode(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); + } + + /** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ + function toASCII(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); + } + + /*--------------------------------------------------------------------------*/ + + /** Define the public API */ + punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '1.4.1', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. 
+ * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode + }; + + /** Expose `punycode` */ + // Some AMD build optimizers, like r.js, check for specific condition patterns + // like the following: + if ( + typeof define == 'function' && + typeof define.amd == 'object' && + define.amd + ) { + define('punycode', function() { + return punycode; + }); + } else if (freeExports && freeModule) { + if (module.exports == freeExports) { + // in Node.js, io.js, or RingoJS v0.8.0+ + freeModule.exports = punycode; + } else { + // in Narwhal or RingoJS v0.7.0- + for (key in punycode) { + punycode.hasOwnProperty(key) && (freeExports[key] = punycode[key]); + } + } + } else { + // in Rhino or a web browser + root.punycode = punycode; + } + +}(this)); + +}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) +},{}]},{},[2])(2) +}); diff --git a/node_modules/psl/dist/psl.min.js b/node_modules/psl/dist/psl.min.js new file mode 100644 index 00000000..d5c787e4 --- /dev/null +++ b/node_modules/psl/dist/psl.min.js @@ -0,0 +1 @@ +!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).psl=a()}}(function(){return function s(m,t,u){function r(o,a){if(!t[o]){if(!m[o]){var i="function"==typeof require&&require;if(!a&&i)return i(o,!0);if(p)return p(o,!0);var e=new Error("Cannot find module '"+o+"'");throw e.code="MODULE_NOT_FOUND",e}var n=t[o]={exports:{}};m[o][0].call(n.exports,function(a){return r(m[o][1][a]||a)},n,n.exports,s,m,t,u)}return t[o].exports}for(var p="function"==typeof require&&require,a=0;a= 0x80 (not a basic code point)","invalid-input":"Invalid input"},c=b-y,x=Math.floor,q=String.fromCharCode;function A(a){throw new RangeError(k[a])}function l(a,o){for(var i=a.length,e=[];i--;)e[i]=o(a[i]);return e}function g(a,o){var i=a.split("@"),e="";return 1>>10&1023|55296),a=56320|1023&a),o+=q(a)}).join("")}function L(a,o){return a+22+75*(a<26)-((0!=o)<<5)}function I(a,o,i){var e=0;for(a=i?x(a/t):a>>1,a+=x(a/o);c*f>>1x((d-g)/m))&&A("overflow"),g+=u*m,!(u<(r=t<=j?y:j+f<=t?f:t-j));t+=b)m>x(d/(p=b-r))&&A("overflow"),m*=p;j=I(g-s,o=c.length+1,0==s),x(g/o)>d-h&&A("overflow"),h+=x(g/o),g%=o,c.splice(g++,0,h)}return _(c)}function j(a){var o,i,e,n,s,m,t,u,r,p,k,c,l,g,h,j=[];for(c=(a=O(a)).length,o=w,s=v,m=i=0;mx((d-i)/(l=e+1))&&A("overflow"),i+=(t-o)*l,o=t,m=0;md&&A("overflow"),k==o){for(u=i,r=b;!(u<(p=r<=s?y:s+f<=r?f:r-s));r+=b)h=u-p,g=b-p,j.push(q(L(p+h%g,0))),u=x(h/g);j.push(q(L(u,0))),s=I(i,l,e==n),i=0,++e}++i,++o}return j.join("")}if(n={version:"1.4.1",ucs2:{decode:O,encode:_},decode:h,encode:j,toASCII:function(a){return g(a,function(a){return r.test(a)?"xn--"+j(a):a})},toUnicode:function(a){return g(a,function(a){return u.test(a)?h(a.slice(4).toLowerCase()):a})}},0,o&&i)if(T.exports==o)i.exports=n;else for(s in n)n.hasOwnProperty(s)&&(o[s]=n[s]);else a.punycode=n}(this)}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}]},{},[2])(2)}); diff --git a/node_modules/psl/index.js b/node_modules/psl/index.js new file mode 100644 index 00000000..da7bc121 --- /dev/null +++ b/node_modules/psl/index.js @@ -0,0 +1,269 @@ +/*eslint 
no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ +'use strict'; + + +var Punycode = require('punycode'); + + +var internals = {}; + + +// +// Read rules from file. +// +internals.rules = require('./data/rules.json').map(function (rule) { + + return { + rule: rule, + suffix: rule.replace(/^(\*\.|\!)/, ''), + punySuffix: -1, + wildcard: rule.charAt(0) === '*', + exception: rule.charAt(0) === '!' + }; +}); + + +// +// Check is given string ends with `suffix`. +// +internals.endsWith = function (str, suffix) { + + return str.indexOf(suffix, str.length - suffix.length) !== -1; +}; + + +// +// Find rule for a given domain. +// +internals.findRule = function (domain) { + + var punyDomain = Punycode.toASCII(domain); + return internals.rules.reduce(function (memo, rule) { + + if (rule.punySuffix === -1){ + rule.punySuffix = Punycode.toASCII(rule.suffix); + } + if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { + return memo; + } + // This has been commented out as it never seems to run. This is because + // sub tlds always appear after their parents and we never find a shorter + // match. + //if (memo) { + // var memoSuffix = Punycode.toASCII(memo.suffix); + // if (memoSuffix.length >= punySuffix.length) { + // return memo; + // } + //} + return rule; + }, null); +}; + + +// +// Error codes and messages. +// +exports.errorCodes = { + DOMAIN_TOO_SHORT: 'Domain name too short.', + DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', + LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', + LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', + LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', + LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', + LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' +}; + + +// +// Validate domain name and throw if not valid. +// +// From wikipedia: +// +// Hostnames are composed of series of labels concatenated with dots, as are all +// domain names. Each label must be between 1 and 63 characters long, and the +// entire hostname (including the delimiting dots) has a maximum of 255 chars. +// +// Allowed chars: +// +// * `a-z` +// * `0-9` +// * `-` but not as a starting or ending character +// * `.` as a separator for the textual portions of a domain name +// +// * http://en.wikipedia.org/wiki/Domain_name +// * http://en.wikipedia.org/wiki/Hostname +// +internals.validate = function (input) { + + // Before we can validate we need to take care of IDNs with unicode chars. + var ascii = Punycode.toASCII(input); + + if (ascii.length < 1) { + return 'DOMAIN_TOO_SHORT'; + } + if (ascii.length > 255) { + return 'DOMAIN_TOO_LONG'; + } + + // Check each part's length and allowed chars. + var labels = ascii.split('.'); + var label; + + for (var i = 0; i < labels.length; ++i) { + label = labels[i]; + if (!label.length) { + return 'LABEL_TOO_SHORT'; + } + if (label.length > 63) { + return 'LABEL_TOO_LONG'; + } + if (label.charAt(0) === '-') { + return 'LABEL_STARTS_WITH_DASH'; + } + if (label.charAt(label.length - 1) === '-') { + return 'LABEL_ENDS_WITH_DASH'; + } + if (!/^[a-z0-9\-]+$/.test(label)) { + return 'LABEL_INVALID_CHARS'; + } + } +}; + + +// +// Public API +// + + +// +// Parse domain. +// +exports.parse = function (input) { + + if (typeof input !== 'string') { + throw new TypeError('Domain name must be a string.'); + } + + // Force domain to lowercase. 
+ var domain = input.slice(0).toLowerCase(); + + // Handle FQDN. + // TODO: Simply remove trailing dot? + if (domain.charAt(domain.length - 1) === '.') { + domain = domain.slice(0, domain.length - 1); + } + + // Validate and sanitise input. + var error = internals.validate(domain); + if (error) { + return { + input: input, + error: { + message: exports.errorCodes[error], + code: error + } + }; + } + + var parsed = { + input: input, + tld: null, + sld: null, + domain: null, + subdomain: null, + listed: false + }; + + var domainParts = domain.split('.'); + + // Non-Internet TLD + if (domainParts[domainParts.length - 1] === 'local') { + return parsed; + } + + var handlePunycode = function () { + + if (!/xn--/.test(domain)) { + return parsed; + } + if (parsed.domain) { + parsed.domain = Punycode.toASCII(parsed.domain); + } + if (parsed.subdomain) { + parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } + return parsed; + }; + + var rule = internals.findRule(domain); + + // Unlisted tld. + if (!rule) { + if (domainParts.length < 2) { + return parsed; + } + parsed.tld = domainParts.pop(); + parsed.sld = domainParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + if (domainParts.length) { + parsed.subdomain = domainParts.pop(); + } + return handlePunycode(); + } + + // At this point we know the public suffix is listed. + parsed.listed = true; + + var tldParts = rule.suffix.split('.'); + var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); + + if (rule.exception) { + privateParts.push(tldParts.shift()); + } + + parsed.tld = tldParts.join('.'); + + if (!privateParts.length) { + return handlePunycode(); + } + + if (rule.wildcard) { + tldParts.unshift(privateParts.pop()); + parsed.tld = tldParts.join('.'); + } + + if (!privateParts.length) { + return handlePunycode(); + } + + parsed.sld = privateParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + + if (privateParts.length) { + parsed.subdomain = privateParts.join('.'); + } + + return handlePunycode(); +}; + + +// +// Get domain. +// +exports.get = function (domain) { + + if (!domain) { + return null; + } + return exports.parse(domain).domain || null; +}; + + +// +// Check whether domain belongs to a known public suffix. 
+// +exports.isValid = function (domain) { + + var parsed = exports.parse(domain); + return Boolean(parsed.domain && parsed.listed); +}; diff --git a/node_modules/psl/package.json b/node_modules/psl/package.json new file mode 100644 index 00000000..ee2da19f --- /dev/null +++ b/node_modules/psl/package.json @@ -0,0 +1,44 @@ +{ + "name": "psl", + "version": "1.8.0", + "description": "Domain name parser based on the Public Suffix List", + "repository": { + "type": "git", + "url": "git@github.com:lupomontero/psl.git" + }, + "main": "index.js", + "scripts": { + "pretest": "eslint .", + "test": "mocha test && karma start ./karma.conf.js --single-run", + "watch": "mocha test --watch", + "prebuild": "./scripts/update-rules.js", + "build": "browserify ./index.js --standalone=psl > ./dist/psl.js", + "postbuild": "cat ./dist/psl.js | uglifyjs -c -m > ./dist/psl.min.js", + "commit-and-pr": "commit-and-pr", + "changelog": "git log $(git describe --tags --abbrev=0)..HEAD --oneline --format=\"%h %s (%an <%ae>)\"" + }, + "keywords": [ + "publicsuffix", + "publicsuffixlist" + ], + "author": "Lupo Montero (https://lupomontero.com/)", + "license": "MIT", + "devDependencies": { + "JSONStream": "^1.3.5", + "browserify": "^16.5.0", + "commit-and-pr": "^1.0.4", + "eslint": "^6.8.0", + "eslint-config-hapi": "^12.0.0", + "eslint-plugin-hapi": "^4.1.0", + "karma": "^4.4.1", + "karma-browserify": "^7.0.0", + "karma-mocha": "^1.3.0", + "karma-mocha-reporter": "^2.2.5", + "karma-phantomjs-launcher": "^1.0.4", + "mocha": "^7.1.1", + "phantomjs-prebuilt": "^2.1.16", + "request": "^2.88.2", + "uglify-js": "^3.8.0", + "watchify": "^3.11.1" + } +} diff --git a/node_modules/punycode/LICENSE-MIT.txt b/node_modules/punycode/LICENSE-MIT.txt new file mode 100644 index 00000000..a41e0a7e --- /dev/null +++ b/node_modules/punycode/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
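Aside (illustrative only, not part of the diff): the vendored `psl` module added above exposes `parse`, `get`, and `isValid`. A minimal sketch of how that API is typically consumed, with the expected results inferred from the `index.js` implementation shown earlier:

```js
// Illustrative usage of the psl API vendored above (not part of the patch itself).
const psl = require('psl');

const parsed = psl.parse('www.example.co.uk');
// parsed.tld       -> 'co.uk'
// parsed.sld       -> 'example'
// parsed.domain    -> 'example.co.uk'
// parsed.subdomain -> 'www'
// parsed.listed    -> true (the suffix was found in the Public Suffix List rules)

psl.get('a.b.example.com'); // -> 'example.com' (registrable domain, or null)
psl.isValid('example.com'); // -> true
psl.isValid('com');         // -> false (a bare public suffix has no domain part)
```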
diff --git a/node_modules/punycode/README.md b/node_modules/punycode/README.md new file mode 100644 index 00000000..ee2f9d63 --- /dev/null +++ b/node_modules/punycode/README.md @@ -0,0 +1,122 @@ +# Punycode.js [![Build status](https://travis-ci.org/bestiejs/punycode.js.svg?branch=master)](https://travis-ci.org/bestiejs/punycode.js) [![Code coverage status](http://img.shields.io/codecov/c/github/bestiejs/punycode.js.svg)](https://codecov.io/gh/bestiejs/punycode.js) [![Dependency status](https://gemnasium.com/bestiejs/punycode.js.svg)](https://gemnasium.com/bestiejs/punycode.js) + +Punycode.js is a robust Punycode converter that fully complies to [RFC 3492](https://tools.ietf.org/html/rfc3492) and [RFC 5891](https://tools.ietf.org/html/rfc5891). + +This JavaScript library is the result of comparing, optimizing and documenting different open-source implementations of the Punycode algorithm: + +* [The C example code from RFC 3492](https://tools.ietf.org/html/rfc3492#appendix-C) +* [`punycode.c` by _Markus W. Scherer_ (IBM)](http://opensource.apple.com/source/ICU/ICU-400.42/icuSources/common/punycode.c) +* [`punycode.c` by _Ben Noordhuis_](https://github.com/bnoordhuis/punycode/blob/master/punycode.c) +* [JavaScript implementation by _some_](http://stackoverflow.com/questions/183485/can-anyone-recommend-a-good-free-javascript-for-punycode-to-unicode-conversion/301287#301287) +* [`punycode.js` by _Ben Noordhuis_](https://github.com/joyent/node/blob/426298c8c1c0d5b5224ac3658c41e7c2a3fe9377/lib/punycode.js) (note: [not fully compliant](https://github.com/joyent/node/issues/2072)) + +This project was [bundled](https://github.com/joyent/node/blob/master/lib/punycode.js) with Node.js from [v0.6.2+](https://github.com/joyent/node/compare/975f1930b1...61e796decc) until [v7](https://github.com/nodejs/node/pull/7941) (soft-deprecated). + +The current version supports recent versions of Node.js only. It provides a CommonJS module and an ES6 module. For the old version that offers the same functionality with broader support, including Rhino, Ringo, Narwhal, and web browsers, see [v1.4.1](https://github.com/bestiejs/punycode.js/releases/tag/v1.4.1). + +## Installation + +Via [npm](https://www.npmjs.com/): + +```bash +npm install punycode --save +``` + +In [Node.js](https://nodejs.org/): + +```js +const punycode = require('punycode'); +``` + +## API + +### `punycode.decode(string)` + +Converts a Punycode string of ASCII symbols to a string of Unicode symbols. + +```js +// decode domain name parts +punycode.decode('maana-pta'); // 'mañana' +punycode.decode('--dqo34k'); // '☃-⌘' +``` + +### `punycode.encode(string)` + +Converts a string of Unicode symbols to a Punycode string of ASCII symbols. + +```js +// encode domain name parts +punycode.encode('mañana'); // 'maana-pta' +punycode.encode('☃-⌘'); // '--dqo34k' +``` + +### `punycode.toUnicode(input)` + +Converts a Punycode string representing a domain name or an email address to Unicode. Only the Punycoded parts of the input will be converted, i.e. it doesn’t matter if you call it on a string that has already been converted to Unicode. + +```js +// decode domain names +punycode.toUnicode('xn--maana-pta.com'); +// → 'mañana.com' +punycode.toUnicode('xn----dqo34k.com'); +// → '☃-⌘.com' + +// decode email addresses +punycode.toUnicode('джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq'); +// → 'джумла@джpумлатест.bрфa' +``` + +### `punycode.toASCII(input)` + +Converts a lowercased Unicode string representing a domain name or an email address to Punycode. 
Only the non-ASCII parts of the input will be converted, i.e. it doesn’t matter if you call it with a domain that’s already in ASCII. + +```js +// encode domain names +punycode.toASCII('mañana.com'); +// → 'xn--maana-pta.com' +punycode.toASCII('☃-⌘.com'); +// → 'xn----dqo34k.com' + +// encode email addresses +punycode.toASCII('джумла@джpумлатест.bрфa'); +// → 'джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq' +``` + +### `punycode.ucs2` + +#### `punycode.ucs2.decode(string)` + +Creates an array containing the numeric code point values of each Unicode symbol in the string. While [JavaScript uses UCS-2 internally](https://mathiasbynens.be/notes/javascript-encoding), this function will convert a pair of surrogate halves (each of which UCS-2 exposes as separate characters) into a single code point, matching UTF-16. + +```js +punycode.ucs2.decode('abc'); +// → [0x61, 0x62, 0x63] +// surrogate pair for U+1D306 TETRAGRAM FOR CENTRE: +punycode.ucs2.decode('\uD834\uDF06'); +// → [0x1D306] +``` + +#### `punycode.ucs2.encode(codePoints)` + +Creates a string based on an array of numeric code point values. + +```js +punycode.ucs2.encode([0x61, 0x62, 0x63]); +// → 'abc' +punycode.ucs2.encode([0x1D306]); +// → '\uD834\uDF06' +``` + +### `punycode.version` + +A string representing the current Punycode.js version number. + +## Author + +| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](https://mathiasbynens.be/) | + +## License + +Punycode.js is available under the [MIT](https://mths.be/mit) license. diff --git a/node_modules/punycode/package.json b/node_modules/punycode/package.json new file mode 100644 index 00000000..9202ccf8 --- /dev/null +++ b/node_modules/punycode/package.json @@ -0,0 +1,58 @@ +{ + "name": "punycode", + "version": "2.1.1", + "description": "A robust Punycode converter that fully complies to RFC 3492 and RFC 5891, and works on nearly all JavaScript platforms.", + "homepage": "https://mths.be/punycode", + "main": "punycode.js", + "jsnext:main": "punycode.es6.js", + "module": "punycode.es6.js", + "engines": { + "node": ">=6" + }, + "keywords": [ + "punycode", + "unicode", + "idn", + "idna", + "dns", + "url", + "domain" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "contributors": [ + { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + } + ], + "repository": { + "type": "git", + "url": "https://github.com/bestiejs/punycode.js.git" + }, + "bugs": "https://github.com/bestiejs/punycode.js/issues", + "files": [ + "LICENSE-MIT.txt", + "punycode.js", + "punycode.es6.js" + ], + "scripts": { + "test": "mocha tests", + "prepublish": "node scripts/prepublish.js" + }, + "devDependencies": { + "codecov": "^1.0.1", + "istanbul": "^0.4.1", + "mocha": "^2.5.3" + }, + "jspm": { + "map": { + "./punycode.js": { + "node": "@node/punycode" + } + } + } +} diff --git a/node_modules/punycode/punycode.es6.js b/node_modules/punycode/punycode.es6.js new file mode 100644 index 00000000..4610bc9e --- /dev/null +++ b/node_modules/punycode/punycode.es6.js @@ -0,0 +1,441 @@ +'use strict'; + +/** Highest positive signed 32-bit float value */ +const maxInt = 2147483647; // aka. 
0x7FFFFFFF or 2^31-1 + +/** Bootstring parameters */ +const base = 36; +const tMin = 1; +const tMax = 26; +const skew = 38; +const damp = 700; +const initialBias = 72; +const initialN = 128; // 0x80 +const delimiter = '-'; // '\x2D' + +/** Regular expressions */ +const regexPunycode = /^xn--/; +const regexNonASCII = /[^\0-\x7E]/; // non-ASCII chars +const regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; // RFC 3490 separators + +/** Error messages */ +const errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' +}; + +/** Convenience shortcuts */ +const baseMinusTMin = base - tMin; +const floor = Math.floor; +const stringFromCharCode = String.fromCharCode; + +/*--------------------------------------------------------------------------*/ + +/** + * A generic error utility function. + * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ +function error(type) { + throw new RangeError(errors[type]); +} + +/** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ +function map(array, fn) { + const result = []; + let length = array.length; + while (length--) { + result[length] = fn(array[length]); + } + return result; +} + +/** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {Array} A new string of characters returned by the callback + * function. + */ +function mapDomain(string, fn) { + const parts = string.split('@'); + let result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + string = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + string = string.replace(regexSeparators, '\x2E'); + const labels = string.split('.'); + const encoded = map(labels, fn).join('.'); + return result + encoded; +} + +/** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. + */ +function ucs2decode(string) { + const output = []; + let counter = 0; + const length = string.length; + while (counter < length) { + const value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // It's a high surrogate, and there is a next character. + const extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // Low surrogate. 
+ output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // It's an unmatched surrogate; only append this code unit, in case the + // next code unit is the high surrogate of a surrogate pair. + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; +} + +/** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. + * @returns {String} The new Unicode string (UCS-2). + */ +const ucs2encode = array => String.fromCodePoint(...array); + +/** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. + * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. + */ +const basicToDigit = function(codePoint) { + if (codePoint - 0x30 < 0x0A) { + return codePoint - 0x16; + } + if (codePoint - 0x41 < 0x1A) { + return codePoint - 0x41; + } + if (codePoint - 0x61 < 0x1A) { + return codePoint - 0x61; + } + return base; +}; + +/** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ +const digitToBasic = function(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); +}; + +/** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ +const adapt = function(delta, numPoints, firstTime) { + let k = 0; + delta = firstTime ? floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); +}; + +/** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. + */ +const decode = function(input) { + // Don't use UCS-2. + const output = []; + const inputLength = input.length; + let i = 0; + let n = initialN; + let bias = initialBias; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + let basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (let j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (let index = basic > 0 ? 
basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. + let oldi = i; + for (let w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + const digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base || digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + + if (digit < t) { + break; + } + + const baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + const out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output. + output.splice(i++, 0, n); + + } + + return String.fromCodePoint(...output); +}; + +/** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ +const encode = function(input) { + const output = []; + + // Convert the input in UCS-2 to an array of Unicode code points. + input = ucs2decode(input); + + // Cache the length. + let inputLength = input.length; + + // Initialize the state. + let n = initialN; + let delta = 0; + let bias = initialBias; + + // Handle the basic code points. + for (const currentValue of input) { + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + let basicLength = output.length; + let handledCPCount = basicLength; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. + + // Finish the basic string with a delimiter unless it's empty. + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + let m = maxInt; + for (const currentValue of input) { + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow. + const handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (const currentValue of input) { + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + if (currentValue == n) { + // Represent delta as a generalized variable-length integer. + let q = delta; + for (let k = base; /* no condition */; k += base) { + const t = k <= bias ? tMin : (k >= bias + tMax ? 
tMax : k - bias); + if (q < t) { + break; + } + const qMinusT = q - t; + const baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); +}; + +/** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. + * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. + */ +const toUnicode = function(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); +}; + +/** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ +const toASCII = function(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); +}; + +/*--------------------------------------------------------------------------*/ + +/** Define the public API */ +const punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '2.1.0', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. + * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode +}; + +export { ucs2decode, ucs2encode, decode, encode, toASCII, toUnicode }; +export default punycode; diff --git a/node_modules/punycode/punycode.js b/node_modules/punycode/punycode.js new file mode 100644 index 00000000..ea61fd0d --- /dev/null +++ b/node_modules/punycode/punycode.js @@ -0,0 +1,440 @@ +'use strict'; + +/** Highest positive signed 32-bit float value */ +const maxInt = 2147483647; // aka. 
0x7FFFFFFF or 2^31-1 + +/** Bootstring parameters */ +const base = 36; +const tMin = 1; +const tMax = 26; +const skew = 38; +const damp = 700; +const initialBias = 72; +const initialN = 128; // 0x80 +const delimiter = '-'; // '\x2D' + +/** Regular expressions */ +const regexPunycode = /^xn--/; +const regexNonASCII = /[^\0-\x7E]/; // non-ASCII chars +const regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; // RFC 3490 separators + +/** Error messages */ +const errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' +}; + +/** Convenience shortcuts */ +const baseMinusTMin = base - tMin; +const floor = Math.floor; +const stringFromCharCode = String.fromCharCode; + +/*--------------------------------------------------------------------------*/ + +/** + * A generic error utility function. + * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ +function error(type) { + throw new RangeError(errors[type]); +} + +/** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ +function map(array, fn) { + const result = []; + let length = array.length; + while (length--) { + result[length] = fn(array[length]); + } + return result; +} + +/** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {Array} A new string of characters returned by the callback + * function. + */ +function mapDomain(string, fn) { + const parts = string.split('@'); + let result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + string = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + string = string.replace(regexSeparators, '\x2E'); + const labels = string.split('.'); + const encoded = map(labels, fn).join('.'); + return result + encoded; +} + +/** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. + */ +function ucs2decode(string) { + const output = []; + let counter = 0; + const length = string.length; + while (counter < length) { + const value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // It's a high surrogate, and there is a next character. + const extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // Low surrogate. 
+ output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // It's an unmatched surrogate; only append this code unit, in case the + // next code unit is the high surrogate of a surrogate pair. + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; +} + +/** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. + * @returns {String} The new Unicode string (UCS-2). + */ +const ucs2encode = array => String.fromCodePoint(...array); + +/** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. + * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. + */ +const basicToDigit = function(codePoint) { + if (codePoint - 0x30 < 0x0A) { + return codePoint - 0x16; + } + if (codePoint - 0x41 < 0x1A) { + return codePoint - 0x41; + } + if (codePoint - 0x61 < 0x1A) { + return codePoint - 0x61; + } + return base; +}; + +/** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ +const digitToBasic = function(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); +}; + +/** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ +const adapt = function(delta, numPoints, firstTime) { + let k = 0; + delta = firstTime ? floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); +}; + +/** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. + */ +const decode = function(input) { + // Don't use UCS-2. + const output = []; + const inputLength = input.length; + let i = 0; + let n = initialN; + let bias = initialBias; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + let basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (let j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (let index = basic > 0 ? 
basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. + let oldi = i; + for (let w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + const digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base || digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + + if (digit < t) { + break; + } + + const baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + const out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output. + output.splice(i++, 0, n); + + } + + return String.fromCodePoint(...output); +}; + +/** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ +const encode = function(input) { + const output = []; + + // Convert the input in UCS-2 to an array of Unicode code points. + input = ucs2decode(input); + + // Cache the length. + let inputLength = input.length; + + // Initialize the state. + let n = initialN; + let delta = 0; + let bias = initialBias; + + // Handle the basic code points. + for (const currentValue of input) { + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + let basicLength = output.length; + let handledCPCount = basicLength; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. + + // Finish the basic string with a delimiter unless it's empty. + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + let m = maxInt; + for (const currentValue of input) { + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow. + const handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (const currentValue of input) { + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + if (currentValue == n) { + // Represent delta as a generalized variable-length integer. + let q = delta; + for (let k = base; /* no condition */; k += base) { + const t = k <= bias ? tMin : (k >= bias + tMax ? 
tMax : k - bias); + if (q < t) { + break; + } + const qMinusT = q - t; + const baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); +}; + +/** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. + * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. + */ +const toUnicode = function(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); +}; + +/** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ +const toASCII = function(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); +}; + +/*--------------------------------------------------------------------------*/ + +/** Define the public API */ +const punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '2.1.0', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. + * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode +}; + +module.exports = punycode; diff --git a/node_modules/sax/LICENSE b/node_modules/sax/LICENSE new file mode 100644 index 00000000..ccffa082 --- /dev/null +++ b/node_modules/sax/LICENSE @@ -0,0 +1,41 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +==== + +`String.fromCodePoint` by Mathias Bynens used according to terms of MIT +License, as follows: + + Copyright Mathias Bynens + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/sax/README.md b/node_modules/sax/README.md new file mode 100644 index 00000000..afcd3f3d --- /dev/null +++ b/node_modules/sax/README.md @@ -0,0 +1,225 @@ +# sax js + +A sax-style parser for XML and HTML. + +Designed with [node](http://nodejs.org/) in mind, but should work fine in +the browser or other CommonJS implementations. + +## What This Is + +* A very simple tool to parse through an XML string. +* A stepping stone to a streaming HTML parser. +* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML + docs. + +## What This Is (probably) Not + +* An HTML Parser - That's a fine goal, but this isn't it. It's just + XML. +* A DOM Builder - You can use it to build an object model out of XML, + but it doesn't do that out of the box. +* XSLT - No DOM = no querying. +* 100% Compliant with (some other SAX implementation) - Most SAX + implementations are in Java and do a lot more than this does. +* An XML Validator - It does a little validation when in strict mode, but + not much. +* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic + masochism. +* A DTD-aware Thing - Fetching DTDs is a much bigger job. + +## Regarding `Hello, world!').close(); + +// stream usage +// takes the same options as the parser +var saxStream = require("sax").createStream(strict, options) +saxStream.on("error", function (e) { + // unhandled errors will throw, since this is a proper node + // event emitter. + console.error("error!", e) + // clear the error + this._parser.error = null + this._parser.resume() +}) +saxStream.on("opentag", function (node) { + // same object as above +}) +// pipe is supported, and it's readable/writable +// same chunks coming in also go out. +fs.createReadStream("file.xml") + .pipe(saxStream) + .pipe(fs.createWriteStream("file-copy.xml")) +``` + + +## Arguments + +Pass the following arguments to the parser function. All are optional. + +`strict` - Boolean. Whether or not to be a jerk. Default: `false`. + +`opt` - Object bag of settings regarding string formatting. All default to `false`. + +Settings supported: + +* `trim` - Boolean. Whether or not to trim text and comment nodes. +* `normalize` - Boolean. If true, then turn any whitespace into a single + space. +* `lowercase` - Boolean. 
If true, then lowercase tag names and attribute names + in loose mode, rather than uppercasing them. +* `xmlns` - Boolean. If true, then namespaces are supported. +* `position` - Boolean. If false, then don't track line/col/position. +* `strictEntities` - Boolean. If true, only parse [predefined XML + entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent) + (`&`, `'`, `>`, `<`, and `"`) + +## Methods + +`write` - Write bytes onto the stream. You don't have to do this all at +once. You can keep writing as much as you want. + +`close` - Close the stream. Once closed, no more data may be written until +it is done processing the buffer, which is signaled by the `end` event. + +`resume` - To gracefully handle errors, assign a listener to the `error` +event. Then, when the error is taken care of, you can call `resume` to +continue parsing. Otherwise, the parser will not continue while in an error +state. + +## Members + +At all times, the parser object will have the following members: + +`line`, `column`, `position` - Indications of the position in the XML +document where the parser currently is looking. + +`startTagPosition` - Indicates the position where the current tag starts. + +`closed` - Boolean indicating whether or not the parser can be written to. +If it's `true`, then wait for the `ready` event to write again. + +`strict` - Boolean indicating whether or not the parser is a jerk. + +`opt` - Any options passed into the constructor. + +`tag` - The current tag being dealt with. + +And a bunch of other stuff that you probably shouldn't touch. + +## Events + +All events emit with a single argument. To listen to an event, assign a +function to `on`. Functions get executed in the this-context of +the parser object. The list of supported events are also in the exported +`EVENTS` array. + +When using the stream interface, assign handlers using the EventEmitter +`on` function in the normal fashion. + +`error` - Indication that something bad happened. The error will be hanging +out on `parser.error`, and must be deleted before parsing can continue. By +listening to this event, you can keep an eye on that kind of stuff. Note: +this happens *much* more in strict mode. Argument: instance of `Error`. + +`text` - Text node. Argument: string of text. + +`doctype` - The ``. Argument: +object with `name` and `body` members. Attributes are not parsed, as +processing instructions have implementation dependent semantics. + +`sgmldeclaration` - Random SGML declarations. Stuff like `` +would trigger this kind of event. This is a weird thing to support, so it +might go away at some point. SAX isn't intended to be used to parse SGML, +after all. + +`opentagstart` - Emitted immediately when the tag name is available, +but before any attributes are encountered. Argument: object with a +`name` field and an empty `attributes` set. Note that this is the +same object that will later be emitted in the `opentag` event. + +`opentag` - An opening tag. Argument: object with `name` and `attributes`. +In non-strict mode, tag names are uppercased, unless the `lowercase` +option is set. If the `xmlns` option is set, then it will contain +namespace binding information on the `ns` member, and will have a +`local`, `prefix`, and `uri` member. + +`closetag` - A closing tag. In loose mode, tags are auto-closed if their +parent closes. In strict mode, well-formedness is enforced. Note that +self-closing tags will have `closeTag` emitted immediately after `openTag`. +Argument: tag name. + +`attribute` - An attribute node. 
Argument: object with `name` and `value`. +In non-strict mode, attribute names are uppercased, unless the `lowercase` +option is set. If the `xmlns` option is set, it will also contains namespace +information. + +`comment` - A comment node. Argument: the string of the comment. + +`opencdata` - The opening tag of a ``) of a `` tags trigger a `"script"` +event, and their contents are not checked for special xml characters. +If you pass `noscript: true`, then this behavior is suppressed. + +## Reporting Problems + +It's best to write a failing test if you find an issue. I will always +accept pull requests with failing tests if they demonstrate intended +behavior, but it is very hard to figure out what issue you're describing +without a test. Writing a test is also the best way for you yourself +to figure out if you really understand the issue you think you have with +sax-js. diff --git a/node_modules/sax/lib/sax.js b/node_modules/sax/lib/sax.js new file mode 100644 index 00000000..795d607e --- /dev/null +++ b/node_modules/sax/lib/sax.js @@ -0,0 +1,1565 @@ +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. 
+ if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. + var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = require('stream').Stream + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. 
+ me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = require('string_decoder').StringDecoder + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' + } + + function isQuote (c) { + return c === '"' || c === '\'' + } + + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } + + function isMatch (regex, c) { + return regex.test(c) + } + + function notMatch (regex, c) { + return !isMatch(regex, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. 
+ OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, // \ No newline at end of file diff --git a/node_modules/tslib/tslib.es6.js b/node_modules/tslib/tslib.es6.js new file mode 100644 index 00000000..0e0d8d07 --- /dev/null +++ b/node_modules/tslib/tslib.es6.js @@ -0,0 +1,218 @@ +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export function __createBinding(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +} + +export function __exportStar(m, exports) { + for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) exports[p] = m[p]; +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +}; + +export function __await(v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result.default = mod; + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, privateMap) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to get private field on non-instance"); + } + return privateMap.get(receiver); +} + +export function __classPrivateFieldSet(receiver, privateMap, value) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to set private field on non-instance"); + } + privateMap.set(receiver, value); + return value; +} diff --git a/node_modules/tslib/tslib.html b/node_modules/tslib/tslib.html new file mode 100644 index 00000000..44c9ba51 --- /dev/null +++ b/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/tslib/tslib.js b/node_modules/tslib/tslib.js new file mode 100644 index 00000000..e5b7c9b8 --- /dev/null +++ b/node_modules/tslib/tslib.js @@ -0,0 +1,284 @@ +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. 
+ +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + + __extends = function (d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __createBinding = function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }; + + __exportStar = function (m, exports) { + for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) exports[p] = m[p]; + }; + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, privateMap) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to get private field on non-instance"); + } + return privateMap.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, privateMap, value) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to set private field on non-instance"); + } + privateMap.set(receiver, value); + return value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); diff --git a/node_modules/universalify/LICENSE b/node_modules/universalify/LICENSE new file mode 100644 index 00000000..514e84e6 --- /dev/null +++ b/node_modules/universalify/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2017, Ryan Zimmerman + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the 'Software'), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/universalify/README.md b/node_modules/universalify/README.md new file mode 100644 index 00000000..487067bf --- /dev/null +++ b/node_modules/universalify/README.md @@ -0,0 +1,76 @@ +# universalify + +[![Travis branch](https://img.shields.io/travis/RyanZim/universalify/master.svg)](https://travis-ci.org/RyanZim/universalify) +![Coveralls github branch](https://img.shields.io/coveralls/github/RyanZim/universalify/master.svg) +![npm](https://img.shields.io/npm/dm/universalify.svg) +![npm](https://img.shields.io/npm/l/universalify.svg) + +Make a callback- or promise-based function support both promises and callbacks. + +Uses the native promise implementation. 
+ +## Installation + +```bash +npm install universalify +``` + +## API + +### `universalify.fromCallback(fn)` + +Takes a callback-based function to universalify, and returns the universalified function. + +Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with more than three arguments, and does not ensure that the callback is only called once. + +```js +function callbackFn (n, cb) { + setTimeout(() => cb(null, n), 15) +} + +const fn = universalify.fromCallback(callbackFn) + +// Works with Promises: +fn('Hello World!') +.then(result => console.log(result)) // -> Hello World! +.catch(error => console.error(error)) + +// Works with Callbacks: +fn('Hi!', (error, result) => { + if (error) return console.error(error) + console.log(result) + // -> Hi! +}) +``` + +### `universalify.fromPromise(fn)` + +Takes a promise-based function to universalify, and returns the universalified function. + +Function must return a valid JS promise. `universalify` does not ensure that a valid promise is returned. + +```js +function promiseFn (n) { + return new Promise(resolve => { + setTimeout(() => resolve(n), 15) + }) +} + +const fn = universalify.fromPromise(promiseFn) + +// Works with Promises: +fn('Hello World!') +.then(result => console.log(result)) // -> Hello World! +.catch(error => console.error(error)) + +// Works with Callbacks: +fn('Hi!', (error, result) => { + if (error) return console.error(error) + console.log(result) + // -> Hi! +}) +``` + +## License + +MIT diff --git a/node_modules/universalify/index.js b/node_modules/universalify/index.js new file mode 100644 index 00000000..0c9ba393 --- /dev/null +++ b/node_modules/universalify/index.js @@ -0,0 +1,25 @@ +'use strict' + +exports.fromCallback = function (fn) { + return Object.defineProperty(function () { + if (typeof arguments[arguments.length - 1] === 'function') fn.apply(this, arguments) + else { + return new Promise((resolve, reject) => { + arguments[arguments.length] = (err, res) => { + if (err) return reject(err) + resolve(res) + } + arguments.length++ + fn.apply(this, arguments) + }) + } + }, 'name', { value: fn.name }) +} + +exports.fromPromise = function (fn) { + return Object.defineProperty(function () { + const cb = arguments[arguments.length - 1] + if (typeof cb !== 'function') return fn.apply(this, arguments) + else fn.apply(this, arguments).then(r => cb(null, r), cb) + }, 'name', { value: fn.name }) +} diff --git a/node_modules/universalify/package.json b/node_modules/universalify/package.json new file mode 100644 index 00000000..321b1928 --- /dev/null +++ b/node_modules/universalify/package.json @@ -0,0 +1,34 @@ +{ + "name": "universalify", + "version": "0.1.2", + "description": "Make a callback- or promise-based function support both promises and callbacks.", + "keywords": [ + "callback", + "native", + "promise" + ], + "homepage": "https://github.com/RyanZim/universalify#readme", + "bugs": "https://github.com/RyanZim/universalify/issues", + "license": "MIT", + "author": "Ryan Zimmerman ", + "files": [ + "index.js" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/RyanZim/universalify.git" + }, + "scripts": { + "test": "standard && nyc tape test/*.js | colortape" + }, + "devDependencies": { + "colortape": "^0.1.2", + "coveralls": "^3.0.1", + "nyc": "^10.2.0", + "standard": "^10.0.1", + "tape": "^4.6.3" + }, + "engines": { + "node": ">= 4.0.0" + } +} diff --git a/node_modules/xml2js/LICENSE 
b/node_modules/xml2js/LICENSE new file mode 100644 index 00000000..e3b4222a --- /dev/null +++ b/node_modules/xml2js/LICENSE @@ -0,0 +1,19 @@ +Copyright 2010, 2011, 2012, 2013. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/xml2js/README.md b/node_modules/xml2js/README.md new file mode 100644 index 00000000..7534c893 --- /dev/null +++ b/node_modules/xml2js/README.md @@ -0,0 +1,488 @@ +node-xml2js +=========== + +Ever had the urge to parse XML? And wanted to access the data in some sane, +easy way? Don't want to compile a C parser, for whatever reason? Then xml2js is +what you're looking for! + +Description +=========== + +Simple XML to JavaScript object converter. It supports bi-directional conversion. +Uses [sax-js](https://github.com/isaacs/sax-js/) and +[xmlbuilder-js](https://github.com/oozcitak/xmlbuilder-js/). + +Note: If you're looking for a full DOM parser, you probably want +[JSDom](https://github.com/tmpvar/jsdom). + +Installation +============ + +Simplest way to install `xml2js` is to use [npm](http://npmjs.org), just `npm +install xml2js` which will download xml2js and all dependencies. + +xml2js is also available via [Bower](http://bower.io/), just `bower install +xml2js` which will download xml2js and all dependencies. + +Usage +===== + +No extensive tutorials required because you are a smart developer! The task of +parsing XML should be an easy one, so let's make it so! Here's some examples. + +Shoot-and-forget usage +---------------------- + +You want to parse XML as simple and easy as possible? It's dangerous to go +alone, take this: + +```javascript +var parseString = require('xml2js').parseString; +var xml = "Hello xml2js!" +parseString(xml, function (err, result) { + console.dir(result); +}); +``` + +Can't get easier than this, right? This works starting with `xml2js` 0.2.3. +With CoffeeScript it looks like this: + +```coffeescript +{parseString} = require 'xml2js' +xml = "Hello xml2js!" 
+parseString xml, (err, result) -> + console.dir result +``` + +If you need some special options, fear not, `xml2js` supports a number of +options (see below), you can specify these as second argument: + +```javascript +parseString(xml, {trim: true}, function (err, result) { +}); +``` + +Simple as pie usage +------------------- + +That's right, if you have been using xml-simple or a home-grown +wrapper, this was added in 0.1.11 just for you: + +```javascript +var fs = require('fs'), + xml2js = require('xml2js'); + +var parser = new xml2js.Parser(); +fs.readFile(__dirname + '/foo.xml', function(err, data) { + parser.parseString(data, function (err, result) { + console.dir(result); + console.log('Done'); + }); +}); +``` + +Look ma, no event listeners! + +You can also use `xml2js` from +[CoffeeScript](https://github.com/jashkenas/coffeescript), further reducing +the clutter: + +```coffeescript +fs = require 'fs', +xml2js = require 'xml2js' + +parser = new xml2js.Parser() +fs.readFile __dirname + '/foo.xml', (err, data) -> + parser.parseString data, (err, result) -> + console.dir result + console.log 'Done.' +``` + +But what happens if you forget the `new` keyword to create a new `Parser`? In +the middle of a nightly coding session, it might get lost, after all. Worry +not, we got you covered! Starting with 0.2.8 you can also leave it out, in +which case `xml2js` will helpfully add it for you, no bad surprises and +inexplicable bugs! + +Promise usage +------------- + +```javascript +var xml2js = require('xml2js'); +var xml = ''; + +// With parser +var parser = new xml2js.Parser(/* options */); +parser.parseStringPromise(data).then(function (result) { + console.dir(result); + console.log('Done'); +}) +.catch(function (err) { + // Failed +}); + +// Without parser +xml2js.parseStringPromise(data /*, options */).then(function (result) { + console.dir(result); + console.log('Done'); +}) +.catch(function (err) { + // Failed +}); +``` + +Parsing multiple files +---------------------- + +If you want to parse multiple files, you have multiple possibilities: + + * You can create one `xml2js.Parser` per file. That's the recommended one + and is promised to always *just work*. + * You can call `reset()` on your parser object. + * You can hope everything goes well anyway. This behaviour is not + guaranteed work always, if ever. Use option #1 if possible. Thanks! + +So you wanna some JSON? +----------------------- + +Just wrap the `result` object in a call to `JSON.stringify` like this +`JSON.stringify(result)`. You get a string containing the JSON representation +of the parsed object that you can feed to JSON-hungry consumers. + +Displaying results +------------------ + +You might wonder why, using `console.dir` or `console.log` the output at some +level is only `[Object]`. Don't worry, this is not because `xml2js` got lazy. +That's because Node uses `util.inspect` to convert the object into strings and +that function stops after `depth=2` which is a bit low for most XML. + +To display the whole deal, you can use `console.log(util.inspect(result, false, +null))`, which displays the whole result. + +So much for that, but what if you use +[eyes](https://github.com/cloudhead/eyes.js) for nice colored output and it +truncates the output with `…`? Don't fear, there's also a solution for that, +you just need to increase the `maxLength` limit by creating a custom inspector +`var inspect = require('eyes').inspector({maxLength: false})` and then you can +easily `inspect(result)`. 
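To see the `util.inspect` call from the paragraph above end to end, here is a minimal sketch (the nested XML string is an arbitrary example chosen only to exceed the default inspection depth):

```javascript
// Minimal sketch: print a deeply nested parse result in full.
// console.dir(result) alone collapses levels below depth 2 into [Object];
// util.inspect(result, false, null) shows the whole structure.
var util = require('util');
var parseString = require('xml2js').parseString;

var xml = '<a><b><c><d>deep</d></c></b></a>';
parseString(xml, function (err, result) {
  if (err) throw err;
  console.log(util.inspect(result, false, null));
});
```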
+ +XML builder usage +----------------- + +Since 0.4.0, objects can be also be used to build XML: + +```javascript +var xml2js = require('xml2js'); + +var obj = {name: "Super", Surname: "Man", age: 23}; + +var builder = new xml2js.Builder(); +var xml = builder.buildObject(obj); +``` + +At the moment, a one to one bi-directional conversion is guaranteed only for +default configuration, except for `attrkey`, `charkey` and `explicitArray` options +you can redefine to your taste. Writing CDATA is supported via setting the `cdata` +option to `true`. + +To specify attributes: +```javascript +var xml2js = require('xml2js'); + +var obj = {root: {$: {id: "my id"}, _: "my inner text"}}; + +var builder = new xml2js.Builder(); +var xml = builder.buildObject(obj); +``` + +### Adding xmlns attributes + +You can generate XML that declares XML namespace prefix / URI pairs with xmlns attributes. + +Example declaring a default namespace on the root element: + +```javascript +let obj = { + Foo: { + $: { + "xmlns": "http://foo.com" + } + } +}; +``` +Result of `buildObject(obj)`: +```xml + +``` +Example declaring non-default namespaces on non-root elements: +```javascript +let obj = { + 'foo:Foo': { + $: { + 'xmlns:foo': 'http://foo.com' + }, + 'bar:Bar': { + $: { + 'xmlns:bar': 'http://bar.com' + } + } + } +} +``` +Result of `buildObject(obj)`: +```xml + + + +``` + + +Processing attribute, tag names and values +------------------------------------------ + +Since 0.4.1 you can optionally provide the parser with attribute name and tag name processors as well as element value processors (Since 0.4.14, you can also optionally provide the parser with attribute value processors): + +```javascript + +function nameToUpperCase(name){ + return name.toUpperCase(); +} + +//transform all attribute and tag names and values to uppercase +parseString(xml, { + tagNameProcessors: [nameToUpperCase], + attrNameProcessors: [nameToUpperCase], + valueProcessors: [nameToUpperCase], + attrValueProcessors: [nameToUpperCase]}, + function (err, result) { + // processed data +}); +``` + +The `tagNameProcessors` and `attrNameProcessors` options +accept an `Array` of functions with the following signature: + +```javascript +function (name){ + //do something with `name` + return name +} +``` + +The `attrValueProcessors` and `valueProcessors` options +accept an `Array` of functions with the following signature: + +```javascript +function (value, name) { + //`name` will be the node name or attribute name + //do something with `value`, (optionally) dependent on the node/attr name + return value +} +``` + +Some processors are provided out-of-the-box and can be found in `lib/processors.js`: + +- `normalize`: transforms the name to lowercase. +(Automatically used when `options.normalize` is set to `true`) + +- `firstCharLowerCase`: transforms the first character to lower case. +E.g. 'MyTagName' becomes 'myTagName' + +- `stripPrefix`: strips the xml namespace prefix. E.g `` will become 'Bar'. +(N.B.: the `xmlns` prefix is NOT stripped.) + +- `parseNumbers`: parses integer-like strings as integers and float-like strings as floats +E.g. "0" becomes 0 and "15.56" becomes 15.56 + +- `parseBooleans`: parses boolean-like strings to booleans +E.g. "true" becomes true and "False" becomes false + +Options +======= + +Apart from the default settings, there are a number of options that can be +specified for the parser. Options are specified by ``new Parser({optionName: +value})``. 
Possible options are: + + * `attrkey` (default: `$`): Prefix that is used to access the attributes. + Version 0.1 default was `@`. + * `charkey` (default: `_`): Prefix that is used to access the character + content. Version 0.1 default was `#`. + * `explicitCharkey` (default: `false`) + * `trim` (default: `false`): Trim the whitespace at the beginning and end of + text nodes. + * `normalizeTags` (default: `false`): Normalize all tag names to lowercase. + * `normalize` (default: `false`): Trim whitespaces inside text nodes. + * `explicitRoot` (default: `true`): Set this if you want to get the root + node in the resulting object. + * `emptyTag` (default: `''`): what will the value of empty nodes be. + * `explicitArray` (default: `true`): Always put child nodes in an array if + true; otherwise an array is created only if there is more than one. + * `ignoreAttrs` (default: `false`): Ignore all XML attributes and only create + text nodes. + * `mergeAttrs` (default: `false`): Merge attributes and child elements as + properties of the parent, instead of keying attributes off a child + attribute object. This option is ignored if `ignoreAttrs` is `true`. + * `validator` (default `null`): You can specify a callable that validates + the resulting structure somehow, however you want. See unit tests + for an example. + * `xmlns` (default `false`): Give each element a field usually called '$ns' + (the first character is the same as attrkey) that contains its local name + and namespace URI. + * `explicitChildren` (default `false`): Put child elements to separate + property. Doesn't work with `mergeAttrs = true`. If element has no children + then "children" won't be created. Added in 0.2.5. + * `childkey` (default `$$`): Prefix that is used to access child elements if + `explicitChildren` is set to `true`. Added in 0.2.5. + * `preserveChildrenOrder` (default `false`): Modifies the behavior of + `explicitChildren` so that the value of the "children" property becomes an + ordered array. When this is `true`, every node will also get a `#name` field + whose value will correspond to the XML nodeName, so that you may iterate + the "children" array and still be able to determine node names. The named + (and potentially unordered) properties are also retained in this + configuration at the same level as the ordered "children" array. Added in + 0.4.9. + * `charsAsChildren` (default `false`): Determines whether chars should be + considered children if `explicitChildren` is on. Added in 0.2.5. + * `includeWhiteChars` (default `false`): Determines whether whitespace-only + text nodes should be included. Added in 0.4.17. + * `async` (default `false`): Should the callbacks be async? This *might* be + an incompatible change if your code depends on sync execution of callbacks. + Future versions of `xml2js` might change this default, so the recommendation + is to not depend on sync execution anyway. Added in 0.2.6. + * `strict` (default `true`): Set sax-js to strict or non-strict parsing mode. + Defaults to `true` which is *highly* recommended, since parsing HTML which + is not well-formed XML might yield just about anything. Added in 0.2.7. + * `attrNameProcessors` (default: `null`): Allows the addition of attribute + name processing functions. Accepts an `Array` of functions with following + signature: + ```javascript + function (name){ + //do something with `name` + return name + } + ``` + Added in 0.4.14 + * `attrValueProcessors` (default: `null`): Allows the addition of attribute + value processing functions. 
Accepts an `Array` of functions with following + signature: + ```javascript + function (value, name){ + //do something with `name` + return name + } + ``` + Added in 0.4.1 + * `tagNameProcessors` (default: `null`): Allows the addition of tag name + processing functions. Accepts an `Array` of functions with following + signature: + ```javascript + function (name){ + //do something with `name` + return name + } + ``` + Added in 0.4.1 + * `valueProcessors` (default: `null`): Allows the addition of element value + processing functions. Accepts an `Array` of functions with following + signature: + ```javascript + function (value, name){ + //do something with `name` + return name + } + ``` + Added in 0.4.6 + +Options for the `Builder` class +------------------------------- +These options are specified by ``new Builder({optionName: value})``. +Possible options are: + + * `attrkey` (default: `$`): Prefix that is used to access the attributes. + Version 0.1 default was `@`. + * `charkey` (default: `_`): Prefix that is used to access the character + content. Version 0.1 default was `#`. + * `rootName` (default `root` or the root key name): root element name to be used in case + `explicitRoot` is `false` or to override the root element name. + * `renderOpts` (default `{ 'pretty': true, 'indent': ' ', 'newline': '\n' }`): + Rendering options for xmlbuilder-js. + * pretty: prettify generated XML + * indent: whitespace for indentation (only when pretty) + * newline: newline char (only when pretty) + * `xmldec` (default `{ 'version': '1.0', 'encoding': 'UTF-8', 'standalone': true }`: + XML declaration attributes. + * `xmldec.version` A version number string, e.g. 1.0 + * `xmldec.encoding` Encoding declaration, e.g. UTF-8 + * `xmldec.standalone` standalone document declaration: true or false + * `doctype` (default `null`): optional DTD. Eg. `{'ext': 'hello.dtd'}` + * `headless` (default: `false`): omit the XML header. Added in 0.4.3. + * `allowSurrogateChars` (default: `false`): allows using characters from the Unicode + surrogate blocks. + * `cdata` (default: `false`): wrap text nodes in `` instead of + escaping when necessary. Does not add `` if it is not required. + Added in 0.4.5. + +`renderOpts`, `xmldec`,`doctype` and `headless` pass through to +[xmlbuilder-js](https://github.com/oozcitak/xmlbuilder-js). + +Updating to new version +======================= + +Version 0.2 changed the default parsing settings, but version 0.1.14 introduced +the default settings for version 0.2, so these settings can be tried before the +migration. + +```javascript +var xml2js = require('xml2js'); +var parser = new xml2js.Parser(xml2js.defaults["0.2"]); +``` + +To get the 0.1 defaults in version 0.2 you can just use +`xml2js.defaults["0.1"]` in the same place. This provides you with enough time +to migrate to the saner way of parsing in `xml2js` 0.2. We try to make the +migration as simple and gentle as possible, but some breakage cannot be +avoided. + +So, what exactly did change and why? In 0.2 we changed some defaults to parse +the XML in a more universal and sane way. So we disabled `normalize` and `trim` +so `xml2js` does not cut out any text content. You can reenable this at will of +course. A more important change is that we return the root tag in the resulting +JavaScript structure via the `explicitRoot` setting, so you need to access the +first element. This is useful for anybody who wants to know what the root node +is and preserves more information. 
+
+Running tests, development
+==========================
+
+[![Build Status](https://travis-ci.org/Leonidas-from-XIV/node-xml2js.svg?branch=master)](https://travis-ci.org/Leonidas-from-XIV/node-xml2js)
+[![Coverage Status](https://coveralls.io/repos/Leonidas-from-XIV/node-xml2js/badge.svg?branch=master)](https://coveralls.io/r/Leonidas-from-XIV/node-xml2js?branch=master)
+[![Dependency Status](https://david-dm.org/Leonidas-from-XIV/node-xml2js.svg)](https://david-dm.org/Leonidas-from-XIV/node-xml2js)
+
+The development requirements are handled by npm; you just need to install them.
+We also have a number of unit tests, which can be run with `npm test` directly
+from the project root. This runs zap to discover all the tests and execute
+them.
+
+If you would like to contribute, keep in mind that `xml2js` is written in
+CoffeeScript, so don't develop on the JavaScript files that are checked into
+the repository for convenience reasons. Also, please write some unit tests to
+check your changes, and if it is a user-facing change, add some documentation
+to this README so people will know it exists. Thanks in advance!
+
+Getting support
+===============
+
+Please, if you have a problem with the library, first make sure you read this
+README. If you read this far, thanks, you're good. Then, please make sure your
+problem really is with `xml2js`. It is? Okay, then I'll look at it. Send me a
+mail and we can talk. Please don't open issues, as I don't think that is the
+proper forum for support problems. Some problems may well turn out to be real
+bugs in `xml2js`; if so, I'll let you know to open an issue instead :)
+
+But if you know you really found a bug, feel free to open an issue instead.
diff --git a/node_modules/xml2js/lib/bom.js b/node_modules/xml2js/lib/bom.js new file mode 100644 index 00000000..7b8fb27b --- /dev/null +++ b/node_modules/xml2js/lib/bom.js @@ -0,0 +1,12 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + exports.stripBOM = function(str) { + if (str[0] === '\uFEFF') { + return str.substring(1); + } else { + return str; + } + }; + +}).call(this); diff --git a/node_modules/xml2js/lib/builder.js b/node_modules/xml2js/lib/builder.js new file mode 100644 index 00000000..58f36384 --- /dev/null +++ b/node_modules/xml2js/lib/builder.js @@ -0,0 +1,127 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var builder, defaults, escapeCDATA, requiresCDATA, wrapCDATA, + hasProp = {}.hasOwnProperty; + + builder = require('xmlbuilder'); + + defaults = require('./defaults').defaults; + + requiresCDATA = function(entry) { + return typeof entry === "string" && (entry.indexOf('&') >= 0 || entry.indexOf('>') >= 0 || entry.indexOf('<') >= 0); + }; + + wrapCDATA = function(entry) { + return ""; + }; + + escapeCDATA = function(entry) { + return entry.replace(']]>', ']]]]>'); + }; + + exports.Builder = (function() { + function Builder(opts) { + var key, ref, value; + this.options = {}; + ref = defaults["0.2"]; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + value = ref[key]; + this.options[key] = value; + } + for (key in opts) { + if (!hasProp.call(opts, key)) continue; + value = opts[key]; + this.options[key] = value; + } + } + + Builder.prototype.buildObject = function(rootObj) { + var attrkey, charkey, render, rootElement, rootName; + attrkey = this.options.attrkey; + charkey = this.options.charkey; + if ((Object.keys(rootObj).length === 1) && (this.options.rootName === defaults['0.2'].rootName)) { + rootName = Object.keys(rootObj)[0]; + rootObj = rootObj[rootName]; + } else { + rootName = this.options.rootName; + } + render = (function(_this) { + return function(element, obj) { + var attr, child, entry, index, key, value; + if (typeof obj !== 'object') { + if (_this.options.cdata && requiresCDATA(obj)) { + element.raw(wrapCDATA(obj)); + } else { + element.txt(obj); + } + } else if (Array.isArray(obj)) { + for (index in obj) { + if (!hasProp.call(obj, index)) continue; + child = obj[index]; + for (key in child) { + entry = child[key]; + element = render(element.ele(key), entry).up(); + } + } + } else { + for (key in obj) { + if (!hasProp.call(obj, key)) continue; + child = obj[key]; + if (key === attrkey) { + if (typeof child === "object") { + for (attr in child) { + value = child[attr]; + element = element.att(attr, value); + } + } + } else if (key === charkey) { + if (_this.options.cdata && requiresCDATA(child)) { + element = element.raw(wrapCDATA(child)); + } else { + element = element.txt(child); + } + } else if (Array.isArray(child)) { + for (index in child) { + if (!hasProp.call(child, index)) continue; + entry = child[index]; + if (typeof entry === 'string') { + if (_this.options.cdata && requiresCDATA(entry)) { + element = element.ele(key).raw(wrapCDATA(entry)).up(); + } else { + element = element.ele(key, entry).up(); + } + } else { + element = render(element.ele(key), entry).up(); + } + } + } else if (typeof child === "object") { + element = render(element.ele(key), child).up(); + } else { + if (typeof child === 'string' && _this.options.cdata && requiresCDATA(child)) { + element = element.ele(key).raw(wrapCDATA(child)).up(); + } else { + if (child == null) { + child = ''; + } + element = element.ele(key, 
child.toString()).up(); + } + } + } + } + return element; + }; + })(this); + rootElement = builder.create(rootName, this.options.xmldec, this.options.doctype, { + headless: this.options.headless, + allowSurrogateChars: this.options.allowSurrogateChars + }); + return render(rootElement, rootObj).end(this.options.renderOpts); + }; + + return Builder; + + })(); + +}).call(this); diff --git a/node_modules/xml2js/lib/defaults.js b/node_modules/xml2js/lib/defaults.js new file mode 100644 index 00000000..0a21da0a --- /dev/null +++ b/node_modules/xml2js/lib/defaults.js @@ -0,0 +1,72 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + exports.defaults = { + "0.1": { + explicitCharkey: false, + trim: true, + normalize: true, + normalizeTags: false, + attrkey: "@", + charkey: "#", + explicitArray: false, + ignoreAttrs: false, + mergeAttrs: false, + explicitRoot: false, + validator: null, + xmlns: false, + explicitChildren: false, + childkey: '@@', + charsAsChildren: false, + includeWhiteChars: false, + async: false, + strict: true, + attrNameProcessors: null, + attrValueProcessors: null, + tagNameProcessors: null, + valueProcessors: null, + emptyTag: '' + }, + "0.2": { + explicitCharkey: false, + trim: false, + normalize: false, + normalizeTags: false, + attrkey: "$", + charkey: "_", + explicitArray: true, + ignoreAttrs: false, + mergeAttrs: false, + explicitRoot: true, + validator: null, + xmlns: false, + explicitChildren: false, + preserveChildrenOrder: false, + childkey: '$$', + charsAsChildren: false, + includeWhiteChars: false, + async: false, + strict: true, + attrNameProcessors: null, + attrValueProcessors: null, + tagNameProcessors: null, + valueProcessors: null, + rootName: 'root', + xmldec: { + 'version': '1.0', + 'encoding': 'UTF-8', + 'standalone': true + }, + doctype: null, + renderOpts: { + 'pretty': true, + 'indent': ' ', + 'newline': '\n' + }, + headless: false, + chunkSize: 10000, + emptyTag: '', + cdata: false + } + }; + +}).call(this); diff --git a/node_modules/xml2js/lib/parser.js b/node_modules/xml2js/lib/parser.js new file mode 100644 index 00000000..59f4d545 --- /dev/null +++ b/node_modules/xml2js/lib/parser.js @@ -0,0 +1,381 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var bom, defaults, events, isEmpty, processItem, processors, sax, setImmediate, + bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + sax = require('sax'); + + events = require('events'); + + bom = require('./bom'); + + processors = require('./processors'); + + setImmediate = require('timers').setImmediate; + + defaults = require('./defaults').defaults; + + isEmpty = function(thing) { + return typeof thing === "object" && (thing != null) && Object.keys(thing).length === 0; + }; + + processItem = function(processors, item, key) { + var i, len, process; + for (i = 0, len = processors.length; i < len; i++) { + process = processors[i]; + item = process(item, key); + } + return item; + }; + + exports.Parser = (function(superClass) { + extend(Parser, superClass); + + function Parser(opts) { + this.parseStringPromise = bind(this.parseStringPromise, this); + this.parseString = bind(this.parseString, this); + this.reset = bind(this.reset, 
this); + this.assignOrPush = bind(this.assignOrPush, this); + this.processAsync = bind(this.processAsync, this); + var key, ref, value; + if (!(this instanceof exports.Parser)) { + return new exports.Parser(opts); + } + this.options = {}; + ref = defaults["0.2"]; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + value = ref[key]; + this.options[key] = value; + } + for (key in opts) { + if (!hasProp.call(opts, key)) continue; + value = opts[key]; + this.options[key] = value; + } + if (this.options.xmlns) { + this.options.xmlnskey = this.options.attrkey + "ns"; + } + if (this.options.normalizeTags) { + if (!this.options.tagNameProcessors) { + this.options.tagNameProcessors = []; + } + this.options.tagNameProcessors.unshift(processors.normalize); + } + this.reset(); + } + + Parser.prototype.processAsync = function() { + var chunk, err; + try { + if (this.remaining.length <= this.options.chunkSize) { + chunk = this.remaining; + this.remaining = ''; + this.saxParser = this.saxParser.write(chunk); + return this.saxParser.close(); + } else { + chunk = this.remaining.substr(0, this.options.chunkSize); + this.remaining = this.remaining.substr(this.options.chunkSize, this.remaining.length); + this.saxParser = this.saxParser.write(chunk); + return setImmediate(this.processAsync); + } + } catch (error1) { + err = error1; + if (!this.saxParser.errThrown) { + this.saxParser.errThrown = true; + return this.emit(err); + } + } + }; + + Parser.prototype.assignOrPush = function(obj, key, newValue) { + if (!(key in obj)) { + if (!this.options.explicitArray) { + return obj[key] = newValue; + } else { + return obj[key] = [newValue]; + } + } else { + if (!(obj[key] instanceof Array)) { + obj[key] = [obj[key]]; + } + return obj[key].push(newValue); + } + }; + + Parser.prototype.reset = function() { + var attrkey, charkey, ontext, stack; + this.removeAllListeners(); + this.saxParser = sax.parser(this.options.strict, { + trim: false, + normalize: false, + xmlns: this.options.xmlns + }); + this.saxParser.errThrown = false; + this.saxParser.onerror = (function(_this) { + return function(error) { + _this.saxParser.resume(); + if (!_this.saxParser.errThrown) { + _this.saxParser.errThrown = true; + return _this.emit("error", error); + } + }; + })(this); + this.saxParser.onend = (function(_this) { + return function() { + if (!_this.saxParser.ended) { + _this.saxParser.ended = true; + return _this.emit("end", _this.resultObject); + } + }; + })(this); + this.saxParser.ended = false; + this.EXPLICIT_CHARKEY = this.options.explicitCharkey; + this.resultObject = null; + stack = []; + attrkey = this.options.attrkey; + charkey = this.options.charkey; + this.saxParser.onopentag = (function(_this) { + return function(node) { + var key, newValue, obj, processedKey, ref; + obj = {}; + obj[charkey] = ""; + if (!_this.options.ignoreAttrs) { + ref = node.attributes; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + if (!(attrkey in obj) && !_this.options.mergeAttrs) { + obj[attrkey] = {}; + } + newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key]; + processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key; + if (_this.options.mergeAttrs) { + _this.assignOrPush(obj, processedKey, newValue); + } else { + obj[attrkey][processedKey] = newValue; + } + } + } + obj["#name"] = _this.options.tagNameProcessors ? 
processItem(_this.options.tagNameProcessors, node.name) : node.name; + if (_this.options.xmlns) { + obj[_this.options.xmlnskey] = { + uri: node.uri, + local: node.local + }; + } + return stack.push(obj); + }; + })(this); + this.saxParser.onclosetag = (function(_this) { + return function() { + var cdata, emptyStr, key, node, nodeName, obj, objClone, old, s, xpath; + obj = stack.pop(); + nodeName = obj["#name"]; + if (!_this.options.explicitChildren || !_this.options.preserveChildrenOrder) { + delete obj["#name"]; + } + if (obj.cdata === true) { + cdata = obj.cdata; + delete obj.cdata; + } + s = stack[stack.length - 1]; + if (obj[charkey].match(/^\s*$/) && !cdata) { + emptyStr = obj[charkey]; + delete obj[charkey]; + } else { + if (_this.options.trim) { + obj[charkey] = obj[charkey].trim(); + } + if (_this.options.normalize) { + obj[charkey] = obj[charkey].replace(/\s{2,}/g, " ").trim(); + } + obj[charkey] = _this.options.valueProcessors ? processItem(_this.options.valueProcessors, obj[charkey], nodeName) : obj[charkey]; + if (Object.keys(obj).length === 1 && charkey in obj && !_this.EXPLICIT_CHARKEY) { + obj = obj[charkey]; + } + } + if (isEmpty(obj)) { + obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr; + } + if (_this.options.validator != null) { + xpath = "/" + ((function() { + var i, len, results; + results = []; + for (i = 0, len = stack.length; i < len; i++) { + node = stack[i]; + results.push(node["#name"]); + } + return results; + })()).concat(nodeName).join("/"); + (function() { + var err; + try { + return obj = _this.options.validator(xpath, s && s[nodeName], obj); + } catch (error1) { + err = error1; + return _this.emit("error", err); + } + })(); + } + if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') { + if (!_this.options.preserveChildrenOrder) { + node = {}; + if (_this.options.attrkey in obj) { + node[_this.options.attrkey] = obj[_this.options.attrkey]; + delete obj[_this.options.attrkey]; + } + if (!_this.options.charsAsChildren && _this.options.charkey in obj) { + node[_this.options.charkey] = obj[_this.options.charkey]; + delete obj[_this.options.charkey]; + } + if (Object.getOwnPropertyNames(obj).length > 0) { + node[_this.options.childkey] = obj; + } + obj = node; + } else if (s) { + s[_this.options.childkey] = s[_this.options.childkey] || []; + objClone = {}; + for (key in obj) { + if (!hasProp.call(obj, key)) continue; + objClone[key] = obj[key]; + } + s[_this.options.childkey].push(objClone); + delete obj["#name"]; + if (Object.keys(obj).length === 1 && charkey in obj && !_this.EXPLICIT_CHARKEY) { + obj = obj[charkey]; + } + } + } + if (stack.length > 0) { + return _this.assignOrPush(s, nodeName, obj); + } else { + if (_this.options.explicitRoot) { + old = obj; + obj = {}; + obj[nodeName] = old; + } + _this.resultObject = obj; + _this.saxParser.ended = true; + return _this.emit("end", _this.resultObject); + } + }; + })(this); + ontext = (function(_this) { + return function(text) { + var charChild, s; + s = stack[stack.length - 1]; + if (s) { + s[charkey] += text; + if (_this.options.explicitChildren && _this.options.preserveChildrenOrder && _this.options.charsAsChildren && (_this.options.includeWhiteChars || text.replace(/\\n/g, '').trim() !== '')) { + s[_this.options.childkey] = s[_this.options.childkey] || []; + charChild = { + '#name': '__text__' + }; + charChild[charkey] = text; + if (_this.options.normalize) { + charChild[charkey] = charChild[charkey].replace(/\s{2,}/g, " ").trim(); + } + 
s[_this.options.childkey].push(charChild); + } + return s; + } + }; + })(this); + this.saxParser.ontext = ontext; + return this.saxParser.oncdata = (function(_this) { + return function(text) { + var s; + s = ontext(text); + if (s) { + return s.cdata = true; + } + }; + })(this); + }; + + Parser.prototype.parseString = function(str, cb) { + var err; + if ((cb != null) && typeof cb === "function") { + this.on("end", function(result) { + this.reset(); + return cb(null, result); + }); + this.on("error", function(err) { + this.reset(); + return cb(err); + }); + } + try { + str = str.toString(); + if (str.trim() === '') { + this.emit("end", null); + return true; + } + str = bom.stripBOM(str); + if (this.options.async) { + this.remaining = str; + setImmediate(this.processAsync); + return this.saxParser; + } + return this.saxParser.write(str).close(); + } catch (error1) { + err = error1; + if (!(this.saxParser.errThrown || this.saxParser.ended)) { + this.emit('error', err); + return this.saxParser.errThrown = true; + } else if (this.saxParser.ended) { + throw err; + } + } + }; + + Parser.prototype.parseStringPromise = function(str) { + return new Promise((function(_this) { + return function(resolve, reject) { + return _this.parseString(str, function(err, value) { + if (err) { + return reject(err); + } else { + return resolve(value); + } + }); + }; + })(this)); + }; + + return Parser; + + })(events); + + exports.parseString = function(str, a, b) { + var cb, options, parser; + if (b != null) { + if (typeof b === 'function') { + cb = b; + } + if (typeof a === 'object') { + options = a; + } + } else { + if (typeof a === 'function') { + cb = a; + } + options = {}; + } + parser = new exports.Parser(options); + return parser.parseString(str, cb); + }; + + exports.parseStringPromise = function(str, a) { + var options, parser; + if (typeof a === 'object') { + options = a; + } + parser = new exports.Parser(options); + return parser.parseStringPromise(str); + }; + +}).call(this); diff --git a/node_modules/xml2js/lib/processors.js b/node_modules/xml2js/lib/processors.js new file mode 100644 index 00000000..89aa85f2 --- /dev/null +++ b/node_modules/xml2js/lib/processors.js @@ -0,0 +1,34 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var prefixMatch; + + prefixMatch = new RegExp(/(?!xmlns)^.*:/); + + exports.normalize = function(str) { + return str.toLowerCase(); + }; + + exports.firstCharLowerCase = function(str) { + return str.charAt(0).toLowerCase() + str.slice(1); + }; + + exports.stripPrefix = function(str) { + return str.replace(prefixMatch, ''); + }; + + exports.parseNumbers = function(str) { + if (!isNaN(str)) { + str = str % 1 === 0 ? 
parseInt(str, 10) : parseFloat(str); + } + return str; + }; + + exports.parseBooleans = function(str) { + if (/^(?:true|false)$/i.test(str)) { + str = str.toLowerCase() === 'true'; + } + return str; + }; + +}).call(this); diff --git a/node_modules/xml2js/lib/xml2js.js b/node_modules/xml2js/lib/xml2js.js new file mode 100644 index 00000000..24b6e699 --- /dev/null +++ b/node_modules/xml2js/lib/xml2js.js @@ -0,0 +1,39 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + "use strict"; + var builder, defaults, parser, processors, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + defaults = require('./defaults'); + + builder = require('./builder'); + + parser = require('./parser'); + + processors = require('./processors'); + + exports.defaults = defaults.defaults; + + exports.processors = processors; + + exports.ValidationError = (function(superClass) { + extend(ValidationError, superClass); + + function ValidationError(message) { + this.message = message; + } + + return ValidationError; + + })(Error); + + exports.Builder = builder.Builder; + + exports.Parser = parser.Parser; + + exports.parseString = parser.parseString; + + exports.parseStringPromise = parser.parseStringPromise; + +}).call(this); diff --git a/node_modules/xml2js/package.json b/node_modules/xml2js/package.json new file mode 100644 index 00000000..0769b611 --- /dev/null +++ b/node_modules/xml2js/package.json @@ -0,0 +1,92 @@ +{ + "name": "xml2js", + "description": "Simple XML to JavaScript object converter.", + "keywords": [ + "xml", + "json" + ], + "homepage": "https://github.com/Leonidas-from-XIV/node-xml2js", + "version": "0.4.23", + "author": "Marek Kubica (https://xivilization.net)", + "contributors": [ + "maqr (https://github.com/maqr)", + "Ben Weaver (http://benweaver.com/)", + "Jae Kwon (https://github.com/jaekwon)", + "Jim Robert", + "Ștefan Rusu (http://www.saltwaterc.eu/)", + "Carter Cole (http://cartercole.com/)", + "Kurt Raschke (http://www.kurtraschke.com/)", + "Contra (https://github.com/Contra)", + "Marcelo Diniz (https://github.com/mdiniz)", + "Michael Hart (https://github.com/mhart)", + "Zachary Scott (http://zacharyscott.net/)", + "Raoul Millais (https://github.com/raoulmillais)", + "Salsita Software (http://www.salsitasoft.com/)", + "Mike Schilling (http://www.emotive.com/)", + "Jackson Tian (http://weibo.com/shyvo)", + "Mikhail Zyatin (https://github.com/Sitin)", + "Chris Tavares (https://github.com/christav)", + "Frank Xu (http://f2e.us/)", + "Guido D'Albore (http://www.bitstorm.it/)", + "Jack Senechal (http://jacksenechal.com/)", + "Matthias Hölzl (https://github.com/hoelzl)", + "Camille Reynders (http://www.creynders.be/)", + "Taylor Gautier (https://github.com/tsgautier)", + "Todd Bryan (https://github.com/toddrbryan)", + "Leore Avidar (http://leoreavidar.com/)", + "Dave Aitken (http://www.actionshrimp.com/)", + "Shaney Orrowe ", + "Candle ", + "Jess Telford (http://jes.st)", + "Tom Hughes < (http://compton.nu/)", + "Piotr Rochala (http://rocha.la/)", + "Michael Avila (https://github.com/michaelavila)", + "Ryan Gahl (https://github.com/ryedin)", + "Eric Laberge (https://github.com/elaberge)", + "Benjamin E. 
Coe (https://twitter.com/benjamincoe)", + "Stephen Cresswell (https://github.com/cressie176)", + "Pascal Ehlert (http://www.hacksrus.net/)", + "Tom Spencer (http://fiznool.com/)", + "Tristian Flanagan (https://github.com/tflanagan)", + "Tim Johns (https://github.com/TimJohns)", + "Bogdan Chadkin (https://github.com/TrySound)", + "David Wood (http://codesleuth.co.uk/)", + "Nicolas Maquet (https://github.com/nmaquet)", + "Lovell Fuller (http://lovell.info/)", + "d3adc0d3 (https://github.com/d3adc0d3)" + ], + "main": "./lib/xml2js", + "files": [ + "lib" + ], + "directories": { + "lib": "./lib" + }, + "scripts": { + "build": "cake build", + "test": "zap", + "coverage": "nyc npm test && nyc report", + "coveralls": "nyc npm test && nyc report --reporter=text-lcov | coveralls", + "doc": "cake doc" + }, + "repository": { + "type": "git", + "url": "https://github.com/Leonidas-from-XIV/node-xml2js.git" + }, + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "devDependencies": { + "coffee-script": ">=1.10.0", + "coveralls": "^3.0.1", + "diff": ">=1.0.8", + "docco": ">=0.6.2", + "nyc": ">=2.2.1", + "zap": ">=0.2.9" + }, + "engines": { + "node": ">=4.0.0" + }, + "license": "MIT" +} diff --git a/node_modules/xmlbuilder/CHANGELOG.md b/node_modules/xmlbuilder/CHANGELOG.md new file mode 100644 index 00000000..610f412e --- /dev/null +++ b/node_modules/xmlbuilder/CHANGELOG.md @@ -0,0 +1,470 @@ +# Change Log + +All notable changes to this project are documented in this file. This project adheres to [Semantic Versioning](http://semver.org/#semantic-versioning-200). + +## [11.0.0] - 2019-02-18 +- Calling `end()` with arguments no longer overwrites writer options. See [#120](https://github.com/oozcitak/xmlbuilder-js/issues/120). +- Added writer state and customizable space and endline functions to help customize writer behavior. Also added `openNode` and `closeNode` functions to writer. See [#193](https://github.com/oozcitak/xmlbuilder-js/issues/193). +- Fixed a bug where writer functions would not be called for nodes with a single child node in pretty print mode. See [#195](https://github.com/oozcitak/xmlbuilder-js/issues/195). +- Renamed `elEscape` to `textEscape` in `XMLStringifier`. +- Fixed a bug where empty arrays would produce child nodes. See [#190](https://github.com/oozcitak/xmlbuilder-js/issues/190). +- Removed the `skipNullAttributes` option. `null` attributes are now skipped by default. Added the `keepNullAttributes` option in case someone needs the old behavior. +- Removed the `skipNullNodes` option. `null` nodes are now skipped by default. Added the `keepNullNodes` option in case someone needs the old behavior. +- `undefined` values are now skipped when converting JS objects. +- Renamed stringify functions. See [#194](https://github.com/oozcitak/xmlbuilder-js/issues/194): + * `eleName` -> `name` + * `attName` -> `name` + * `eleText` -> `text` +- Fixed argument order for `attribute` function in the writer. See [#196](https://github.com/oozcitak/xmlbuilder-js/issues/196). +- Added `openAttribute` and `closeAttribute` functions to writer. See [#196](https://github.com/oozcitak/xmlbuilder-js/issues/196). +- Added node types to node objects. Node types and writer states are exported by the module with the `nodeType` and `writerState` properties. +- Fixed a bug where array items would not be correctly converted. See [#159](https://github.com/oozcitak/xmlbuilder-js/issues/159). 
+- Fixed a bug where mixed-content inside JS objects with `#text` decorator would not be correctly converted. See [#171](https://github.com/oozcitak/xmlbuilder-js/issues/171). +- Fixed a bug where JS objects would not be expanded in callback mode. See [#173](https://github.com/oozcitak/xmlbuilder-js/issues/173). +- Fixed a bug where character validation would not obey document's XML version. Added separate validation for XML 1.0 and XML 1.1 documents. See [#169](https://github.com/oozcitak/xmlbuilder-js/issues/169). +- Fixed a bug where names would not be validated according to the spec. See [#49](https://github.com/oozcitak/xmlbuilder-js/issues/49). +- Renamed `text` property to `value` in comment and cdata nodes to unify the API. +- Removed `doctype` function to prevent name clash with DOM implementation. Use the `dtd` function instead. +- Removed dummy nodes from the XML tree (Those were created while chain-building the tree). +- Renamed `attributes`property to `attribs` to prevent name clash with DOM property with the same name. +- Implemented the DOM standard (read-only) to support XPath lookups. XML namespaces are not currently supported. See [#122](https://github.com/oozcitak/xmlbuilder-js/issues/122). + +## [10.1.1] - 2018-10-24 +- Fixed an edge case where a null node at root level would be printed although `skipNullNodes` was set. See [#187](https://github.com/oozcitak/xmlbuilder-js/issues/187). + +## [10.1.0] - 2018-10-10 +- Added the `skipNullNodes` option to skip nodes with null values. See [#158](https://github.com/oozcitak/xmlbuilder-js/issues/158). + +## [10.0.0] - 2018-04-26 +- Added current indentation level as a parameter to the onData function when in callback mode. See [#125](https://github.com/oozcitak/xmlbuilder-js/issues/125). +- Added name of the current node and parent node to error messages where possible. See [#152](https://github.com/oozcitak/xmlbuilder-js/issues/152). This has the potential to break code depending on the content of error messages. +- Fixed an issue where objects created with Object.create(null) created an error. See [#176](https://github.com/oozcitak/xmlbuilder-js/issues/176). +- Added test builds for node.js v8 and v10. + +## [9.0.7] - 2018-02-09 +- Simplified regex used for validating encoding. + +## [9.0.4] - 2017-08-16 +- `spacebeforeslash` writer option accepts `true` as well as space char(s). + +## [9.0.3] - 2017-08-15 +- `spacebeforeslash` writer option can now be used with XML fragments. + +## [9.0.2] - 2017-08-15 +- Added the `spacebeforeslash` writer option to add a space character before closing tags of empty elements. See +[#157](https://github.com/oozcitak/xmlbuilder-js/issues/157). + +## [9.0.1] - 2017-06-19 +- Fixed character validity checks to work with node.js 4.0 and 5.0. See +[#161](https://github.com/oozcitak/xmlbuilder-js/issues/161). + +## [9.0.0] - 2017-05-05 +- Removed case conversion options. +- Removed support for node.js 4.0 and 5.0. Minimum required version is now 6.0. +- Fixed valid char filter to use XML 1.1 instead of 1.0. See +[#147](https://github.com/oozcitak/xmlbuilder-js/issues/147). +- Added options for negative indentation and suppressing pretty printing of text +nodes. See +[#145](https://github.com/oozcitak/xmlbuilder-js/issues/145). + +## [8.2.2] - 2016-04-08 +- Falsy values can now be used as a text node in callback mode. + +## [8.2.1] - 2016-04-07 +- Falsy values can now be used as a text node. See +[#117](https://github.com/oozcitak/xmlbuilder-js/issues/117). 
+ +## [8.2.0] - 2016-04-01 +- Removed lodash dependency to keep the library small and simple. See +[#114](https://github.com/oozcitak/xmlbuilder-js/issues/114), +[#53](https://github.com/oozcitak/xmlbuilder-js/issues/53), +and [#43](https://github.com/oozcitak/xmlbuilder-js/issues/43). +- Added title case to name conversion options. + +## [8.1.0] - 2016-03-29 +- Added the callback option to the `begin` export function. When used with a +callback function, the XML document will be generated in chunks and each chunk +will be passed to the supplied function. In this mode, `begin` uses a different +code path and the builder should use much less memory since the entire XML tree +is not kept. There are a few drawbacks though. For example, traversing the document +tree or adding attributes to a node after it is written is not possible. It is +also not possible to remove nodes or attributes. + +``` js +var result = ''; + +builder.begin(function(chunk) { result += chunk; }) + .dec() + .ele('root') + .ele('xmlbuilder').up() + .end(); +``` + +- Replaced native `Object.assign` with `lodash.assign` to support old JS engines. See [#111](https://github.com/oozcitak/xmlbuilder-js/issues/111). + +## [8.0.0] - 2016-03-25 +- Added the `begin` export function. See the wiki for details. +- Added the ability to add comments and processing instructions before and after the root element. Added `commentBefore`, `commentAfter`, `instructionBefore` and `instructionAfter` functions for this purpose. +- Dropped support for old node.js releases. Minimum required node.js version is now 4.0. + +## [7.0.0] - 2016-03-21 +- Processing instructions are now created as regular nodes. This is a major breaking change if you are using processing instructions. Previously processing instructions were inserted before their parent node. After this change processing instructions are appended to the children of the parent node. Note that it is not currently possible to insert processing instructions before or after the root element. +```js +root.ele('node').ins('pi'); +// pre-v7 + +// v7 + +``` + +## [6.0.0] - 2016-03-20 +- Added custom XML writers. The default string conversion functions are now collected under the `XMLStringWriter` class which can be accessed by the `stringWriter(options)` function exported by the module. An `XMLStreamWriter` is also added which outputs the XML document to a writable stream. A stream writer can be created by calling the `streamWriter(stream, options)` function exported by the module. Both classes are heavily customizable and the details are added to the wiki. It is also possible to write an XML writer from scratch and use it when calling `end()` on the XML document. + +## [5.0.1] - 2016-03-08 +- Moved require statements for text case conversion to the top of files to reduce lazy requires. + +## [5.0.0] - 2016-03-05 +- Added text case option for element names and attribute names. Valid cases are `lower`, `upper`, `camel`, `kebab` and `snake`. +- Attribute and element values are escaped according to the [Canonical XML 1.0 specification](http://www.w3.org/TR/2000/WD-xml-c14n-20000119.html#charescaping). See [#54](https://github.com/oozcitak/xmlbuilder-js/issues/54) and [#86](https://github.com/oozcitak/xmlbuilder-js/issues/86). +- Added the `allowEmpty` option to `end()`. When this option is set, empty elements are not self-closed. +- Added support for [nested CDATA](https://en.wikipedia.org/wiki/CDATA#Nesting). The triad `]]>` in CDATA is now automatically replaced with `]]]]>`. 
+ +## [4.2.1] - 2016-01-15 +- Updated lodash dependency to 4.0.0. + +## [4.2.0] - 2015-12-16 +- Added the `noDoubleEncoding` option to `create()` to control whether existing html entities are encoded. + +## [4.1.0] - 2015-11-11 +- Added the `separateArrayItems` option to `create()` to control how arrays are handled when converting from objects. e.g. + +```js +root.ele({ number: [ "one", "two" ]}); +// with separateArrayItems: true + + + + +// with separateArrayItems: false +one +two +``` + +## [4.0.0] - 2015-11-01 +- Removed the `#list` decorator. Array items are now created as child nodes by default. +- Fixed a bug where the XML encoding string was checked partially. + +## [3.1.0] - 2015-09-19 +- `#list` decorator ignores empty arrays. + +## [3.0.0] - 2015-09-10 +- Allow `\r`, `\n` and `\t` in attribute values without escaping. See [#86](https://github.com/oozcitak/xmlbuilder-js/issues/86). + +## [2.6.5] - 2015-09-09 +- Use native `isArray` instead of lodash. +- Indentation of processing instructions are set to the parent element's. + +## [2.6.4] - 2015-05-27 +- Updated lodash dependency to 3.5.0. + +## [2.6.3] - 2015-05-27 +- Bumped version because previous release was not published on npm. + +## [2.6.2] - 2015-03-10 +- Updated lodash dependency to 3.5.0. + +## [2.6.1] - 2015-02-20 +- Updated lodash dependency to 3.3.0. + +## [2.6.0] - 2015-02-20 +- Fixed a bug where the `XMLNode` constructor overwrote the super class parent. +- Removed document property from cloned nodes. +- Switched to mocha.js for testing. + +## [2.5.2] - 2015-02-16 +- Updated lodash dependency to 3.2.0. + +## [2.5.1] - 2015-02-09 +- Updated lodash dependency to 3.1.0. +- Support all node >= 0.8. + +## [2.5.0] - 2015-00-03 +- Updated lodash dependency to 3.0.0. + +## [2.4.6] - 2015-01-26 +- Show more information from attribute creation with null values. +- Added iojs as an engine. +- Self close elements with empty text. + +## [2.4.5] - 2014-11-15 +- Fixed prepublish script to run on windows. +- Fixed bug in XMLStringifier where an undefined value was used while reporting an invalid encoding value. +- Moved require statements to the top of files to reduce lazy requires. See [#62](https://github.com/oozcitak/xmlbuilder-js/issues/62). + +## [2.4.4] - 2014-09-08 +- Added the `offset` option to `toString()` for use in XML fragments. + +## [2.4.3] - 2014-08-13 +- Corrected license in package description. + +## [2.4.2] - 2014-08-13 +- Dropped performance test and memwatch dependency. + +## [2.4.1] - 2014-08-12 +- Fixed a bug where empty indent string was omitted when pretty printing. See [#59](https://github.com/oozcitak/xmlbuilder-js/issues/59). + +## [2.4.0] - 2014-08-04 +- Correct cases of pubID and sysID. +- Use single lodash instead of separate npm modules. See [#53](https://github.com/oozcitak/xmlbuilder-js/issues/53). +- Escape according to Canonical XML 1.0. See [#54](https://github.com/oozcitak/xmlbuilder-js/issues/54). + +## [2.3.0] - 2014-07-17 +- Convert objects to JS primitives while sanitizing user input. +- Object builder preserves items with null values. See [#44](https://github.com/oozcitak/xmlbuilder-js/issues/44). +- Use modularized lodash functions to cut down dependencies. +- Process empty objects when converting from objects so that we don't throw on empty child objects. + +## [2.2.1] - 2014-04-04 +- Bumped version because previous release was not published on npm. + +## [2.2.0] - 2014-04-04 +- Switch to lodash from underscore. +- Removed legacy `ext` option from `create()`. 
+- Drop old node versions: 0.4, 0.5, 0.6. 0.8 is the minimum requirement from now on. + +## [2.1.0] - 2013-12-30 +- Removed duplicate null checks from constructors. +- Fixed node count in performance test. +- Added option for skipping null attribute values. See [#26](https://github.com/oozcitak/xmlbuilder-js/issues/26). +- Allow multiple values in `att()` and `ins()`. +- Added ability to run individual performance tests. +- Added flag for ignoring decorator strings. + +## [2.0.1] - 2013-12-24 +- Removed performance tests from npm package. + +## [2.0.0] - 2013-12-24 +- Combined loops for speed up. +- Added support for the DTD and XML declaration. +- `clone` includes attributes. +- Added performance tests. +- Evaluate attribute value if function. +- Evaluate instruction value if function. + +## [1.1.2] - 2013-12-11 +- Changed processing instruction decorator to `?`. + +## [1.1.1] - 2013-12-11 +- Added processing instructions to JS object conversion. + +## [1.1.0] - 2013-12-10 +- Added license to package. +- `create()` and `element()` accept JS object to fully build the document. +- Added `nod()` and `n()` aliases for `node()`. +- Renamed `convertAttChar` decorator to `convertAttKey`. +- Ignore empty decorator strings when converting JS objects. + +## [1.0.2] - 2013-11-27 +- Removed temp file which was accidentally included in the package. + +## [1.0.1] - 2013-11-27 +- Custom stringify functions affect current instance only. + +## [1.0.0] - 2013-11-27 +- Added processing instructions. +- Added stringify functions to sanitize and convert input values. +- Added option for headless XML documents. +- Added vows tests. +- Removed Makefile. Using npm publish scripts instead. +- Removed the `begin()` function. `create()` begins the document by creating the root node. + +## [0.4.3] - 2013-11-08 +- Added option to include surrogate pairs in XML content. See [#29](https://github.com/oozcitak/xmlbuilder-js/issues/29). +- Fixed empty value string representation in pretty mode. +- Added pre and postpublish scripts to package.json. +- Filtered out prototype properties when appending attributes. See [#31](https://github.com/oozcitak/xmlbuilder-js/issues/31). + +## [0.4.2] - 2012-09-14 +- Removed README.md from `.npmignore`. + +## [0.4.1] - 2012-08-31 +- Removed `begin()` calls in favor of `XMLBuilder` constructor. +- Added the `end()` function. `end()` is a convenience over `doc().toString()`. + +## [0.4.0] - 2012-08-31 +- Added arguments to `XMLBuilder` constructor to allow the name of the root element and XML prolog to be defined in one line. +- Soft deprecated `begin()`. + +## [0.3.11] - 2012-08-13 +- Package keywords are fixed to be an array of values. + +## [0.3.10] - 2012-08-13 +- Brought back npm package contents which were lost due to incorrect configuration of `package.json` in previous releases. + +## [0.3.3] - 2012-07-27 +- Implemented `importXMLBuilder()`. + +## [0.3.2] - 2012-07-20 +- Fixed a duplicated escaping problem on `element()`. +- Fixed a problem with text node creation from empty string. +- Calling `root()` on the document element returns the root element. +- `XMLBuilder` no longer extends `XMLFragment`. + +## [0.3.1] - 2011-11-28 +- Added guards for document element so that nodes cannot be inserted at document level. + +## [0.3.0] - 2011-11-28 +- Added `doc()` to return the document element. + +## [0.2.2] - 2011-11-28 +- Prevent code relying on `up()`'s older behavior to break. + +## [0.2.1] - 2011-11-28 +- Added the `root()` function. 
+ +## [0.2.0] - 2011-11-21 +- Added Travis-CI integration. +- Added coffee-script dependency. +- Added insert, traversal and delete functions. + +## [0.1.7] - 2011-10-25 +- No changes. Accidental release. + +## [0.1.6] - 2011-10-25 +- Corrected `package.json` bugs link to `url` from `web`. + +## [0.1.5] - 2011-08-08 +- Added missing npm package contents. + +## [0.1.4] - 2011-07-29 +- Text-only nodes are no longer indented. +- Added documentation for multiple instances. + +## [0.1.3] - 2011-07-27 +- Exported the `create()` function to return a new instance. This allows multiple builder instances to be constructed. +- Fixed `u()` function so that it now correctly calls `up()`. +- Fixed typo in `element()` so that `attributes` and `text` can be passed interchangeably. + +## [0.1.2] - 2011-06-03 +- `ele()` accepts element text. +- `attributes()` now overrides existing attributes if passed the same attribute name. + +## [0.1.1] - 2011-05-19 +- Added the raw output option. +- Removed most validity checks. + +## [0.1.0] - 2011-04-27 +- `text()` and `cdata()` now return parent element. +- Attribute values are escaped. + +## [0.0.7] - 2011-04-23 +- Coerced text values to string. + +## [0.0.6] - 2011-02-23 +- Added support for XML comments. +- Text nodes are checked against CharData. + +## [0.0.5] - 2010-12-31 +- Corrected the name of the main npm module in `package.json`. + +## [0.0.4] - 2010-12-28 +- Added `.npmignore`. + +## [0.0.3] - 2010-12-27 +- root element is now constructed in `begin()`. +- moved prolog to `begin()`. +- Added the ability to have CDATA in element text. +- Removed unused prolog aliases. +- Removed `builder()` function from main module. +- Added the name of the main npm module in `package.json`. + +## [0.0.2] - 2010-11-03 +- `element()` expands nested arrays. +- Added pretty printing. +- Added the `up()`, `build()` and `prolog()` functions. +- Added readme. 
+ +## 0.0.1 - 2010-11-02 +- Initial release + +[11.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v10.1.1...v11.0.0 +[10.1.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v10.1.0...v10.1.1 +[10.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v10.0.0...v10.1.0 +[10.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.7...v10.0.0 +[9.0.7]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.4...v9.0.7 +[9.0.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.3...v9.0.4 +[9.0.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.2...v9.0.3 +[9.0.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.1...v9.0.2 +[9.0.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v9.0.0...v9.0.1 +[9.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.2.2...v9.0.0 +[8.2.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.2.1...v8.2.2 +[8.2.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.2.0...v8.2.1 +[8.2.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.1.0...v8.2.0 +[8.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v8.0.0...v8.1.0 +[8.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v7.0.0...v8.0.0 +[7.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v6.0.0...v7.0.0 +[6.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v5.0.1...v6.0.0 +[5.0.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v5.0.0...v5.0.1 +[5.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v4.2.1...v5.0.0 +[4.2.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v4.2.0...v4.2.1 +[4.2.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v4.1.0...v4.2.0 +[4.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v4.0.0...v4.1.0 +[4.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v3.1.0...v4.0.0 +[3.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v3.0.0...v3.1.0 +[3.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.5...v3.0.0 +[2.6.5]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.4...v2.6.5 +[2.6.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.3...v2.6.4 +[2.6.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.2...v2.6.3 +[2.6.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.1...v2.6.2 +[2.6.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.6.0...v2.6.1 +[2.6.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.5.2...v2.6.0 +[2.5.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.5.1...v2.5.2 +[2.5.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.5.0...v2.5.1 +[2.5.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.6...v2.5.0 +[2.4.6]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.5...v2.4.6 +[2.4.5]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.4...v2.4.5 +[2.4.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.3...v2.4.4 +[2.4.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.2...v2.4.3 +[2.4.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.1...v2.4.2 +[2.4.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.4.0...v2.4.1 +[2.4.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.3.0...v2.4.0 +[2.3.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.2.1...v2.3.0 +[2.2.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.2.0...v2.2.1 +[2.2.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.1.0...v2.2.0 +[2.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.0.1...v2.1.0 +[2.0.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v2.0.0...v2.0.1 
+[2.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.1.2...v2.0.0 +[1.1.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.1.1...v1.1.2 +[1.1.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.1.0...v1.1.1 +[1.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.0.2...v1.1.0 +[1.0.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.0.1...v1.0.2 +[1.0.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v1.0.0...v1.0.1 +[1.0.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.4.3...v1.0.0 +[0.4.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.4.2...v0.4.3 +[0.4.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.4.1...v0.4.2 +[0.4.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.4.0...v0.4.1 +[0.4.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.11...v0.4.0 +[0.3.11]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.10...v0.3.11 +[0.3.10]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.3...v0.3.10 +[0.3.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.2...v0.3.3 +[0.3.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.1...v0.3.2 +[0.3.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.3.0...v0.3.1 +[0.3.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.2.2...v0.3.0 +[0.2.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.2.1...v0.2.2 +[0.2.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.2.0...v0.2.1 +[0.2.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.7...v0.2.0 +[0.1.7]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.6...v0.1.7 +[0.1.6]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.5...v0.1.6 +[0.1.5]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.4...v0.1.5 +[0.1.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.3...v0.1.4 +[0.1.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.2...v0.1.3 +[0.1.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.1...v0.1.2 +[0.1.1]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.1.0...v0.1.1 +[0.1.0]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.7...v0.1.0 +[0.0.7]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.6...v0.0.7 +[0.0.6]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.5...v0.0.6 +[0.0.5]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.4...v0.0.5 +[0.0.4]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.3...v0.0.4 +[0.0.3]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.2...v0.0.3 +[0.0.2]: https://github.com/oozcitak/xmlbuilder-js/compare/v0.0.1...v0.0.2 + diff --git a/node_modules/xmlbuilder/LICENSE b/node_modules/xmlbuilder/LICENSE new file mode 100644 index 00000000..9fb97002 --- /dev/null +++ b/node_modules/xmlbuilder/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 Ozgur Ozcitak + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/xmlbuilder/README.md b/node_modules/xmlbuilder/README.md
new file mode 100644
index 00000000..1a96edb5
--- /dev/null
+++ b/node_modules/xmlbuilder/README.md
@@ -0,0 +1,86 @@
+# xmlbuilder-js
+
+An XML builder for [node.js](https://nodejs.org/) similar to
+[java-xmlbuilder](https://github.com/jmurty/java-xmlbuilder).
+
+[![License](http://img.shields.io/npm/l/xmlbuilder.svg?style=flat-square)](http://opensource.org/licenses/MIT)
+[![NPM Version](http://img.shields.io/npm/v/xmlbuilder.svg?style=flat-square)](https://npmjs.com/package/xmlbuilder)
+[![NPM Downloads](https://img.shields.io/npm/dm/xmlbuilder.svg?style=flat-square)](https://npmjs.com/package/xmlbuilder)
+
+[![Travis Build Status](http://img.shields.io/travis/oozcitak/xmlbuilder-js.svg?style=flat-square)](http://travis-ci.org/oozcitak/xmlbuilder-js)
+[![AppVeyor Build status](https://ci.appveyor.com/api/projects/status/bf7odb20hj77isry?svg=true)](https://ci.appveyor.com/project/oozcitak/xmlbuilder-js)
+[![Dev Dependency Status](http://img.shields.io/david/dev/oozcitak/xmlbuilder-js.svg?style=flat-square)](https://david-dm.org/oozcitak/xmlbuilder-js)
+[![Code Coverage](https://img.shields.io/coveralls/oozcitak/xmlbuilder-js.svg?style=flat-square)](https://coveralls.io/github/oozcitak/xmlbuilder-js)
+
+### Installation:
+
+``` sh
+npm install xmlbuilder
+```
+
+### Usage:
+
+``` js
+var builder = require('xmlbuilder');
+var xml = builder.create('root')
+  .ele('xmlbuilder')
+    .ele('repo', {'type': 'git'}, 'git://github.com/oozcitak/xmlbuilder-js.git')
+  .end({ pretty: true});
+
+console.log(xml);
+```
+
+will result in:
+
+``` xml
+<?xml version="1.0"?>
+<root>
+  <xmlbuilder>
+    <repo type="git">git://github.com/oozcitak/xmlbuilder-js.git</repo>
+  </xmlbuilder>
+</root>
+```
+
+It is also possible to convert objects into nodes:
+
+``` js
+builder.create({
+  root: {
+    xmlbuilder: {
+      repo: {
+        '@type': 'git', // attributes start with @
+        '#text': 'git://github.com/oozcitak/xmlbuilder-js.git' // text node
+      }
+    }
+  }
+});
+```
+
+If you need to do some processing:
+
+``` js
+var root = builder.create('squares');
+root.com('f(x) = x^2');
+for(var i = 1; i <= 5; i++)
+{
+  var item = root.ele('data');
+  item.att('x', i);
+  item.att('y', i * i);
+}
+```
+
+This will result in:
+
+``` xml
+<?xml version="1.0"?>
+<squares>
+  <!-- f(x) = x^2 -->
+  <data x="1" y="1"/>
+  <data x="2" y="4"/>
+  <data x="3" y="9"/>
+  <data x="4" y="16"/>
+  <data x="5" y="25"/>
+</squares>
+```
+
+See the [wiki](https://github.com/oozcitak/xmlbuilder-js/wiki) for details and [examples](https://github.com/oozcitak/xmlbuilder-js/wiki/Examples) for more complex examples.
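+
+As an additional, hedged sketch (an editorial addition, not part of the upstream
+README; the element names are invented): chaining `up()` returns to the parent
+element, and `end({ pretty: false })` renders the whole document on one line:
+
+``` js
+var builder = require('xmlbuilder');
+
+// up() climbs back to the parent so siblings can be chained;
+// end({ pretty: false }) skips indentation and newlines.
+var xml = builder.create('config', { encoding: 'UTF-8' })
+  .ele('host', 'localhost').up()
+  .ele('port', '8080').up()
+  .end({ pretty: false });
+
+console.log(xml);
+// Expected (roughly):
+// <?xml version="1.0" encoding="UTF-8"?><config><host>localhost</host><port>8080</port></config>
+```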
diff --git a/node_modules/xmlbuilder/appveyor.yml b/node_modules/xmlbuilder/appveyor.yml new file mode 100644 index 00000000..9604b785 --- /dev/null +++ b/node_modules/xmlbuilder/appveyor.yml @@ -0,0 +1,20 @@ +environment: + matrix: + - nodejs_version: "4" + - nodejs_version: "5" + - nodejs_version: "6" + - nodejs_version: "8" + - nodejs_version: "10" + - nodejs_version: "" # latest + +install: + - ps: "Install-Product node $env:nodejs_version" + - "npm install" + +test_script: + - "node --version" + - "npm --version" + - "npm test" + +build: off + diff --git a/node_modules/xmlbuilder/lib/Derivation.js b/node_modules/xmlbuilder/lib/Derivation.js new file mode 100644 index 00000000..2abfd08d --- /dev/null +++ b/node_modules/xmlbuilder/lib/Derivation.js @@ -0,0 +1,10 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + Restriction: 1, + Extension: 2, + Union: 4, + List: 8 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/DocumentPosition.js b/node_modules/xmlbuilder/lib/DocumentPosition.js new file mode 100644 index 00000000..1cbd21c1 --- /dev/null +++ b/node_modules/xmlbuilder/lib/DocumentPosition.js @@ -0,0 +1,12 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + Disconnected: 1, + Preceding: 2, + Following: 4, + Contains: 8, + ContainedBy: 16, + ImplementationSpecific: 32 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/NodeType.js b/node_modules/xmlbuilder/lib/NodeType.js new file mode 100644 index 00000000..4c200e3c --- /dev/null +++ b/node_modules/xmlbuilder/lib/NodeType.js @@ -0,0 +1,23 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + Element: 1, + Attribute: 2, + Text: 3, + CData: 4, + EntityReference: 5, + EntityDeclaration: 6, + ProcessingInstruction: 7, + Comment: 8, + Document: 9, + DocType: 10, + DocumentFragment: 11, + NotationDeclaration: 12, + Declaration: 201, + Raw: 202, + AttributeDeclaration: 203, + ElementDeclaration: 204, + Dummy: 205 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/OperationType.js b/node_modules/xmlbuilder/lib/OperationType.js new file mode 100644 index 00000000..29428f61 --- /dev/null +++ b/node_modules/xmlbuilder/lib/OperationType.js @@ -0,0 +1,11 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + Clones: 1, + Imported: 2, + Deleted: 3, + Renamed: 4, + Adopted: 5 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/Utility.js b/node_modules/xmlbuilder/lib/Utility.js new file mode 100644 index 00000000..1d42cfd3 --- /dev/null +++ b/node_modules/xmlbuilder/lib/Utility.js @@ -0,0 +1,83 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var assign, getValue, isArray, isEmpty, isFunction, isObject, isPlainObject, + slice = [].slice, + hasProp = {}.hasOwnProperty; + + assign = function() { + var i, key, len, source, sources, target; + target = arguments[0], sources = 2 <= arguments.length ? 
slice.call(arguments, 1) : []; + if (isFunction(Object.assign)) { + Object.assign.apply(null, arguments); + } else { + for (i = 0, len = sources.length; i < len; i++) { + source = sources[i]; + if (source != null) { + for (key in source) { + if (!hasProp.call(source, key)) continue; + target[key] = source[key]; + } + } + } + } + return target; + }; + + isFunction = function(val) { + return !!val && Object.prototype.toString.call(val) === '[object Function]'; + }; + + isObject = function(val) { + var ref; + return !!val && ((ref = typeof val) === 'function' || ref === 'object'); + }; + + isArray = function(val) { + if (isFunction(Array.isArray)) { + return Array.isArray(val); + } else { + return Object.prototype.toString.call(val) === '[object Array]'; + } + }; + + isEmpty = function(val) { + var key; + if (isArray(val)) { + return !val.length; + } else { + for (key in val) { + if (!hasProp.call(val, key)) continue; + return false; + } + return true; + } + }; + + isPlainObject = function(val) { + var ctor, proto; + return isObject(val) && (proto = Object.getPrototypeOf(val)) && (ctor = proto.constructor) && (typeof ctor === 'function') && (ctor instanceof ctor) && (Function.prototype.toString.call(ctor) === Function.prototype.toString.call(Object)); + }; + + getValue = function(obj) { + if (isFunction(obj.valueOf)) { + return obj.valueOf(); + } else { + return obj; + } + }; + + module.exports.assign = assign; + + module.exports.isFunction = isFunction; + + module.exports.isObject = isObject; + + module.exports.isArray = isArray; + + module.exports.isEmpty = isEmpty; + + module.exports.isPlainObject = isPlainObject; + + module.exports.getValue = getValue; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/WriterState.js b/node_modules/xmlbuilder/lib/WriterState.js new file mode 100644 index 00000000..0923eec9 --- /dev/null +++ b/node_modules/xmlbuilder/lib/WriterState.js @@ -0,0 +1,10 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + module.exports = { + None: 0, + OpenTag: 1, + InsideTag: 2, + CloseTag: 3 + }; + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLAttribute.js b/node_modules/xmlbuilder/lib/XMLAttribute.js new file mode 100644 index 00000000..c208566d --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLAttribute.js @@ -0,0 +1,108 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLAttribute, XMLNode; + + NodeType = require('./NodeType'); + + XMLNode = require('./XMLNode'); + + module.exports = XMLAttribute = (function() { + function XMLAttribute(parent, name, value) { + this.parent = parent; + if (this.parent) { + this.options = this.parent.options; + this.stringify = this.parent.stringify; + } + if (name == null) { + throw new Error("Missing attribute name. 
" + this.debugInfo(name)); + } + this.name = this.stringify.name(name); + this.value = this.stringify.attValue(value); + this.type = NodeType.Attribute; + this.isId = false; + this.schemaTypeInfo = null; + } + + Object.defineProperty(XMLAttribute.prototype, 'nodeType', { + get: function() { + return this.type; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'ownerElement', { + get: function() { + return this.parent; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'textContent', { + get: function() { + return this.value; + }, + set: function(value) { + return this.value = value || ''; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'namespaceURI', { + get: function() { + return ''; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'prefix', { + get: function() { + return ''; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'localName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLAttribute.prototype, 'specified', { + get: function() { + return true; + } + }); + + XMLAttribute.prototype.clone = function() { + return Object.create(this); + }; + + XMLAttribute.prototype.toString = function(options) { + return this.options.writer.attribute(this, this.options.writer.filterOptions(options)); + }; + + XMLAttribute.prototype.debugInfo = function(name) { + name = name || this.name; + if (name == null) { + return "parent: <" + this.parent.name + ">"; + } else { + return "attribute: {" + name + "}, parent: <" + this.parent.name + ">"; + } + }; + + XMLAttribute.prototype.isEqualNode = function(node) { + if (node.namespaceURI !== this.namespaceURI) { + return false; + } + if (node.prefix !== this.prefix) { + return false; + } + if (node.localName !== this.localName) { + return false; + } + if (node.value !== this.value) { + return false; + } + return true; + }; + + return XMLAttribute; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLCData.js b/node_modules/xmlbuilder/lib/XMLCData.js new file mode 100644 index 00000000..c732ec5b --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLCData.js @@ -0,0 +1,36 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLCData, XMLCharacterData, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLCharacterData = require('./XMLCharacterData'); + + module.exports = XMLCData = (function(superClass) { + extend(XMLCData, superClass); + + function XMLCData(parent, text) { + XMLCData.__super__.constructor.call(this, parent); + if (text == null) { + throw new Error("Missing CDATA text. 
" + this.debugInfo()); + } + this.name = "#cdata-section"; + this.type = NodeType.CData; + this.value = this.stringify.cdata(text); + } + + XMLCData.prototype.clone = function() { + return Object.create(this); + }; + + XMLCData.prototype.toString = function(options) { + return this.options.writer.cdata(this, this.options.writer.filterOptions(options)); + }; + + return XMLCData; + + })(XMLCharacterData); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLCharacterData.js b/node_modules/xmlbuilder/lib/XMLCharacterData.js new file mode 100644 index 00000000..c007a181 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLCharacterData.js @@ -0,0 +1,79 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLCharacterData, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + module.exports = XMLCharacterData = (function(superClass) { + extend(XMLCharacterData, superClass); + + function XMLCharacterData(parent) { + XMLCharacterData.__super__.constructor.call(this, parent); + this.value = ''; + } + + Object.defineProperty(XMLCharacterData.prototype, 'data', { + get: function() { + return this.value; + }, + set: function(value) { + return this.value = value || ''; + } + }); + + Object.defineProperty(XMLCharacterData.prototype, 'length', { + get: function() { + return this.value.length; + } + }); + + Object.defineProperty(XMLCharacterData.prototype, 'textContent', { + get: function() { + return this.value; + }, + set: function(value) { + return this.value = value || ''; + } + }); + + XMLCharacterData.prototype.clone = function() { + return Object.create(this); + }; + + XMLCharacterData.prototype.substringData = function(offset, count) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.appendData = function(arg) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.insertData = function(offset, arg) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.deleteData = function(offset, count) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLCharacterData.prototype.replaceData = function(offset, count, arg) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLCharacterData.prototype.isEqualNode = function(node) { + if (!XMLCharacterData.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.data !== this.data) { + return false; + } + return true; + }; + + return XMLCharacterData; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLComment.js b/node_modules/xmlbuilder/lib/XMLComment.js new file mode 100644 index 00000000..82872168 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLComment.js @@ -0,0 +1,36 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLCharacterData, XMLComment, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLCharacterData = require('./XMLCharacterData'); + + module.exports = XMLComment = (function(superClass) { + extend(XMLComment, superClass); + + function XMLComment(parent, text) { + XMLComment.__super__.constructor.call(this, parent); + if (text == null) { + throw new Error("Missing comment text. " + this.debugInfo()); + } + this.name = "#comment"; + this.type = NodeType.Comment; + this.value = this.stringify.comment(text); + } + + XMLComment.prototype.clone = function() { + return Object.create(this); + }; + + XMLComment.prototype.toString = function(options) { + return this.options.writer.comment(this, this.options.writer.filterOptions(options)); + }; + + return XMLComment; + + })(XMLCharacterData); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDOMConfiguration.js b/node_modules/xmlbuilder/lib/XMLDOMConfiguration.js new file mode 100644 index 00000000..b331b86f --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDOMConfiguration.js @@ -0,0 +1,64 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMConfiguration, XMLDOMErrorHandler, XMLDOMStringList; + + XMLDOMErrorHandler = require('./XMLDOMErrorHandler'); + + XMLDOMStringList = require('./XMLDOMStringList'); + + module.exports = XMLDOMConfiguration = (function() { + function XMLDOMConfiguration() { + var clonedSelf; + this.defaultParams = { + "canonical-form": false, + "cdata-sections": false, + "comments": false, + "datatype-normalization": false, + "element-content-whitespace": true, + "entities": true, + "error-handler": new XMLDOMErrorHandler(), + "infoset": true, + "validate-if-schema": false, + "namespaces": true, + "namespace-declarations": true, + "normalize-characters": false, + "schema-location": '', + "schema-type": '', + "split-cdata-sections": true, + "validate": false, + "well-formed": true + }; + this.params = clonedSelf = Object.create(this.defaultParams); + } + + Object.defineProperty(XMLDOMConfiguration.prototype, 'parameterNames', { + get: function() { + return new XMLDOMStringList(Object.keys(this.defaultParams)); + } + }); + + XMLDOMConfiguration.prototype.getParameter = function(name) { + if (this.params.hasOwnProperty(name)) { + return this.params[name]; + } else { + return null; + } + }; + + XMLDOMConfiguration.prototype.canSetParameter = function(name, value) { + return true; + }; + + XMLDOMConfiguration.prototype.setParameter = function(name, value) { + if (value != null) { + return this.params[name] = value; + } else { + return delete this.params[name]; + } + }; + + return 
XMLDOMConfiguration; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js b/node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js new file mode 100644 index 00000000..4a0446c0 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js @@ -0,0 +1,16 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMErrorHandler; + + module.exports = XMLDOMErrorHandler = (function() { + function XMLDOMErrorHandler() {} + + XMLDOMErrorHandler.prototype.handleError = function(error) { + throw new Error(error); + }; + + return XMLDOMErrorHandler; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDOMImplementation.js b/node_modules/xmlbuilder/lib/XMLDOMImplementation.js new file mode 100644 index 00000000..4f9f9db1 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDOMImplementation.js @@ -0,0 +1,32 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMImplementation; + + module.exports = XMLDOMImplementation = (function() { + function XMLDOMImplementation() {} + + XMLDOMImplementation.prototype.hasFeature = function(feature, version) { + return true; + }; + + XMLDOMImplementation.prototype.createDocumentType = function(qualifiedName, publicId, systemId) { + throw new Error("This DOM method is not implemented."); + }; + + XMLDOMImplementation.prototype.createDocument = function(namespaceURI, qualifiedName, doctype) { + throw new Error("This DOM method is not implemented."); + }; + + XMLDOMImplementation.prototype.createHTMLDocument = function(title) { + throw new Error("This DOM method is not implemented."); + }; + + XMLDOMImplementation.prototype.getFeature = function(feature, version) { + throw new Error("This DOM method is not implemented."); + }; + + return XMLDOMImplementation; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDOMStringList.js b/node_modules/xmlbuilder/lib/XMLDOMStringList.js new file mode 100644 index 00000000..ba558c5b --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDOMStringList.js @@ -0,0 +1,28 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLDOMStringList; + + module.exports = XMLDOMStringList = (function() { + function XMLDOMStringList(arr) { + this.arr = arr || []; + } + + Object.defineProperty(XMLDOMStringList.prototype, 'length', { + get: function() { + return this.arr.length; + } + }); + + XMLDOMStringList.prototype.item = function(index) { + return this.arr[index] || null; + }; + + XMLDOMStringList.prototype.contains = function(str) { + return this.arr.indexOf(str) !== -1; + }; + + return XMLDOMStringList; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDTDAttList.js b/node_modules/xmlbuilder/lib/XMLDTDAttList.js new file mode 100644 index 00000000..aca9dbd4 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDTDAttList.js @@ -0,0 +1,55 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDAttList, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDTDAttList = (function(superClass) { + extend(XMLDTDAttList, superClass); + + function XMLDTDAttList(parent, elementName, attributeName, attributeType, defaultValueType, defaultValue) { + 
XMLDTDAttList.__super__.constructor.call(this, parent); + if (elementName == null) { + throw new Error("Missing DTD element name. " + this.debugInfo()); + } + if (attributeName == null) { + throw new Error("Missing DTD attribute name. " + this.debugInfo(elementName)); + } + if (!attributeType) { + throw new Error("Missing DTD attribute type. " + this.debugInfo(elementName)); + } + if (!defaultValueType) { + throw new Error("Missing DTD attribute default. " + this.debugInfo(elementName)); + } + if (defaultValueType.indexOf('#') !== 0) { + defaultValueType = '#' + defaultValueType; + } + if (!defaultValueType.match(/^(#REQUIRED|#IMPLIED|#FIXED|#DEFAULT)$/)) { + throw new Error("Invalid default value type; expected: #REQUIRED, #IMPLIED, #FIXED or #DEFAULT. " + this.debugInfo(elementName)); + } + if (defaultValue && !defaultValueType.match(/^(#FIXED|#DEFAULT)$/)) { + throw new Error("Default value only applies to #FIXED or #DEFAULT. " + this.debugInfo(elementName)); + } + this.elementName = this.stringify.name(elementName); + this.type = NodeType.AttributeDeclaration; + this.attributeName = this.stringify.name(attributeName); + this.attributeType = this.stringify.dtdAttType(attributeType); + if (defaultValue) { + this.defaultValue = this.stringify.dtdAttDefault(defaultValue); + } + this.defaultValueType = defaultValueType; + } + + XMLDTDAttList.prototype.toString = function(options) { + return this.options.writer.dtdAttList(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDAttList; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDTDElement.js b/node_modules/xmlbuilder/lib/XMLDTDElement.js new file mode 100644 index 00000000..f8f1ae76 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDTDElement.js @@ -0,0 +1,38 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDElement, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDTDElement = (function(superClass) { + extend(XMLDTDElement, superClass); + + function XMLDTDElement(parent, name, value) { + XMLDTDElement.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing DTD element name. 
" + this.debugInfo()); + } + if (!value) { + value = '(#PCDATA)'; + } + if (Array.isArray(value)) { + value = '(' + value.join(',') + ')'; + } + this.name = this.stringify.name(name); + this.type = NodeType.ElementDeclaration; + this.value = this.stringify.dtdElementValue(value); + } + + XMLDTDElement.prototype.toString = function(options) { + return this.options.writer.dtdElement(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDElement; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDTDEntity.js b/node_modules/xmlbuilder/lib/XMLDTDEntity.js new file mode 100644 index 00000000..0a940d6e --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDTDEntity.js @@ -0,0 +1,97 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDEntity, XMLNode, isObject, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + isObject = require('./Utility').isObject; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDTDEntity = (function(superClass) { + extend(XMLDTDEntity, superClass); + + function XMLDTDEntity(parent, pe, name, value) { + XMLDTDEntity.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing DTD entity name. " + this.debugInfo(name)); + } + if (value == null) { + throw new Error("Missing DTD entity value. " + this.debugInfo(name)); + } + this.pe = !!pe; + this.name = this.stringify.name(name); + this.type = NodeType.EntityDeclaration; + if (!isObject(value)) { + this.value = this.stringify.dtdEntityValue(value); + this.internal = true; + } else { + if (!value.pubID && !value.sysID) { + throw new Error("Public and/or system identifiers are required for an external entity. " + this.debugInfo(name)); + } + if (value.pubID && !value.sysID) { + throw new Error("System identifier is required for a public external entity. " + this.debugInfo(name)); + } + this.internal = false; + if (value.pubID != null) { + this.pubID = this.stringify.dtdPubID(value.pubID); + } + if (value.sysID != null) { + this.sysID = this.stringify.dtdSysID(value.sysID); + } + if (value.nData != null) { + this.nData = this.stringify.dtdNData(value.nData); + } + if (this.pe && this.nData) { + throw new Error("Notation declaration is not allowed in a parameter entity. 
" + this.debugInfo(name)); + } + } + } + + Object.defineProperty(XMLDTDEntity.prototype, 'publicId', { + get: function() { + return this.pubID; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'systemId', { + get: function() { + return this.sysID; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'notationName', { + get: function() { + return this.nData || null; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'inputEncoding', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'xmlEncoding', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDTDEntity.prototype, 'xmlVersion', { + get: function() { + return null; + } + }); + + XMLDTDEntity.prototype.toString = function(options) { + return this.options.writer.dtdEntity(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDEntity; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDTDNotation.js b/node_modules/xmlbuilder/lib/XMLDTDNotation.js new file mode 100644 index 00000000..57a119d2 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDTDNotation.js @@ -0,0 +1,52 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDNotation, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDTDNotation = (function(superClass) { + extend(XMLDTDNotation, superClass); + + function XMLDTDNotation(parent, name, value) { + XMLDTDNotation.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing DTD notation name. " + this.debugInfo(name)); + } + if (!value.pubID && !value.sysID) { + throw new Error("Public or system identifiers are required for an external entity. 
" + this.debugInfo(name)); + } + this.name = this.stringify.name(name); + this.type = NodeType.NotationDeclaration; + if (value.pubID != null) { + this.pubID = this.stringify.dtdPubID(value.pubID); + } + if (value.sysID != null) { + this.sysID = this.stringify.dtdSysID(value.sysID); + } + } + + Object.defineProperty(XMLDTDNotation.prototype, 'publicId', { + get: function() { + return this.pubID; + } + }); + + Object.defineProperty(XMLDTDNotation.prototype, 'systemId', { + get: function() { + return this.sysID; + } + }); + + XMLDTDNotation.prototype.toString = function(options) { + return this.options.writer.dtdNotation(this, this.options.writer.filterOptions(options)); + }; + + return XMLDTDNotation; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDeclaration.js b/node_modules/xmlbuilder/lib/XMLDeclaration.js new file mode 100644 index 00000000..d4f7f447 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDeclaration.js @@ -0,0 +1,43 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDeclaration, XMLNode, isObject, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + isObject = require('./Utility').isObject; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDeclaration = (function(superClass) { + extend(XMLDeclaration, superClass); + + function XMLDeclaration(parent, version, encoding, standalone) { + var ref; + XMLDeclaration.__super__.constructor.call(this, parent); + if (isObject(version)) { + ref = version, version = ref.version, encoding = ref.encoding, standalone = ref.standalone; + } + if (!version) { + version = '1.0'; + } + this.type = NodeType.Declaration; + this.version = this.stringify.xmlVersion(version); + if (encoding != null) { + this.encoding = this.stringify.xmlEncoding(encoding); + } + if (standalone != null) { + this.standalone = this.stringify.xmlStandalone(standalone); + } + } + + XMLDeclaration.prototype.toString = function(options) { + return this.options.writer.declaration(this, this.options.writer.filterOptions(options)); + }; + + return XMLDeclaration; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDocType.js b/node_modules/xmlbuilder/lib/XMLDocType.js new file mode 100644 index 00000000..ef043f4c --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDocType.js @@ -0,0 +1,186 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDTDAttList, XMLDTDElement, XMLDTDEntity, XMLDTDNotation, XMLDocType, XMLNamedNodeMap, XMLNode, isObject, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + isObject = require('./Utility').isObject; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + XMLDTDAttList = require('./XMLDTDAttList'); + + XMLDTDEntity = require('./XMLDTDEntity'); + + XMLDTDElement = require('./XMLDTDElement'); + + XMLDTDNotation = require('./XMLDTDNotation'); + + XMLNamedNodeMap = require('./XMLNamedNodeMap'); + + module.exports = XMLDocType = 
(function(superClass) { + extend(XMLDocType, superClass); + + function XMLDocType(parent, pubID, sysID) { + var child, i, len, ref, ref1, ref2; + XMLDocType.__super__.constructor.call(this, parent); + this.type = NodeType.DocType; + if (parent.children) { + ref = parent.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + if (child.type === NodeType.Element) { + this.name = child.name; + break; + } + } + } + this.documentObject = parent; + if (isObject(pubID)) { + ref1 = pubID, pubID = ref1.pubID, sysID = ref1.sysID; + } + if (sysID == null) { + ref2 = [pubID, sysID], sysID = ref2[0], pubID = ref2[1]; + } + if (pubID != null) { + this.pubID = this.stringify.dtdPubID(pubID); + } + if (sysID != null) { + this.sysID = this.stringify.dtdSysID(sysID); + } + } + + Object.defineProperty(XMLDocType.prototype, 'entities', { + get: function() { + var child, i, len, nodes, ref; + nodes = {}; + ref = this.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + if ((child.type === NodeType.EntityDeclaration) && !child.pe) { + nodes[child.name] = child; + } + } + return new XMLNamedNodeMap(nodes); + } + }); + + Object.defineProperty(XMLDocType.prototype, 'notations', { + get: function() { + var child, i, len, nodes, ref; + nodes = {}; + ref = this.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + if (child.type === NodeType.NotationDeclaration) { + nodes[child.name] = child; + } + } + return new XMLNamedNodeMap(nodes); + } + }); + + Object.defineProperty(XMLDocType.prototype, 'publicId', { + get: function() { + return this.pubID; + } + }); + + Object.defineProperty(XMLDocType.prototype, 'systemId', { + get: function() { + return this.sysID; + } + }); + + Object.defineProperty(XMLDocType.prototype, 'internalSubset', { + get: function() { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + } + }); + + XMLDocType.prototype.element = function(name, value) { + var child; + child = new XMLDTDElement(this, name, value); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.attList = function(elementName, attributeName, attributeType, defaultValueType, defaultValue) { + var child; + child = new XMLDTDAttList(this, elementName, attributeName, attributeType, defaultValueType, defaultValue); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.entity = function(name, value) { + var child; + child = new XMLDTDEntity(this, false, name, value); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.pEntity = function(name, value) { + var child; + child = new XMLDTDEntity(this, true, name, value); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.notation = function(name, value) { + var child; + child = new XMLDTDNotation(this, name, value); + this.children.push(child); + return this; + }; + + XMLDocType.prototype.toString = function(options) { + return this.options.writer.docType(this, this.options.writer.filterOptions(options)); + }; + + XMLDocType.prototype.ele = function(name, value) { + return this.element(name, value); + }; + + XMLDocType.prototype.att = function(elementName, attributeName, attributeType, defaultValueType, defaultValue) { + return this.attList(elementName, attributeName, attributeType, defaultValueType, defaultValue); + }; + + XMLDocType.prototype.ent = function(name, value) { + return this.entity(name, value); + }; + + XMLDocType.prototype.pent = function(name, value) { + return this.pEntity(name, value); + }; + + XMLDocType.prototype.not = function(name, value) { + return this.notation(name, value); + }; + + XMLDocType.prototype.up = function() { + return this.root() || this.documentObject; + }; + + XMLDocType.prototype.isEqualNode = function(node) { + if (!XMLDocType.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.name !== this.name) { + return false; + } + if (node.publicId !== this.publicId) { + return false; + } + if (node.systemId !== this.systemId) { + return false; + } + return true; + }; + + return XMLDocType; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDocument.js b/node_modules/xmlbuilder/lib/XMLDocument.js new file mode 100644 index 00000000..88df56c4 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDocument.js @@ -0,0 +1,242 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDOMConfiguration, XMLDOMImplementation, XMLDocument, XMLNode, XMLStringWriter, XMLStringifier, isPlainObject, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + isPlainObject = require('./Utility').isPlainObject; + + XMLDOMImplementation = require('./XMLDOMImplementation'); + + XMLDOMConfiguration = require('./XMLDOMConfiguration'); + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + XMLStringifier = require('./XMLStringifier'); + + XMLStringWriter = require('./XMLStringWriter'); + + module.exports = XMLDocument = (function(superClass) { + extend(XMLDocument, superClass); + + function XMLDocument(options) { + XMLDocument.__super__.constructor.call(this, null); + this.name = 
"#document"; + this.type = NodeType.Document; + this.documentURI = null; + this.domConfig = new XMLDOMConfiguration(); + options || (options = {}); + if (!options.writer) { + options.writer = new XMLStringWriter(); + } + this.options = options; + this.stringify = new XMLStringifier(options); + } + + Object.defineProperty(XMLDocument.prototype, 'implementation', { + value: new XMLDOMImplementation() + }); + + Object.defineProperty(XMLDocument.prototype, 'doctype', { + get: function() { + var child, i, len, ref; + ref = this.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + if (child.type === NodeType.DocType) { + return child; + } + } + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'documentElement', { + get: function() { + return this.rootObject || null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'inputEncoding', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'strictErrorChecking', { + get: function() { + return false; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'xmlEncoding', { + get: function() { + if (this.children.length !== 0 && this.children[0].type === NodeType.Declaration) { + return this.children[0].encoding; + } else { + return null; + } + } + }); + + Object.defineProperty(XMLDocument.prototype, 'xmlStandalone', { + get: function() { + if (this.children.length !== 0 && this.children[0].type === NodeType.Declaration) { + return this.children[0].standalone === 'yes'; + } else { + return false; + } + } + }); + + Object.defineProperty(XMLDocument.prototype, 'xmlVersion', { + get: function() { + if (this.children.length !== 0 && this.children[0].type === NodeType.Declaration) { + return this.children[0].version; + } else { + return "1.0"; + } + } + }); + + Object.defineProperty(XMLDocument.prototype, 'URL', { + get: function() { + return this.documentURI; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'origin', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'compatMode', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'characterSet', { + get: function() { + return null; + } + }); + + Object.defineProperty(XMLDocument.prototype, 'contentType', { + get: function() { + return null; + } + }); + + XMLDocument.prototype.end = function(writer) { + var writerOptions; + writerOptions = {}; + if (!writer) { + writer = this.options.writer; + } else if (isPlainObject(writer)) { + writerOptions = writer; + writer = this.options.writer; + } + return writer.document(this, writer.filterOptions(writerOptions)); + }; + + XMLDocument.prototype.toString = function(options) { + return this.options.writer.document(this, this.options.writer.filterOptions(options)); + }; + + XMLDocument.prototype.createElement = function(tagName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createDocumentFragment = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createTextNode = function(data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createComment = function(data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createCDATASection = function(data) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLDocument.prototype.createProcessingInstruction = function(target, data) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createAttribute = function(name) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createEntityReference = function(name) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementsByTagName = function(tagname) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.importNode = function(importedNode, deep) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createElementNS = function(namespaceURI, qualifiedName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createAttributeNS = function(namespaceURI, qualifiedName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementById = function(elementId) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.adoptNode = function(source) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.normalizeDocument = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.renameNode = function(node, namespaceURI, qualifiedName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.getElementsByClassName = function(classNames) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createEvent = function(eventInterface) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createRange = function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createNodeIterator = function(root, whatToShow, filter) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLDocument.prototype.createTreeWalker = function(root, whatToShow, filter) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + return XMLDocument; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDocumentCB.js b/node_modules/xmlbuilder/lib/XMLDocumentCB.js new file mode 100644 index 00000000..ca1aa1cf --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDocumentCB.js @@ -0,0 +1,528 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, WriterState, XMLAttribute, XMLCData, XMLComment, XMLDTDAttList, XMLDTDElement, XMLDTDEntity, XMLDTDNotation, XMLDeclaration, XMLDocType, XMLDocument, XMLDocumentCB, XMLElement, XMLProcessingInstruction, XMLRaw, XMLStringWriter, XMLStringifier, XMLText, getValue, isFunction, isObject, isPlainObject, ref, + hasProp = {}.hasOwnProperty; + + ref = require('./Utility'), isObject = ref.isObject, isFunction = ref.isFunction, isPlainObject = ref.isPlainObject, getValue = ref.getValue; + + NodeType = require('./NodeType'); + + XMLDocument = require('./XMLDocument'); + + XMLElement = require('./XMLElement'); + + XMLCData = require('./XMLCData'); + + XMLComment = require('./XMLComment'); + + XMLRaw = require('./XMLRaw'); + + XMLText = require('./XMLText'); + + XMLProcessingInstruction = require('./XMLProcessingInstruction'); + + XMLDeclaration = require('./XMLDeclaration'); + + XMLDocType = require('./XMLDocType'); + + XMLDTDAttList = require('./XMLDTDAttList'); + + XMLDTDEntity = require('./XMLDTDEntity'); + + XMLDTDElement = require('./XMLDTDElement'); + + XMLDTDNotation = require('./XMLDTDNotation'); + + XMLAttribute = require('./XMLAttribute'); + + XMLStringifier = require('./XMLStringifier'); + + XMLStringWriter = require('./XMLStringWriter'); + + WriterState = require('./WriterState'); + + module.exports = XMLDocumentCB = (function() { + function XMLDocumentCB(options, onData, onEnd) { + var writerOptions; + this.name = "?xml"; + this.type = NodeType.Document; + options || (options = {}); + writerOptions = {}; + if (!options.writer) { + options.writer = new XMLStringWriter(); + } else if (isPlainObject(options.writer)) { + writerOptions = options.writer; + options.writer = new XMLStringWriter(); + } + this.options = options; + this.writer = options.writer; + this.writerOptions = this.writer.filterOptions(writerOptions); + this.stringify = new XMLStringifier(options); + this.onDataCallback = onData || function() {}; + this.onEndCallback = onEnd || function() {}; + this.currentNode = null; + this.currentLevel = -1; + this.openTags = {}; + this.documentStarted = false; + this.documentCompleted = false; + this.root = null; + } + + XMLDocumentCB.prototype.createChildNode = function(node) { + var att, attName, attributes, child, i, len, ref1, ref2; + switch (node.type) { + case NodeType.CData: + this.cdata(node.value); + break; + case NodeType.Comment: + this.comment(node.value); + break; + case NodeType.Element: + attributes = {}; + ref1 = node.attribs; + for (attName in ref1) { + if (!hasProp.call(ref1, attName)) continue; + att = ref1[attName]; + attributes[attName] = att.value; + } + this.node(node.name, attributes); + break; + case NodeType.Dummy: + this.dummy(); + break; + case NodeType.Raw: + this.raw(node.value); + break; + case NodeType.Text: + this.text(node.value); + break; + case NodeType.ProcessingInstruction: + this.instruction(node.target, node.value); + break; + default: + throw new Error("This XML node type is not supported in a JS object: " + node.constructor.name); + } + ref2 = node.children; + for (i = 0, len = ref2.length; i < len; i++) { + child = ref2[i]; + this.createChildNode(child); + if 
(child.type === NodeType.Element) { + this.up(); + } + } + return this; + }; + + XMLDocumentCB.prototype.dummy = function() { + return this; + }; + + XMLDocumentCB.prototype.node = function(name, attributes, text) { + var ref1; + if (name == null) { + throw new Error("Missing node name."); + } + if (this.root && this.currentLevel === -1) { + throw new Error("Document can only have one root node. " + this.debugInfo(name)); + } + this.openCurrent(); + name = getValue(name); + if (attributes == null) { + attributes = {}; + } + attributes = getValue(attributes); + if (!isObject(attributes)) { + ref1 = [attributes, text], text = ref1[0], attributes = ref1[1]; + } + this.currentNode = new XMLElement(this, name, attributes); + this.currentNode.children = false; + this.currentLevel++; + this.openTags[this.currentLevel] = this.currentNode; + if (text != null) { + this.text(text); + } + return this; + }; + + XMLDocumentCB.prototype.element = function(name, attributes, text) { + var child, i, len, oldValidationFlag, ref1, root; + if (this.currentNode && this.currentNode.type === NodeType.DocType) { + this.dtdElement.apply(this, arguments); + } else { + if (Array.isArray(name) || isObject(name) || isFunction(name)) { + oldValidationFlag = this.options.noValidation; + this.options.noValidation = true; + root = new XMLDocument(this.options).element('TEMP_ROOT'); + root.element(name); + this.options.noValidation = oldValidationFlag; + ref1 = root.children; + for (i = 0, len = ref1.length; i < len; i++) { + child = ref1[i]; + this.createChildNode(child); + if (child.type === NodeType.Element) { + this.up(); + } + } + } else { + this.node(name, attributes, text); + } + } + return this; + }; + + XMLDocumentCB.prototype.attribute = function(name, value) { + var attName, attValue; + if (!this.currentNode || this.currentNode.children) { + throw new Error("att() can only be used immediately after an ele() call in callback mode. 
" + this.debugInfo(name)); + } + if (name != null) { + name = getValue(name); + } + if (isObject(name)) { + for (attName in name) { + if (!hasProp.call(name, attName)) continue; + attValue = name[attName]; + this.attribute(attName, attValue); + } + } else { + if (isFunction(value)) { + value = value.apply(); + } + if (this.options.keepNullAttributes && (value == null)) { + this.currentNode.attribs[name] = new XMLAttribute(this, name, ""); + } else if (value != null) { + this.currentNode.attribs[name] = new XMLAttribute(this, name, value); + } + } + return this; + }; + + XMLDocumentCB.prototype.text = function(value) { + var node; + this.openCurrent(); + node = new XMLText(this, value); + this.onData(this.writer.text(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.cdata = function(value) { + var node; + this.openCurrent(); + node = new XMLCData(this, value); + this.onData(this.writer.cdata(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.comment = function(value) { + var node; + this.openCurrent(); + node = new XMLComment(this, value); + this.onData(this.writer.comment(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.raw = function(value) { + var node; + this.openCurrent(); + node = new XMLRaw(this, value); + this.onData(this.writer.raw(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.instruction = function(target, value) { + var i, insTarget, insValue, len, node; + this.openCurrent(); + if (target != null) { + target = getValue(target); + } + if (value != null) { + value = getValue(value); + } + if (Array.isArray(target)) { + for (i = 0, len = target.length; i < len; i++) { + insTarget = target[i]; + this.instruction(insTarget); + } + } else if (isObject(target)) { + for (insTarget in target) { + if (!hasProp.call(target, insTarget)) continue; + insValue = target[insTarget]; + this.instruction(insTarget, insValue); + } + } else { + if (isFunction(value)) { + value = value.apply(); + } + node = new XMLProcessingInstruction(this, target, value); + this.onData(this.writer.processingInstruction(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + } + return this; + }; + + XMLDocumentCB.prototype.declaration = function(version, encoding, standalone) { + var node; + this.openCurrent(); + if (this.documentStarted) { + throw new Error("declaration() must be the first node."); + } + node = new XMLDeclaration(this, version, encoding, standalone); + this.onData(this.writer.declaration(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.doctype = function(root, pubID, sysID) { + this.openCurrent(); + if (root == null) { + throw new Error("Missing root node name."); + } + if (this.root) { + throw new Error("dtd() must come before the root node."); + } + this.currentNode = new XMLDocType(this, pubID, sysID); + this.currentNode.rootNodeName = root; + this.currentNode.children = false; + this.currentLevel++; + this.openTags[this.currentLevel] = this.currentNode; + return this; + }; + + XMLDocumentCB.prototype.dtdElement = function(name, value) { + var node; + this.openCurrent(); + node = new XMLDTDElement(this, name, value); + this.onData(this.writer.dtdElement(node, this.writerOptions, this.currentLevel + 1), 
this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.attList = function(elementName, attributeName, attributeType, defaultValueType, defaultValue) { + var node; + this.openCurrent(); + node = new XMLDTDAttList(this, elementName, attributeName, attributeType, defaultValueType, defaultValue); + this.onData(this.writer.dtdAttList(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.entity = function(name, value) { + var node; + this.openCurrent(); + node = new XMLDTDEntity(this, false, name, value); + this.onData(this.writer.dtdEntity(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.pEntity = function(name, value) { + var node; + this.openCurrent(); + node = new XMLDTDEntity(this, true, name, value); + this.onData(this.writer.dtdEntity(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.notation = function(name, value) { + var node; + this.openCurrent(); + node = new XMLDTDNotation(this, name, value); + this.onData(this.writer.dtdNotation(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); + return this; + }; + + XMLDocumentCB.prototype.up = function() { + if (this.currentLevel < 0) { + throw new Error("The document node has no parent."); + } + if (this.currentNode) { + if (this.currentNode.children) { + this.closeNode(this.currentNode); + } else { + this.openNode(this.currentNode); + } + this.currentNode = null; + } else { + this.closeNode(this.openTags[this.currentLevel]); + } + delete this.openTags[this.currentLevel]; + this.currentLevel--; + return this; + }; + + XMLDocumentCB.prototype.end = function() { + while (this.currentLevel >= 0) { + this.up(); + } + return this.onEnd(); + }; + + XMLDocumentCB.prototype.openCurrent = function() { + if (this.currentNode) { + this.currentNode.children = true; + return this.openNode(this.currentNode); + } + }; + + XMLDocumentCB.prototype.openNode = function(node) { + var att, chunk, name, ref1; + if (!node.isOpen) { + if (!this.root && this.currentLevel === 0 && node.type === NodeType.Element) { + this.root = node; + } + chunk = ''; + if (node.type === NodeType.Element) { + this.writerOptions.state = WriterState.OpenTag; + chunk = this.writer.indent(node, this.writerOptions, this.currentLevel) + '<' + node.name; + ref1 = node.attribs; + for (name in ref1) { + if (!hasProp.call(ref1, name)) continue; + att = ref1[name]; + chunk += this.writer.attribute(att, this.writerOptions, this.currentLevel); + } + chunk += (node.children ? 
'>' : '/>') + this.writer.endline(node, this.writerOptions, this.currentLevel); + this.writerOptions.state = WriterState.InsideTag; + } else { + this.writerOptions.state = WriterState.OpenTag; + chunk = this.writer.indent(node, this.writerOptions, this.currentLevel) + ''; + } + chunk += this.writer.endline(node, this.writerOptions, this.currentLevel); + } + this.onData(chunk, this.currentLevel); + return node.isOpen = true; + } + }; + + XMLDocumentCB.prototype.closeNode = function(node) { + var chunk; + if (!node.isClosed) { + chunk = ''; + this.writerOptions.state = WriterState.CloseTag; + if (node.type === NodeType.Element) { + chunk = this.writer.indent(node, this.writerOptions, this.currentLevel) + '' + this.writer.endline(node, this.writerOptions, this.currentLevel); + } else { + chunk = this.writer.indent(node, this.writerOptions, this.currentLevel) + ']>' + this.writer.endline(node, this.writerOptions, this.currentLevel); + } + this.writerOptions.state = WriterState.None; + this.onData(chunk, this.currentLevel); + return node.isClosed = true; + } + }; + + XMLDocumentCB.prototype.onData = function(chunk, level) { + this.documentStarted = true; + return this.onDataCallback(chunk, level + 1); + }; + + XMLDocumentCB.prototype.onEnd = function() { + this.documentCompleted = true; + return this.onEndCallback(); + }; + + XMLDocumentCB.prototype.debugInfo = function(name) { + if (name == null) { + return ""; + } else { + return "node: <" + name + ">"; + } + }; + + XMLDocumentCB.prototype.ele = function() { + return this.element.apply(this, arguments); + }; + + XMLDocumentCB.prototype.nod = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLDocumentCB.prototype.txt = function(value) { + return this.text(value); + }; + + XMLDocumentCB.prototype.dat = function(value) { + return this.cdata(value); + }; + + XMLDocumentCB.prototype.com = function(value) { + return this.comment(value); + }; + + XMLDocumentCB.prototype.ins = function(target, value) { + return this.instruction(target, value); + }; + + XMLDocumentCB.prototype.dec = function(version, encoding, standalone) { + return this.declaration(version, encoding, standalone); + }; + + XMLDocumentCB.prototype.dtd = function(root, pubID, sysID) { + return this.doctype(root, pubID, sysID); + }; + + XMLDocumentCB.prototype.e = function(name, attributes, text) { + return this.element(name, attributes, text); + }; + + XMLDocumentCB.prototype.n = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLDocumentCB.prototype.t = function(value) { + return this.text(value); + }; + + XMLDocumentCB.prototype.d = function(value) { + return this.cdata(value); + }; + + XMLDocumentCB.prototype.c = function(value) { + return this.comment(value); + }; + + XMLDocumentCB.prototype.r = function(value) { + return this.raw(value); + }; + + XMLDocumentCB.prototype.i = function(target, value) { + return this.instruction(target, value); + }; + + XMLDocumentCB.prototype.att = function() { + if (this.currentNode && this.currentNode.type === NodeType.DocType) { + return this.attList.apply(this, arguments); + } else { + return this.attribute.apply(this, arguments); + } + }; + + XMLDocumentCB.prototype.a = function() { + if (this.currentNode && this.currentNode.type === NodeType.DocType) { + return this.attList.apply(this, arguments); + } else { + return this.attribute.apply(this, arguments); + } + }; + + XMLDocumentCB.prototype.ent = function(name, value) { + return this.entity(name, value); + }; + + 
XMLDocumentCB.prototype.pent = function(name, value) { + return this.pEntity(name, value); + }; + + XMLDocumentCB.prototype.not = function(name, value) { + return this.notation(name, value); + }; + + return XMLDocumentCB; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDocumentFragment.js b/node_modules/xmlbuilder/lib/XMLDocumentFragment.js new file mode 100644 index 00000000..5d6039c4 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDocumentFragment.js @@ -0,0 +1,24 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDocumentFragment, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDocumentFragment = (function(superClass) { + extend(XMLDocumentFragment, superClass); + + function XMLDocumentFragment() { + XMLDocumentFragment.__super__.constructor.call(this, null); + this.name = "#document-fragment"; + this.type = NodeType.DocumentFragment; + } + + return XMLDocumentFragment; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLDummy.js b/node_modules/xmlbuilder/lib/XMLDummy.js new file mode 100644 index 00000000..b26083a3 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLDummy.js @@ -0,0 +1,31 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLDummy, XMLNode, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + module.exports = XMLDummy = (function(superClass) { + extend(XMLDummy, superClass); + + function XMLDummy(parent) { + XMLDummy.__super__.constructor.call(this, parent); + this.type = NodeType.Dummy; + } + + XMLDummy.prototype.clone = function() { + return Object.create(this); + }; + + XMLDummy.prototype.toString = function(options) { + return ''; + }; + + return XMLDummy; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLElement.js b/node_modules/xmlbuilder/lib/XMLElement.js new file mode 100644 index 00000000..c1657299 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLElement.js @@ -0,0 +1,298 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLAttribute, XMLElement, XMLNamedNodeMap, XMLNode, getValue, isFunction, isObject, ref, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + ref = require('./Utility'), isObject = ref.isObject, isFunction = ref.isFunction, getValue = ref.getValue; + + XMLNode = require('./XMLNode'); + + NodeType = require('./NodeType'); + + XMLAttribute = require('./XMLAttribute'); + + XMLNamedNodeMap = require('./XMLNamedNodeMap'); + + module.exports = XMLElement = (function(superClass) { + extend(XMLElement, superClass); + + function 
XMLElement(parent, name, attributes) { + var child, j, len, ref1; + XMLElement.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing element name. " + this.debugInfo()); + } + this.name = this.stringify.name(name); + this.type = NodeType.Element; + this.attribs = {}; + this.schemaTypeInfo = null; + if (attributes != null) { + this.attribute(attributes); + } + if (parent.type === NodeType.Document) { + this.isRoot = true; + this.documentObject = parent; + parent.rootObject = this; + if (parent.children) { + ref1 = parent.children; + for (j = 0, len = ref1.length; j < len; j++) { + child = ref1[j]; + if (child.type === NodeType.DocType) { + child.name = this.name; + break; + } + } + } + } + } + + Object.defineProperty(XMLElement.prototype, 'tagName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLElement.prototype, 'namespaceURI', { + get: function() { + return ''; + } + }); + + Object.defineProperty(XMLElement.prototype, 'prefix', { + get: function() { + return ''; + } + }); + + Object.defineProperty(XMLElement.prototype, 'localName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLElement.prototype, 'id', { + get: function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + } + }); + + Object.defineProperty(XMLElement.prototype, 'className', { + get: function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + } + }); + + Object.defineProperty(XMLElement.prototype, 'classList', { + get: function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + } + }); + + Object.defineProperty(XMLElement.prototype, 'attributes', { + get: function() { + if (!this.attributeMap || !this.attributeMap.nodes) { + this.attributeMap = new XMLNamedNodeMap(this.attribs); + } + return this.attributeMap; + } + }); + + XMLElement.prototype.clone = function() { + var att, attName, clonedSelf, ref1; + clonedSelf = Object.create(this); + if (clonedSelf.isRoot) { + clonedSelf.documentObject = null; + } + clonedSelf.attribs = {}; + ref1 = this.attribs; + for (attName in ref1) { + if (!hasProp.call(ref1, attName)) continue; + att = ref1[attName]; + clonedSelf.attribs[attName] = att.clone(); + } + clonedSelf.children = []; + this.children.forEach(function(child) { + var clonedChild; + clonedChild = child.clone(); + clonedChild.parent = clonedSelf; + return clonedSelf.children.push(clonedChild); + }); + return clonedSelf; + }; + + XMLElement.prototype.attribute = function(name, value) { + var attName, attValue; + if (name != null) { + name = getValue(name); + } + if (isObject(name)) { + for (attName in name) { + if (!hasProp.call(name, attName)) continue; + attValue = name[attName]; + this.attribute(attName, attValue); + } + } else { + if (isFunction(value)) { + value = value.apply(); + } + if (this.options.keepNullAttributes && (value == null)) { + this.attribs[name] = new XMLAttribute(this, name, ""); + } else if (value != null) { + this.attribs[name] = new XMLAttribute(this, name, value); + } + } + return this; + }; + + XMLElement.prototype.removeAttribute = function(name) { + var attName, j, len; + if (name == null) { + throw new Error("Missing attribute name. 
" + this.debugInfo()); + } + name = getValue(name); + if (Array.isArray(name)) { + for (j = 0, len = name.length; j < len; j++) { + attName = name[j]; + delete this.attribs[attName]; + } + } else { + delete this.attribs[name]; + } + return this; + }; + + XMLElement.prototype.toString = function(options) { + return this.options.writer.element(this, this.options.writer.filterOptions(options)); + }; + + XMLElement.prototype.att = function(name, value) { + return this.attribute(name, value); + }; + + XMLElement.prototype.a = function(name, value) { + return this.attribute(name, value); + }; + + XMLElement.prototype.getAttribute = function(name) { + if (this.attribs.hasOwnProperty(name)) { + return this.attribs[name].value; + } else { + return null; + } + }; + + XMLElement.prototype.setAttribute = function(name, value) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getAttributeNode = function(name) { + if (this.attribs.hasOwnProperty(name)) { + return this.attribs[name]; + } else { + return null; + } + }; + + XMLElement.prototype.setAttributeNode = function(newAttr) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.removeAttributeNode = function(oldAttr) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagName = function(name) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getAttributeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setAttributeNS = function(namespaceURI, qualifiedName, value) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.removeAttributeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getAttributeNodeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setAttributeNodeNS = function(newAttr) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.hasAttribute = function(name) { + return this.attribs.hasOwnProperty(name); + }; + + XMLElement.prototype.hasAttributeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setIdAttribute = function(name, isId) { + if (this.attribs.hasOwnProperty(name)) { + return this.attribs[name].isId; + } else { + return isId; + } + }; + + XMLElement.prototype.setIdAttributeNS = function(namespaceURI, localName, isId) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setIdAttributeNode = function(idAttr, isId) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagName = function(tagname) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLElement.prototype.getElementsByClassName = function(classNames) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.isEqualNode = function(node) { + var i, j, ref1; + if (!XMLElement.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.namespaceURI !== this.namespaceURI) { + return false; + } + if (node.prefix !== this.prefix) { + return false; + } + if (node.localName !== this.localName) { + return false; + } + if (node.attribs.length !== this.attribs.length) { + return false; + } + for (i = j = 0, ref1 = this.attribs.length - 1; 0 <= ref1 ? j <= ref1 : j >= ref1; i = 0 <= ref1 ? ++j : --j) { + if (!this.attribs[i].isEqualNode(node.attribs[i])) { + return false; + } + } + return true; + }; + + return XMLElement; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLNamedNodeMap.js b/node_modules/xmlbuilder/lib/XMLNamedNodeMap.js new file mode 100644 index 00000000..885402d0 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLNamedNodeMap.js @@ -0,0 +1,58 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLNamedNodeMap; + + module.exports = XMLNamedNodeMap = (function() { + function XMLNamedNodeMap(nodes) { + this.nodes = nodes; + } + + Object.defineProperty(XMLNamedNodeMap.prototype, 'length', { + get: function() { + return Object.keys(this.nodes).length || 0; + } + }); + + XMLNamedNodeMap.prototype.clone = function() { + return this.nodes = null; + }; + + XMLNamedNodeMap.prototype.getNamedItem = function(name) { + return this.nodes[name]; + }; + + XMLNamedNodeMap.prototype.setNamedItem = function(node) { + var oldNode; + oldNode = this.nodes[node.nodeName]; + this.nodes[node.nodeName] = node; + return oldNode || null; + }; + + XMLNamedNodeMap.prototype.removeNamedItem = function(name) { + var oldNode; + oldNode = this.nodes[name]; + delete this.nodes[name]; + return oldNode || null; + }; + + XMLNamedNodeMap.prototype.item = function(index) { + return this.nodes[Object.keys(this.nodes)[index]] || null; + }; + + XMLNamedNodeMap.prototype.getNamedItemNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented."); + }; + + XMLNamedNodeMap.prototype.setNamedItemNS = function(node) { + throw new Error("This DOM method is not implemented."); + }; + + XMLNamedNodeMap.prototype.removeNamedItemNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented."); + }; + + return XMLNamedNodeMap; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLNode.js b/node_modules/xmlbuilder/lib/XMLNode.js new file mode 100644 index 00000000..e2c7bb76 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLNode.js @@ -0,0 +1,785 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var DocumentPosition, NodeType, XMLCData, XMLComment, XMLDeclaration, XMLDocType, XMLDummy, XMLElement, XMLNamedNodeMap, XMLNode, XMLNodeList, XMLProcessingInstruction, XMLRaw, XMLText, getValue, isEmpty, isFunction, isObject, ref1, + hasProp = {}.hasOwnProperty; + + ref1 = require('./Utility'), isObject = ref1.isObject, isFunction = ref1.isFunction, isEmpty = ref1.isEmpty, getValue = ref1.getValue; + + XMLElement = null; + + XMLCData = null; + + XMLComment = null; + + XMLDeclaration = null; + + XMLDocType = null; + + XMLRaw = null; + + XMLText = null; + + XMLProcessingInstruction = null; + + XMLDummy = null; + + NodeType = null; + + XMLNodeList = null; + + XMLNamedNodeMap = null; + + 
DocumentPosition = null; + + module.exports = XMLNode = (function() { + function XMLNode(parent1) { + this.parent = parent1; + if (this.parent) { + this.options = this.parent.options; + this.stringify = this.parent.stringify; + } + this.value = null; + this.children = []; + this.baseURI = null; + if (!XMLElement) { + XMLElement = require('./XMLElement'); + XMLCData = require('./XMLCData'); + XMLComment = require('./XMLComment'); + XMLDeclaration = require('./XMLDeclaration'); + XMLDocType = require('./XMLDocType'); + XMLRaw = require('./XMLRaw'); + XMLText = require('./XMLText'); + XMLProcessingInstruction = require('./XMLProcessingInstruction'); + XMLDummy = require('./XMLDummy'); + NodeType = require('./NodeType'); + XMLNodeList = require('./XMLNodeList'); + XMLNamedNodeMap = require('./XMLNamedNodeMap'); + DocumentPosition = require('./DocumentPosition'); + } + } + + Object.defineProperty(XMLNode.prototype, 'nodeName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLNode.prototype, 'nodeType', { + get: function() { + return this.type; + } + }); + + Object.defineProperty(XMLNode.prototype, 'nodeValue', { + get: function() { + return this.value; + } + }); + + Object.defineProperty(XMLNode.prototype, 'parentNode', { + get: function() { + return this.parent; + } + }); + + Object.defineProperty(XMLNode.prototype, 'childNodes', { + get: function() { + if (!this.childNodeList || !this.childNodeList.nodes) { + this.childNodeList = new XMLNodeList(this.children); + } + return this.childNodeList; + } + }); + + Object.defineProperty(XMLNode.prototype, 'firstChild', { + get: function() { + return this.children[0] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'lastChild', { + get: function() { + return this.children[this.children.length - 1] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'previousSibling', { + get: function() { + var i; + i = this.parent.children.indexOf(this); + return this.parent.children[i - 1] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'nextSibling', { + get: function() { + var i; + i = this.parent.children.indexOf(this); + return this.parent.children[i + 1] || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'ownerDocument', { + get: function() { + return this.document() || null; + } + }); + + Object.defineProperty(XMLNode.prototype, 'textContent', { + get: function() { + var child, j, len, ref2, str; + if (this.nodeType === NodeType.Element || this.nodeType === NodeType.DocumentFragment) { + str = ''; + ref2 = this.children; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + if (child.textContent) { + str += child.textContent; + } + } + return str; + } else { + return null; + } + }, + set: function(value) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + } + }); + + XMLNode.prototype.setParent = function(parent) { + var child, j, len, ref2, results; + this.parent = parent; + if (parent) { + this.options = parent.options; + this.stringify = parent.stringify; + } + ref2 = this.children; + results = []; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + results.push(child.setParent(this)); + } + return results; + }; + + XMLNode.prototype.element = function(name, attributes, text) { + var childNode, item, j, k, key, lastChild, len, len1, ref2, ref3, val; + lastChild = null; + if (attributes === null && (text == null)) { + ref2 = [{}, null], attributes = ref2[0], text = ref2[1]; + } + if (attributes == null) { + attributes = {}; + } + attributes = getValue(attributes); + if (!isObject(attributes)) { + ref3 = [attributes, text], text = ref3[0], attributes = ref3[1]; + } + if (name != null) { + name = getValue(name); + } + if (Array.isArray(name)) { + for (j = 0, len = name.length; j < len; j++) { + item = name[j]; + lastChild = this.element(item); + } + } else if (isFunction(name)) { + lastChild = this.element(name.apply()); + } else if (isObject(name)) { + for (key in name) { + if (!hasProp.call(name, key)) continue; + val = name[key]; + if (isFunction(val)) { + val = val.apply(); + } + if (!this.options.ignoreDecorators && this.stringify.convertAttKey && key.indexOf(this.stringify.convertAttKey) === 0) { + lastChild = this.attribute(key.substr(this.stringify.convertAttKey.length), val); + } else if (!this.options.separateArrayItems && Array.isArray(val) && isEmpty(val)) { + lastChild = this.dummy(); + } else if (isObject(val) && isEmpty(val)) { + lastChild = this.element(key); + } else if (!this.options.keepNullNodes && (val == null)) { + lastChild = this.dummy(); + } else if (!this.options.separateArrayItems && Array.isArray(val)) { + for (k = 0, len1 = val.length; k < len1; k++) { + item = val[k]; + childNode = {}; + childNode[key] = item; + lastChild = this.element(childNode); + } + } else if (isObject(val)) { + if (!this.options.ignoreDecorators && this.stringify.convertTextKey && key.indexOf(this.stringify.convertTextKey) === 0) { + lastChild = this.element(val); + } else { + lastChild = this.element(key); + lastChild.element(val); + } + } else { + lastChild = this.element(key, val); + } + } + } else if (!this.options.keepNullNodes && text === null) { + lastChild = this.dummy(); + } else { + if (!this.options.ignoreDecorators && this.stringify.convertTextKey && name.indexOf(this.stringify.convertTextKey) === 0) { + lastChild = this.text(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertCDataKey && name.indexOf(this.stringify.convertCDataKey) === 0) { + lastChild = this.cdata(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertCommentKey && name.indexOf(this.stringify.convertCommentKey) === 0) { + lastChild = this.comment(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertRawKey && name.indexOf(this.stringify.convertRawKey) === 0) { + lastChild = this.raw(text); + } else if (!this.options.ignoreDecorators && this.stringify.convertPIKey && name.indexOf(this.stringify.convertPIKey) === 0) { + lastChild = this.instruction(name.substr(this.stringify.convertPIKey.length), text); + } else { + lastChild = this.node(name, attributes, text); + } + } + if (lastChild == null) { + throw new Error("Could not create any elements with: " + name + ". 
" + this.debugInfo()); + } + return lastChild; + }; + + XMLNode.prototype.insertBefore = function(name, attributes, text) { + var child, i, newChild, refChild, removed; + if (name != null ? name.type : void 0) { + newChild = name; + refChild = attributes; + newChild.setParent(this); + if (refChild) { + i = children.indexOf(refChild); + removed = children.splice(i); + children.push(newChild); + Array.prototype.push.apply(children, removed); + } else { + children.push(newChild); + } + return newChild; + } else { + if (this.isRoot) { + throw new Error("Cannot insert elements at root level. " + this.debugInfo(name)); + } + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i); + child = this.parent.element(name, attributes, text); + Array.prototype.push.apply(this.parent.children, removed); + return child; + } + }; + + XMLNode.prototype.insertAfter = function(name, attributes, text) { + var child, i, removed; + if (this.isRoot) { + throw new Error("Cannot insert elements at root level. " + this.debugInfo(name)); + } + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i + 1); + child = this.parent.element(name, attributes, text); + Array.prototype.push.apply(this.parent.children, removed); + return child; + }; + + XMLNode.prototype.remove = function() { + var i, ref2; + if (this.isRoot) { + throw new Error("Cannot remove the root element. " + this.debugInfo()); + } + i = this.parent.children.indexOf(this); + [].splice.apply(this.parent.children, [i, i - i + 1].concat(ref2 = [])), ref2; + return this.parent; + }; + + XMLNode.prototype.node = function(name, attributes, text) { + var child, ref2; + if (name != null) { + name = getValue(name); + } + attributes || (attributes = {}); + attributes = getValue(attributes); + if (!isObject(attributes)) { + ref2 = [attributes, text], text = ref2[0], attributes = ref2[1]; + } + child = new XMLElement(this, name, attributes); + if (text != null) { + child.text(text); + } + this.children.push(child); + return child; + }; + + XMLNode.prototype.text = function(value) { + var child; + if (isObject(value)) { + this.element(value); + } + child = new XMLText(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.cdata = function(value) { + var child; + child = new XMLCData(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.comment = function(value) { + var child; + child = new XMLComment(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.commentBefore = function(value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i); + child = this.parent.comment(value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.commentAfter = function(value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i + 1); + child = this.parent.comment(value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.raw = function(value) { + var child; + child = new XMLRaw(this, value); + this.children.push(child); + return this; + }; + + XMLNode.prototype.dummy = function() { + var child; + child = new XMLDummy(this); + return child; + }; + + XMLNode.prototype.instruction = function(target, value) { + var insTarget, insValue, instruction, j, len; + if (target != null) { + target = getValue(target); + } + if (value != null) 
{ + value = getValue(value); + } + if (Array.isArray(target)) { + for (j = 0, len = target.length; j < len; j++) { + insTarget = target[j]; + this.instruction(insTarget); + } + } else if (isObject(target)) { + for (insTarget in target) { + if (!hasProp.call(target, insTarget)) continue; + insValue = target[insTarget]; + this.instruction(insTarget, insValue); + } + } else { + if (isFunction(value)) { + value = value.apply(); + } + instruction = new XMLProcessingInstruction(this, target, value); + this.children.push(instruction); + } + return this; + }; + + XMLNode.prototype.instructionBefore = function(target, value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i); + child = this.parent.instruction(target, value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.instructionAfter = function(target, value) { + var child, i, removed; + i = this.parent.children.indexOf(this); + removed = this.parent.children.splice(i + 1); + child = this.parent.instruction(target, value); + Array.prototype.push.apply(this.parent.children, removed); + return this; + }; + + XMLNode.prototype.declaration = function(version, encoding, standalone) { + var doc, xmldec; + doc = this.document(); + xmldec = new XMLDeclaration(doc, version, encoding, standalone); + if (doc.children.length === 0) { + doc.children.unshift(xmldec); + } else if (doc.children[0].type === NodeType.Declaration) { + doc.children[0] = xmldec; + } else { + doc.children.unshift(xmldec); + } + return doc.root() || doc; + }; + + XMLNode.prototype.dtd = function(pubID, sysID) { + var child, doc, doctype, i, j, k, len, len1, ref2, ref3; + doc = this.document(); + doctype = new XMLDocType(doc, pubID, sysID); + ref2 = doc.children; + for (i = j = 0, len = ref2.length; j < len; i = ++j) { + child = ref2[i]; + if (child.type === NodeType.DocType) { + doc.children[i] = doctype; + return doctype; + } + } + ref3 = doc.children; + for (i = k = 0, len1 = ref3.length; k < len1; i = ++k) { + child = ref3[i]; + if (child.isRoot) { + doc.children.splice(i, 0, doctype); + return doctype; + } + } + doc.children.push(doctype); + return doctype; + }; + + XMLNode.prototype.up = function() { + if (this.isRoot) { + throw new Error("The root node has no parent. Use doc() if you need to get the document object."); + } + return this.parent; + }; + + XMLNode.prototype.root = function() { + var node; + node = this; + while (node) { + if (node.type === NodeType.Document) { + return node.rootObject; + } else if (node.isRoot) { + return node; + } else { + node = node.parent; + } + } + }; + + XMLNode.prototype.document = function() { + var node; + node = this; + while (node) { + if (node.type === NodeType.Document) { + return node; + } else { + node = node.parent; + } + } + }; + + XMLNode.prototype.end = function(options) { + return this.document().end(options); + }; + + XMLNode.prototype.prev = function() { + var i; + i = this.parent.children.indexOf(this); + if (i < 1) { + throw new Error("Already at the first node. " + this.debugInfo()); + } + return this.parent.children[i - 1]; + }; + + XMLNode.prototype.next = function() { + var i; + i = this.parent.children.indexOf(this); + if (i === -1 || i === this.parent.children.length - 1) { + throw new Error("Already at the last node. 
" + this.debugInfo()); + } + return this.parent.children[i + 1]; + }; + + XMLNode.prototype.importDocument = function(doc) { + var clonedRoot; + clonedRoot = doc.root().clone(); + clonedRoot.parent = this; + clonedRoot.isRoot = false; + this.children.push(clonedRoot); + return this; + }; + + XMLNode.prototype.debugInfo = function(name) { + var ref2, ref3; + name = name || this.name; + if ((name == null) && !((ref2 = this.parent) != null ? ref2.name : void 0)) { + return ""; + } else if (name == null) { + return "parent: <" + this.parent.name + ">"; + } else if (!((ref3 = this.parent) != null ? ref3.name : void 0)) { + return "node: <" + name + ">"; + } else { + return "node: <" + name + ">, parent: <" + this.parent.name + ">"; + } + }; + + XMLNode.prototype.ele = function(name, attributes, text) { + return this.element(name, attributes, text); + }; + + XMLNode.prototype.nod = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLNode.prototype.txt = function(value) { + return this.text(value); + }; + + XMLNode.prototype.dat = function(value) { + return this.cdata(value); + }; + + XMLNode.prototype.com = function(value) { + return this.comment(value); + }; + + XMLNode.prototype.ins = function(target, value) { + return this.instruction(target, value); + }; + + XMLNode.prototype.doc = function() { + return this.document(); + }; + + XMLNode.prototype.dec = function(version, encoding, standalone) { + return this.declaration(version, encoding, standalone); + }; + + XMLNode.prototype.e = function(name, attributes, text) { + return this.element(name, attributes, text); + }; + + XMLNode.prototype.n = function(name, attributes, text) { + return this.node(name, attributes, text); + }; + + XMLNode.prototype.t = function(value) { + return this.text(value); + }; + + XMLNode.prototype.d = function(value) { + return this.cdata(value); + }; + + XMLNode.prototype.c = function(value) { + return this.comment(value); + }; + + XMLNode.prototype.r = function(value) { + return this.raw(value); + }; + + XMLNode.prototype.i = function(target, value) { + return this.instruction(target, value); + }; + + XMLNode.prototype.u = function() { + return this.up(); + }; + + XMLNode.prototype.importXMLBuilder = function(doc) { + return this.importDocument(doc); + }; + + XMLNode.prototype.replaceChild = function(newChild, oldChild) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.removeChild = function(oldChild) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.appendChild = function(newChild) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.hasChildNodes = function() { + return this.children.length !== 0; + }; + + XMLNode.prototype.cloneNode = function(deep) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.normalize = function() { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLNode.prototype.isSupported = function(feature, version) { + return true; + }; + + XMLNode.prototype.hasAttributes = function() { + return this.attribs.length !== 0; + }; + + XMLNode.prototype.compareDocumentPosition = function(other) { + var ref, res; + ref = this; + if (ref === other) { + return 0; + } else if (this.document() !== other.document()) { + res = DocumentPosition.Disconnected | DocumentPosition.ImplementationSpecific; + if (Math.random() < 0.5) { + res |= DocumentPosition.Preceding; + } else { + res |= DocumentPosition.Following; + } + return res; + } else if (ref.isAncestor(other)) { + return DocumentPosition.Contains | DocumentPosition.Preceding; + } else if (ref.isDescendant(other)) { + return DocumentPosition.Contains | DocumentPosition.Following; + } else if (ref.isPreceding(other)) { + return DocumentPosition.Preceding; + } else { + return DocumentPosition.Following; + } + }; + + XMLNode.prototype.isSameNode = function(other) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.lookupPrefix = function(namespaceURI) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.isDefaultNamespace = function(namespaceURI) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.lookupNamespaceURI = function(prefix) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.isEqualNode = function(node) { + var i, j, ref2; + if (node.nodeType !== this.nodeType) { + return false; + } + if (node.children.length !== this.children.length) { + return false; + } + for (i = j = 0, ref2 = this.children.length - 1; 0 <= ref2 ? j <= ref2 : j >= ref2; i = 0 <= ref2 ? ++j : --j) { + if (!this.children[i].isEqualNode(node.children[i])) { + return false; + } + } + return true; + }; + + XMLNode.prototype.getFeature = function(feature, version) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.setUserData = function(key, data, handler) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLNode.prototype.getUserData = function(key) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLNode.prototype.contains = function(other) { + if (!other) { + return false; + } + return other === this || this.isDescendant(other); + }; + + XMLNode.prototype.isDescendant = function(node) { + var child, isDescendantChild, j, len, ref2; + ref2 = this.children; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + if (node === child) { + return true; + } + isDescendantChild = child.isDescendant(node); + if (isDescendantChild) { + return true; + } + } + return false; + }; + + XMLNode.prototype.isAncestor = function(node) { + return node.isDescendant(this); + }; + + XMLNode.prototype.isPreceding = function(node) { + var nodePos, thisPos; + nodePos = this.treePosition(node); + thisPos = this.treePosition(this); + if (nodePos === -1 || thisPos === -1) { + return false; + } else { + return nodePos < thisPos; + } + }; + + XMLNode.prototype.isFollowing = function(node) { + var nodePos, thisPos; + nodePos = this.treePosition(node); + thisPos = this.treePosition(this); + if (nodePos === -1 || thisPos === -1) { + return false; + } else { + return nodePos > thisPos; + } + }; + + XMLNode.prototype.treePosition = function(node) { + var found, pos; + pos = 0; + found = false; + this.foreachTreeNode(this.document(), function(childNode) { + pos++; + if (!found && childNode === node) { + return found = true; + } + }); + if (found) { + return pos; + } else { + return -1; + } + }; + + XMLNode.prototype.foreachTreeNode = function(node, func) { + var child, j, len, ref2, res; + node || (node = this.document()); + ref2 = node.children; + for (j = 0, len = ref2.length; j < len; j++) { + child = ref2[j]; + if (res = func(child)) { + return res; + } else { + res = this.foreachTreeNode(child, func); + if (res) { + return res; + } + } + } + }; + + return XMLNode; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLNodeFilter.js b/node_modules/xmlbuilder/lib/XMLNodeFilter.js new file mode 100644 index 00000000..ce32fd59 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLNodeFilter.js @@ -0,0 +1,48 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLNodeFilter; + + module.exports = XMLNodeFilter = (function() { + function XMLNodeFilter() {} + + XMLNodeFilter.prototype.FilterAccept = 1; + + XMLNodeFilter.prototype.FilterReject = 2; + + XMLNodeFilter.prototype.FilterSkip = 3; + + XMLNodeFilter.prototype.ShowAll = 0xffffffff; + + XMLNodeFilter.prototype.ShowElement = 0x1; + + XMLNodeFilter.prototype.ShowAttribute = 0x2; + + XMLNodeFilter.prototype.ShowText = 0x4; + + XMLNodeFilter.prototype.ShowCDataSection = 0x8; + + XMLNodeFilter.prototype.ShowEntityReference = 0x10; + + XMLNodeFilter.prototype.ShowEntity = 0x20; + + XMLNodeFilter.prototype.ShowProcessingInstruction = 0x40; + + XMLNodeFilter.prototype.ShowComment = 0x80; + + XMLNodeFilter.prototype.ShowDocument = 0x100; + + XMLNodeFilter.prototype.ShowDocumentType = 0x200; + + XMLNodeFilter.prototype.ShowDocumentFragment = 0x400; + + XMLNodeFilter.prototype.ShowNotation = 0x800; + + XMLNodeFilter.prototype.acceptNode = function(node) { + throw new Error("This DOM method is not implemented."); + }; + + return XMLNodeFilter; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLNodeList.js b/node_modules/xmlbuilder/lib/XMLNodeList.js new file mode 100644 index 00000000..3414a3e4 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLNodeList.js @@ -0,0 +1,28 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLNodeList; + + module.exports = XMLNodeList = (function() 
{ + function XMLNodeList(nodes) { + this.nodes = nodes; + } + + Object.defineProperty(XMLNodeList.prototype, 'length', { + get: function() { + return this.nodes.length || 0; + } + }); + + XMLNodeList.prototype.clone = function() { + return this.nodes = null; + }; + + XMLNodeList.prototype.item = function(index) { + return this.nodes[index] || null; + }; + + return XMLNodeList; + + })(); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLProcessingInstruction.js b/node_modules/xmlbuilder/lib/XMLProcessingInstruction.js new file mode 100644 index 00000000..d4333d46 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLProcessingInstruction.js @@ -0,0 +1,49 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLCharacterData, XMLProcessingInstruction, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLCharacterData = require('./XMLCharacterData'); + + module.exports = XMLProcessingInstruction = (function(superClass) { + extend(XMLProcessingInstruction, superClass); + + function XMLProcessingInstruction(parent, target, value) { + XMLProcessingInstruction.__super__.constructor.call(this, parent); + if (target == null) { + throw new Error("Missing instruction target. " + this.debugInfo()); + } + this.type = NodeType.ProcessingInstruction; + this.target = this.stringify.insTarget(target); + this.name = this.target; + if (value) { + this.value = this.stringify.insValue(value); + } + } + + XMLProcessingInstruction.prototype.clone = function() { + return Object.create(this); + }; + + XMLProcessingInstruction.prototype.toString = function(options) { + return this.options.writer.processingInstruction(this, this.options.writer.filterOptions(options)); + }; + + XMLProcessingInstruction.prototype.isEqualNode = function(node) { + if (!XMLProcessingInstruction.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.target !== this.target) { + return false; + } + return true; + }; + + return XMLProcessingInstruction; + + })(XMLCharacterData); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLRaw.js b/node_modules/xmlbuilder/lib/XMLRaw.js new file mode 100644 index 00000000..b5928502 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLRaw.js @@ -0,0 +1,35 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLNode, XMLRaw, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLNode = require('./XMLNode'); + + module.exports = XMLRaw = (function(superClass) { + extend(XMLRaw, superClass); + + function XMLRaw(parent, text) { + XMLRaw.__super__.constructor.call(this, parent); + if (text == null) { + throw new Error("Missing raw text. 
" + this.debugInfo()); + } + this.type = NodeType.Raw; + this.value = this.stringify.raw(text); + } + + XMLRaw.prototype.clone = function() { + return Object.create(this); + }; + + XMLRaw.prototype.toString = function(options) { + return this.options.writer.raw(this, this.options.writer.filterOptions(options)); + }; + + return XMLRaw; + + })(XMLNode); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLStreamWriter.js b/node_modules/xmlbuilder/lib/XMLStreamWriter.js new file mode 100644 index 00000000..159dc6b6 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLStreamWriter.js @@ -0,0 +1,176 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, WriterState, XMLStreamWriter, XMLWriterBase, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NodeType = require('./NodeType'); + + XMLWriterBase = require('./XMLWriterBase'); + + WriterState = require('./WriterState'); + + module.exports = XMLStreamWriter = (function(superClass) { + extend(XMLStreamWriter, superClass); + + function XMLStreamWriter(stream, options) { + this.stream = stream; + XMLStreamWriter.__super__.constructor.call(this, options); + } + + XMLStreamWriter.prototype.endline = function(node, options, level) { + if (node.isLastRootNode && options.state === WriterState.CloseTag) { + return ''; + } else { + return XMLStreamWriter.__super__.endline.call(this, node, options, level); + } + }; + + XMLStreamWriter.prototype.document = function(doc, options) { + var child, i, j, k, len, len1, ref, ref1, results; + ref = doc.children; + for (i = j = 0, len = ref.length; j < len; i = ++j) { + child = ref[i]; + child.isLastRootNode = i === doc.children.length - 1; + } + options = this.filterOptions(options); + ref1 = doc.children; + results = []; + for (k = 0, len1 = ref1.length; k < len1; k++) { + child = ref1[k]; + results.push(this.writeChildNode(child, options, 0)); + } + return results; + }; + + XMLStreamWriter.prototype.attribute = function(att, options, level) { + return this.stream.write(XMLStreamWriter.__super__.attribute.call(this, att, options, level)); + }; + + XMLStreamWriter.prototype.cdata = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.cdata.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.comment = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.comment.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.declaration = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.declaration.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.docType = function(node, options, level) { + var child, j, len, ref; + level || (level = 0); + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + this.stream.write(this.indent(node, options, level)); + this.stream.write(' 0) { + this.stream.write(' ['); + this.stream.write(this.endline(node, options, level)); + options.state = WriterState.InsideTag; + ref = node.children; + for (j = 0, len = ref.length; j < len; j++) { + child = ref[j]; + this.writeChildNode(child, options, level + 1); + } + options.state = WriterState.CloseTag; + this.stream.write(']'); + } + options.state = WriterState.CloseTag; + 
this.stream.write(options.spaceBeforeSlash + '>'); + this.stream.write(this.endline(node, options, level)); + options.state = WriterState.None; + return this.closeNode(node, options, level); + }; + + XMLStreamWriter.prototype.element = function(node, options, level) { + var att, child, childNodeCount, firstChildNode, j, len, name, prettySuppressed, ref, ref1; + level || (level = 0); + this.openNode(node, options, level); + options.state = WriterState.OpenTag; + this.stream.write(this.indent(node, options, level) + '<' + node.name); + ref = node.attribs; + for (name in ref) { + if (!hasProp.call(ref, name)) continue; + att = ref[name]; + this.attribute(att, options, level); + } + childNodeCount = node.children.length; + firstChildNode = childNodeCount === 0 ? null : node.children[0]; + if (childNodeCount === 0 || node.children.every(function(e) { + return (e.type === NodeType.Text || e.type === NodeType.Raw) && e.value === ''; + })) { + if (options.allowEmpty) { + this.stream.write('>'); + options.state = WriterState.CloseTag; + this.stream.write(''); + } else { + options.state = WriterState.CloseTag; + this.stream.write(options.spaceBeforeSlash + '/>'); + } + } else if (options.pretty && childNodeCount === 1 && (firstChildNode.type === NodeType.Text || firstChildNode.type === NodeType.Raw) && (firstChildNode.value != null)) { + this.stream.write('>'); + options.state = WriterState.InsideTag; + options.suppressPrettyCount++; + prettySuppressed = true; + this.writeChildNode(firstChildNode, options, level + 1); + options.suppressPrettyCount--; + prettySuppressed = false; + options.state = WriterState.CloseTag; + this.stream.write(''); + } else { + this.stream.write('>' + this.endline(node, options, level)); + options.state = WriterState.InsideTag; + ref1 = node.children; + for (j = 0, len = ref1.length; j < len; j++) { + child = ref1[j]; + this.writeChildNode(child, options, level + 1); + } + options.state = WriterState.CloseTag; + this.stream.write(this.indent(node, options, level) + ''); + } + this.stream.write(this.endline(node, options, level)); + options.state = WriterState.None; + return this.closeNode(node, options, level); + }; + + XMLStreamWriter.prototype.processingInstruction = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.processingInstruction.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.raw = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.raw.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.text = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.text.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.dtdAttList = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.dtdAttList.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.dtdElement = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.dtdElement.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.dtdEntity = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.dtdEntity.call(this, node, options, level)); + }; + + XMLStreamWriter.prototype.dtdNotation = function(node, options, level) { + return this.stream.write(XMLStreamWriter.__super__.dtdNotation.call(this, node, options, level)); + }; + + return XMLStreamWriter; + + })(XMLWriterBase); + +}).call(this); diff --git 
a/node_modules/xmlbuilder/lib/XMLStringWriter.js b/node_modules/xmlbuilder/lib/XMLStringWriter.js new file mode 100644 index 00000000..71870170 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLStringWriter.js @@ -0,0 +1,35 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLStringWriter, XMLWriterBase, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + XMLWriterBase = require('./XMLWriterBase'); + + module.exports = XMLStringWriter = (function(superClass) { + extend(XMLStringWriter, superClass); + + function XMLStringWriter(options) { + XMLStringWriter.__super__.constructor.call(this, options); + } + + XMLStringWriter.prototype.document = function(doc, options) { + var child, i, len, r, ref; + options = this.filterOptions(options); + r = ''; + ref = doc.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + r += this.writeChildNode(child, options, 0); + } + if (options.pretty && r.slice(-options.newline.length) === options.newline) { + r = r.slice(0, -options.newline.length); + } + return r; + }; + + return XMLStringWriter; + + })(XMLWriterBase); + +}).call(this); diff --git a/node_modules/xmlbuilder/lib/XMLStringifier.js b/node_modules/xmlbuilder/lib/XMLStringifier.js new file mode 100644 index 00000000..a39475c1 --- /dev/null +++ b/node_modules/xmlbuilder/lib/XMLStringifier.js @@ -0,0 +1,240 @@ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLStringifier, + bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }, + hasProp = {}.hasOwnProperty; + + module.exports = XMLStringifier = (function() { + function XMLStringifier(options) { + this.assertLegalName = bind(this.assertLegalName, this); + this.assertLegalChar = bind(this.assertLegalChar, this); + var key, ref, value; + options || (options = {}); + this.options = options; + if (!this.options.version) { + this.options.version = '1.0'; + } + ref = options.stringify || {}; + for (key in ref) { + if (!hasProp.call(ref, key)) continue; + value = ref[key]; + this[key] = value; + } + } + + XMLStringifier.prototype.name = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalName('' + val || ''); + }; + + XMLStringifier.prototype.text = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar(this.textEscape('' + val || '')); + }; + + XMLStringifier.prototype.cdata = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + val = val.replace(']]>', ']]]]>'); + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.comment = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (val.match(/--/)) { + throw new Error("Comment text cannot contain double-hypen: " + val); + } + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.raw = function(val) { + if (this.options.noValidation) { + return val; + } + return '' + val || ''; + }; + + XMLStringifier.prototype.attValue = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar(this.attEscape(val = '' + val || '')); + }; + + XMLStringifier.prototype.insTarget = function(val) { + if (this.options.noValidation) { + return val; + } + 
return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.insValue = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (val.match(/\?>/)) { + throw new Error("Invalid processing instruction value: " + val); + } + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.xmlVersion = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (!val.match(/1\.[0-9]+/)) { + throw new Error("Invalid version number: " + val); + } + return val; + }; + + XMLStringifier.prototype.xmlEncoding = function(val) { + if (this.options.noValidation) { + return val; + } + val = '' + val || ''; + if (!val.match(/^[A-Za-z](?:[A-Za-z0-9._-])*$/)) { + throw new Error("Invalid encoding: " + val); + } + return this.assertLegalChar(val); + }; + + XMLStringifier.prototype.xmlStandalone = function(val) { + if (this.options.noValidation) { + return val; + } + if (val) { + return "yes"; + } else { + return "no"; + } + }; + + XMLStringifier.prototype.dtdPubID = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdSysID = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdElementValue = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdAttType = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdAttDefault = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdEntityValue = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.dtdNData = function(val) { + if (this.options.noValidation) { + return val; + } + return this.assertLegalChar('' + val || ''); + }; + + XMLStringifier.prototype.convertAttKey = '@'; + + XMLStringifier.prototype.convertPIKey = '?'; + + XMLStringifier.prototype.convertTextKey = '#text'; + + XMLStringifier.prototype.convertCDataKey = '#cdata'; + + XMLStringifier.prototype.convertCommentKey = '#comment'; + + XMLStringifier.prototype.convertRawKey = '#raw'; + + XMLStringifier.prototype.assertLegalChar = function(str) { + var regex, res; + if (this.options.noValidation) { + return str; + } + regex = ''; + if (this.options.version === '1.0') { + regex = /[\0-\x08\x0B\f\x0E-\x1F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; + if (res = str.match(regex)) { + throw new Error("Invalid character in string: " + str + " at index " + res.index); + } + } else if (this.options.version === '1.1') { + regex = /[\0\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; + if (res = str.match(regex)) { + throw new Error("Invalid character in string: " + str + " at index " + res.index); + } + } + return str; + }; + + XMLStringifier.prototype.assertLegalName = function(str) { + var regex; + if (this.options.noValidation) { + return str; + } + this.assertLegalChar(str); + regex = 
/^([:A-Z_a-z\xC0-\xD6\xD8-\xF6\xF8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]|[\uD800-\uDB7F][\uDC00-\uDFFF])([\x2D\.0-:A-Z_a-z\xB7\xC0-\xD6\xD8-\xF6\xF8-\u037D\u037F-\u1FFF\u200C\u200D\u203F\u2040\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]|[\uD800-\uDB7F][\uDC00-\uDFFF])*$/;
+      if (!str.match(regex)) {
+        throw new Error("Invalid character in name");
+      }
+      return str;
+    };
+
[node_modules/xmlbuilder/lib/XMLStringifier.js (textEscape, attEscape, end of file) and node_modules/xmlbuilder/lib/XMLWriterBase.js (new file: the shared writer base — indent, endline, attribute, cdata, comment, declaration, docType, element, writeChildNode, processingInstruction, raw, text, dtdAttList, dtdElement, dtdEntity, dtdNotation, plus the openNode/closeNode/openAttribute/closeAttribute hooks); the hunk header and markup string literals are garbled]
diff --git a/node_modules/xmlbuilder/lib/index.js b/node_modules/xmlbuilder/lib/index.js
new file mode 100644
index 00000000..b1ed2636
--- /dev/null
+++ b/node_modules/xmlbuilder/lib/index.js
@@ -0,0 +1,65 @@
+// Generated by CoffeeScript 1.12.7
+(function() {
+  var NodeType, WriterState, XMLDOMImplementation, XMLDocument, XMLDocumentCB, XMLStreamWriter, XMLStringWriter, assign, isFunction, ref;
+
+  ref = require('./Utility'), assign = ref.assign, isFunction = ref.isFunction;
+
+  XMLDOMImplementation = require('./XMLDOMImplementation');
+
+  XMLDocument = require('./XMLDocument');
+
+  XMLDocumentCB = require('./XMLDocumentCB');
+
+  XMLStringWriter = require('./XMLStringWriter');
+
+  XMLStreamWriter = require('./XMLStreamWriter');
+
+  NodeType = require('./NodeType');
+
+  WriterState = require('./WriterState');
+
+  module.exports.create = function(name, xmldec, doctype, options) {
+    var doc, root;
+ if (name == null) { + throw new Error("Root element needs a name."); + } + options = assign({}, xmldec, doctype, options); + doc = new XMLDocument(options); + root = doc.element(name); + if (!options.headless) { + doc.declaration(options); + if ((options.pubID != null) || (options.sysID != null)) { + doc.dtd(options); + } + } + return root; + }; + + module.exports.begin = function(options, onData, onEnd) { + var ref1; + if (isFunction(options)) { + ref1 = [options, onData], onData = ref1[0], onEnd = ref1[1]; + options = {}; + } + if (onData) { + return new XMLDocumentCB(options, onData, onEnd); + } else { + return new XMLDocument(options); + } + }; + + module.exports.stringWriter = function(options) { + return new XMLStringWriter(options); + }; + + module.exports.streamWriter = function(stream, options) { + return new XMLStreamWriter(stream, options); + }; + + module.exports.implementation = new XMLDOMImplementation(); + + module.exports.nodeType = NodeType; + + module.exports.writerState = WriterState; + +}).call(this); diff --git a/node_modules/xmlbuilder/package.json b/node_modules/xmlbuilder/package.json new file mode 100644 index 00000000..512cd977 --- /dev/null +++ b/node_modules/xmlbuilder/package.json @@ -0,0 +1,39 @@ +{ + "name": "xmlbuilder", + "version": "11.0.1", + "keywords": [ + "xml", + "xmlbuilder" + ], + "homepage": "http://github.com/oozcitak/xmlbuilder-js", + "description": "An XML builder for node.js", + "author": "Ozgur Ozcitak ", + "contributors": [], + "license": "MIT", + "repository": { + "type": "git", + "url": "git://github.com/oozcitak/xmlbuilder-js.git" + }, + "bugs": { + "url": "http://github.com/oozcitak/xmlbuilder-js/issues" + }, + "main": "./lib/index", + "typings": "./typings/index.d.ts", + "engines": { + "node": ">=4.0" + }, + "dependencies": {}, + "devDependencies": { + "coffeescript": "1.*", + "mocha": "*", + "coffee-coverage": "2.*", + "istanbul": "*", + "coveralls": "*", + "xpath": "*" + }, + "scripts": { + "prepublishOnly": "coffee -co lib src", + "postpublish": "rm -rf lib", + "test": "mocha \"test/**/*.coffee\" && istanbul report text lcov" + } +} diff --git a/package-lock.json b/package-lock.json index 068806f4..aa77eee2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,6 +14,7 @@ "@actions/github": "^5.0.1", "@actions/tool-cache": "^1.7.1", "@octokit/plugin-throttling": "^3.6.2", + "cache": "github:actions/cache", "minimist": "^1.2.6", "mkdirp-promise": "^5.0.1", "node-fetch": "^2.6.7" @@ -23,18 +24,50 @@ "typescript": "^3.9.7" } }, + "node_modules/@actions/cache": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.4.tgz", + "integrity": "sha512-B6c8JNTcgMUcstwbCSCjgHrSxzJ5ABxX6Y1Q9lfe8lT08ZVrnUetEZfxYRCwTgAf/gHsjdgWHW4O8Z+VvPvUuQ==", + "dependencies": { + "@actions/core": "^1.2.6", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.0.1", + "@actions/io": "^1.0.1", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.8.0", + "semver": "^6.1.0", + "uuid": "^3.3.3" + } + }, + "node_modules/@actions/cache/node_modules/@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "dependencies": { + "tunnel": "^0.0.6" + } + }, "node_modules/@actions/core": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz", - 
"integrity": "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==", + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.8.2.tgz", + "integrity": "sha512-FXcBL7nyik8K5ODeCKlxi+vts7torOkoDAKfeh61EAkAy1HAvwn9uVzZBY0f15YcQTcZZ2/iSGBFHEuioZWfDA==", "dependencies": { - "@actions/http-client": "^1.0.11" + "@actions/http-client": "^2.0.1" + } + }, + "node_modules/@actions/core/node_modules/@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "dependencies": { + "tunnel": "^0.0.6" } }, "node_modules/@actions/exec": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.0.tgz", - "integrity": "sha512-LImpN9AY0J1R1mEYJjVJfSZWU4zYOlEcwSTgPve1rFQqK5AwrEs6uWW5Rv70gbDIQIAUwI86z6B+9mPK4w9Sbg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", "dependencies": { "@actions/io": "^1.0.1" } @@ -50,6 +83,15 @@ "@octokit/plugin-rest-endpoint-methods": "^5.13.0" } }, + "node_modules/@actions/glob": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", + "integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==", + "dependencies": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } + }, "node_modules/@actions/http-client": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz", @@ -59,9 +101,9 @@ } }, "node_modules/@actions/io": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.1.tgz", - "integrity": "sha512-Qi4JoKXjmE0O67wAOH6y0n26QXhMKMFo7GD/4IXNVcrtLjUlGjGuVys6pQgwF3ArfGTQu0XpqaNr0YhED2RaRA==" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.2.tgz", + "integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==" }, "node_modules/@actions/tool-cache": { "version": "1.7.1", @@ -76,14 +118,217 @@ "uuid": "^3.3.2" } }, - "node_modules/@actions/tool-cache/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "node_modules/@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/abort-controller/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-auth": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.3.2.tgz", + "integrity": "sha512-7CU6DmCHIZp5ZPiZ9r3J17lTKMmYsm/zGvNkjArQwPkrLlZ1TZ+EUYfGgh2X31OLMVAQCTJZW4cXHJi02EbJnA==", + 
"dependencies": { + "@azure/abort-controller": "^1.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-auth/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-http": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.5.tgz", + "integrity": "sha512-kctMqSQ6zfnlFpuYzfUKadeTyOQYbIQ+3Rj7dzVC3Dk1dOnHroTwR9hLYKX8/n85iJpkyaksaXpuh5L7GJRYuQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tough-cookie": "^4.0.0", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.4.19" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-http/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@azure/core-http/node_modules/tough-cookie": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@azure/core-http/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-http/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", "bin": { - "semver": "bin/semver.js" + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@azure/core-lro": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.2.4.tgz", + "integrity": "sha512-e1I2v2CZM0mQo8+RSix0x091Av493e4bnT22ds2fcQGslTHzM2oTbswkB65nP4iEpCxBrFxOSDPKExmTmjCVtQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-paging": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.3.0.tgz", + "integrity": "sha512-H6Tg9eBm0brHqLy0OSAGzxIh1t4UL8eZVrSUMJ60Ra9cwq2pOskFqVpz2pYoHDsBY1jZ4V/P8LRGb5D5pmC6rg==", + "dependencies": { + "tslib": 
"^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-paging/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-tracing": { + "version": "1.0.0-preview.13", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", + "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", + "dependencies": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-tracing/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/logger": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.0.3.tgz", + "integrity": "sha512-aK4s3Xxjrx3daZr3VylxejK3vG5ExXck5WOHDJ8in/k9AqlfIyFMMT1uG7u8mNjX+QRILTIn0/Xgschfh/dQ9g==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/logger/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/ms-rest-js": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.1.tgz", + "integrity": "sha512-LLi4jRe/qy5IM8U2CkoDgSZp2OH+MgDe2wePmhz8uY84Svc53EhHaamVyoU6BjjHBxvCRh1vcD1urJDccrxqIw==", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tough-cookie": "^3.0.1", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.4.19" + } + }, + "node_modules/@azure/ms-rest-js/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" } }, + "node_modules/@azure/storage-blob": { + "version": "12.10.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.10.0.tgz", + "integrity": "sha512-FBEPKGnvtQJS8V8Tg1P9obgmVD9AodrIfwtwhBpsjenClhFyugMp3HPJY0tF7rInUB/CivKBCbnQKrUnKxqxzw==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^2.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/storage-blob/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, "node_modules/@octokit/auth-token": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", @@ -197,11 +442,69 @@ "@octokit/openapi-types": "^11.2.0" } }, + "node_modules/@opentelemetry/api": { + "version": "1.1.0", + 
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.1.0.tgz", + "integrity": "sha512-hf+3bwuBwtXsugA2ULBc95qxrOqP2pOekLz34BJhcAKawt94vfeNyUKpYc0lZQ/3sCP6LqRa7UAdHA7i5UODzQ==", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/@types/node": { "version": "12.19.4", "resolved": "https://registry.npmjs.org/@types/node/-/node-12.19.4.tgz", - "integrity": "sha512-o3oj1bETk8kBwzz1WlO6JWL/AfAA3Vm6J1B3C9CsdxHYp7XgPiH7OEXPUbZTndHlRaIElrANkQfe6ZmfJb3H2w==", - "dev": true + "integrity": "sha512-o3oj1bETk8kBwzz1WlO6JWL/AfAA3Vm6J1B3C9CsdxHYp7XgPiH7OEXPUbZTndHlRaIElrANkQfe6ZmfJb3H2w==" + }, + "node_modules/@types/node-fetch": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-oMqjURCaxoSIsHSr1E47QHzbmzNR5rK8McHuNb11BOM9cHcIK3Avy0s/b2JlXHoQGTYS3NsvWzV1M0iK7l0wbA==", + "dependencies": { + "@types/node": "*", + "form-data": "^3.0.0" + } + }, + "node_modules/@types/node-fetch/node_modules/form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/tunnel": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", + "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "node_modules/before-after-hook": { "version": "2.2.2", @@ -213,11 +516,92 @@ "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/cache": { + "version": "3.0.2", + "resolved": "git+ssh://git@github.com/actions/cache.git#95f200e41cfa87b8e07f30196c0df17a67e67786", + "license": "MIT", + "dependencies": { + "@actions/cache": "^2.0.2", + "@actions/core": "^1.7.0", + "@actions/exec": "^1.1.1", + "@actions/io": "^1.1.2" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/ip-regex": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", + "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", + "engines": { + "node": ">=4" + } + }, "node_modules/is-plain-object": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", @@ -226,6 +610,36 @@ "node": ">=0.10.0" } }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/minimist": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", @@ -281,11 +695,63 @@ "wrappy": "1" } }, + "node_modules/process": { + "version": "0.11.10", + "resolved": 
"https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + }, + "node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/tough-cookie": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", + "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "dependencies": { + "ip-regex": "^2.1.0", + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" }, + "node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + }, "node_modules/tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", @@ -312,6 +778,14 @@ "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" }, + "node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "engines": { + "node": ">= 4.0.0" + } + }, "node_modules/uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -339,21 +813,77 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "node_modules/xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } } }, "dependencies": { + "@actions/cache": { + "version": 
"2.0.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.4.tgz", + "integrity": "sha512-B6c8JNTcgMUcstwbCSCjgHrSxzJ5ABxX6Y1Q9lfe8lT08ZVrnUetEZfxYRCwTgAf/gHsjdgWHW4O8Z+VvPvUuQ==", + "requires": { + "@actions/core": "^1.2.6", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.0.1", + "@actions/io": "^1.0.1", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.8.0", + "semver": "^6.1.0", + "uuid": "^3.3.3" + }, + "dependencies": { + "@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "requires": { + "tunnel": "^0.0.6" + } + } + } + }, "@actions/core": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz", - "integrity": "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==", + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.8.2.tgz", + "integrity": "sha512-FXcBL7nyik8K5ODeCKlxi+vts7torOkoDAKfeh61EAkAy1HAvwn9uVzZBY0f15YcQTcZZ2/iSGBFHEuioZWfDA==", "requires": { - "@actions/http-client": "^1.0.11" + "@actions/http-client": "^2.0.1" + }, + "dependencies": { + "@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "requires": { + "tunnel": "^0.0.6" + } + } } }, "@actions/exec": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.0.tgz", - "integrity": "sha512-LImpN9AY0J1R1mEYJjVJfSZWU4zYOlEcwSTgPve1rFQqK5AwrEs6uWW5Rv70gbDIQIAUwI86z6B+9mPK4w9Sbg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", "requires": { "@actions/io": "^1.0.1" } @@ -369,6 +899,15 @@ "@octokit/plugin-rest-endpoint-methods": "^5.13.0" } }, + "@actions/glob": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", + "integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==", + "requires": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } + }, "@actions/http-client": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz", @@ -378,9 +917,9 @@ } }, "@actions/io": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.1.tgz", - "integrity": "sha512-Qi4JoKXjmE0O67wAOH6y0n26QXhMKMFo7GD/4IXNVcrtLjUlGjGuVys6pQgwF3ArfGTQu0XpqaNr0YhED2RaRA==" + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.2.tgz", + "integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==" }, "@actions/tool-cache": { "version": "1.7.1", @@ -393,12 +932,198 @@ "@actions/io": "^1.1.1", "semver": "^6.1.0", "uuid": "^3.3.2" + } + }, + "@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "requires": { + "tslib": 
"^2.2.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } + } + }, + "@azure/core-auth": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.3.2.tgz", + "integrity": "sha512-7CU6DmCHIZp5ZPiZ9r3J17lTKMmYsm/zGvNkjArQwPkrLlZ1TZ+EUYfGgh2X31OLMVAQCTJZW4cXHJi02EbJnA==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "tslib": "^2.2.0" }, "dependencies": { - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } + } + }, + "@azure/core-http": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.5.tgz", + "integrity": "sha512-kctMqSQ6zfnlFpuYzfUKadeTyOQYbIQ+3Rj7dzVC3Dk1dOnHroTwR9hLYKX8/n85iJpkyaksaXpuh5L7GJRYuQ==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tough-cookie": "^4.0.0", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.4.19" + }, + "dependencies": { + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "tough-cookie": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "requires": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" + } + }, + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + } + } + }, + "@azure/core-lro": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.2.4.tgz", + "integrity": "sha512-e1I2v2CZM0mQo8+RSix0x091Av493e4bnT22ds2fcQGslTHzM2oTbswkB65nP4iEpCxBrFxOSDPKExmTmjCVtQ==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "tslib": "^2.2.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } + } + }, + "@azure/core-paging": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.3.0.tgz", + "integrity": "sha512-H6Tg9eBm0brHqLy0OSAGzxIh1t4UL8eZVrSUMJ60Ra9cwq2pOskFqVpz2pYoHDsBY1jZ4V/P8LRGb5D5pmC6rg==", + "requires": { + "tslib": "^2.2.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } + } + }, + "@azure/core-tracing": { + "version": "1.0.0-preview.13", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", + "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", + "requires": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } + } + }, + "@azure/logger": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.0.3.tgz", + "integrity": "sha512-aK4s3Xxjrx3daZr3VylxejK3vG5ExXck5WOHDJ8in/k9AqlfIyFMMT1uG7u8mNjX+QRILTIn0/Xgschfh/dQ9g==", + "requires": { + "tslib": "^2.2.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + } + } + }, + "@azure/ms-rest-js": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.1.tgz", + "integrity": "sha512-LLi4jRe/qy5IM8U2CkoDgSZp2OH+MgDe2wePmhz8uY84Svc53EhHaamVyoU6BjjHBxvCRh1vcD1urJDccrxqIw==", + "requires": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tough-cookie": "^3.0.1", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.4.19" + }, + "dependencies": { + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + } + } + }, + "@azure/storage-blob": { + "version": "12.10.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.10.0.tgz", + "integrity": "sha512-FBEPKGnvtQJS8V8Tg1P9obgmVD9AodrIfwtwhBpsjenClhFyugMp3HPJY0tF7rInUB/CivKBCbnQKrUnKxqxzw==", + "requires": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^2.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" } } }, @@ -506,11 +1231,62 @@ "@octokit/openapi-types": "^11.2.0" } }, + "@opentelemetry/api": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.1.0.tgz", + "integrity": "sha512-hf+3bwuBwtXsugA2ULBc95qxrOqP2pOekLz34BJhcAKawt94vfeNyUKpYc0lZQ/3sCP6LqRa7UAdHA7i5UODzQ==" + }, "@types/node": { "version": "12.19.4", "resolved": "https://registry.npmjs.org/@types/node/-/node-12.19.4.tgz", - "integrity": 
"sha512-o3oj1bETk8kBwzz1WlO6JWL/AfAA3Vm6J1B3C9CsdxHYp7XgPiH7OEXPUbZTndHlRaIElrANkQfe6ZmfJb3H2w==", - "dev": true + "integrity": "sha512-o3oj1bETk8kBwzz1WlO6JWL/AfAA3Vm6J1B3C9CsdxHYp7XgPiH7OEXPUbZTndHlRaIElrANkQfe6ZmfJb3H2w==" + }, + "@types/node-fetch": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-oMqjURCaxoSIsHSr1E47QHzbmzNR5rK8McHuNb11BOM9cHcIK3Avy0s/b2JlXHoQGTYS3NsvWzV1M0iK7l0wbA==", + "requires": { + "@types/node": "*", + "form-data": "^3.0.0" + }, + "dependencies": { + "form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + } + } + }, + "@types/tunnel": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", + "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", + "requires": { + "@types/node": "*" + } + }, + "abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "requires": { + "event-target-shim": "^5.0.0" + } + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "before-after-hook": { "version": "2.2.2", @@ -522,16 +1298,99 @@ "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "cache": { + "version": "git+ssh://git@github.com/actions/cache.git#95f200e41cfa87b8e07f30196c0df17a67e67786", + "from": "cache@github:actions/cache", + "requires": { + "@actions/cache": "^2.0.2", + "@actions/core": "^1.7.0", + "@actions/exec": "^1.1.1", + "@actions/io": "^1.1.2" + } + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + }, "deprecation": { "version": "2.3.1", 
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" }, + "event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" + }, + "events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" + }, + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "ip-regex": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", + "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=" + }, "is-plain-object": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, "minimist": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", @@ -566,11 +1425,51 @@ "wrappy": "1" } }, + "process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=" + }, + "psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + }, + "sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + }, + "tough-cookie": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", + "integrity": 
"sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "requires": { + "ip-regex": "^2.1.0", + "psl": "^1.1.28", + "punycode": "^2.1.1" + } + }, "tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" }, + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + }, "tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", @@ -587,6 +1486,11 @@ "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" }, + "universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==" + }, "uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -610,6 +1514,20 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + } + }, + "xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" } } } From 9096979bcdff91c31cd9fbe8ecbdba72a857b924 Mon Sep 17 00:00:00 2001 From: Anton Tayanovskyy Date: Thu, 19 May 2022 10:51:28 -0400 Subject: [PATCH 3/4] Run tsc --- lib/main.js | 62 +++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 55 insertions(+), 7 deletions(-) diff --git a/lib/main.js b/lib/main.js index 795a7853..914687c3 100644 --- a/lib/main.js +++ b/lib/main.js @@ -1,7 +1,11 @@ "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -14,7 +18,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -28,10 +32,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", { value: true }); +const os = __importStar(require("os")); +const path = __importStar(require("path")); +const cache = __importStar(require("@actions/cache")); const core = __importStar(require("@actions/core")); const tc = __importStar(require("@actions/tool-cache")); const utils_1 = require("@actions/github/lib/utils"); -const os = __importStar(require("os")); const plugin_throttling_1 = require("@octokit/plugin-throttling"); const ThrottlingOctokit = utils_1.GitHub.plugin(plugin_throttling_1.throttling); function run() { @@ -53,7 +59,7 @@ function run() { core.info(`Retrying after ${retryAfter} seconds.`); return true; }, - } }, utils_1.getOctokitOptions(token))); + } }, (0, utils_1.getOctokitOptions)(token))); const repo = core.getInput("repo"); if (!repo) { throw new Error(`Repo was not specified`); @@ -62,6 +68,9 @@ function run() { if (!tag) { throw new Error(`Tag not specified`); } + const cacheEnabled = (core.getInput("cache") === "enable") + && tag !== "latest" + && tag !== ""; const [owner, project] = repo.split("/"); let osMatch = []; // Determine Platform @@ -103,6 +112,24 @@ function run() { } core.info(`==> System reported arch: ${os.arch()}`); core.info(`==> Using arch: ${osArch}`); + let toolInfo = { + owner: owner, + project: project, + tag: tag, + osArch: osArch, + osPlatform: osPlatform + }; + let dest = toolPath(toolInfo); + // Look in the cache first. + let cacheKey = cachePrimaryKey(toolInfo); + if (cacheEnabled && cacheKey !== undefined) { + let ok = yield cache.restoreCache([dest], cacheKey); + if (ok !== undefined) { + core.info(`Found ${project} in the cache: ${dest}`); + core.addPath(dest); + return; + } + } let getReleaseUrl; if (tag === "latest") { getReleaseUrl = yield octokit.rest.repos.getLatestRelease({ @@ -132,9 +159,12 @@ function run() { const url = asset.browser_download_url; core.info(`Downloading ${project} from ${url}`); const binPath = yield tc.downloadTool(url); - const extractedPath = yield extractFn(binPath); - core.info(`Successfully extracted ${project} to ${extractedPath}`); - core.addPath(extractedPath); + yield extractFn(binPath, dest); + if (cacheEnabled && cacheKey !== undefined) { + yield cache.saveCache([dest], cacheKey); + } + core.addPath(dest); + core.info(`Successfully extracted ${project} to ${dest}`); } catch (error) { if (error instanceof Error) { @@ -146,6 +176,24 @@ function run() { } }); } +function cachePrimaryKey(info) { + // Currently not caching "latest" verisons of the tool. 
+    if (info.tag === "latest") {
+        return undefined;
+    }
+    return "action-install-gh-release/" +
+        `${info.owner}/${info.project}/${info.tag}/${info.osPlatform}-${info.osArch}`;
+}
+function toolPath(info) {
+    return path.join(getCacheDirectory(), info.owner, info.project, info.tag, `${info.osPlatform}-${info.osArch}`);
+}
+function getCacheDirectory() {
+    const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || '';
+    if (cacheDirectory === '') {
+        core.warning('Expected RUNNER_TOOL_CACHE to be defined');
+    }
+    return cacheDirectory;
+}
 function getExtractFn(assetName) {
     if (assetName.endsWith('.tar.gz')) {
         return tc.extractTar;

From 7f3add2a9da00a8f2f5afd9c4ea1e0ad50115754 Mon Sep 17 00:00:00 2001
From: Anton Tayanovskyy
Date: Thu, 19 May 2022 10:53:09 -0400
Subject: [PATCH 4/4] Expose the cache parameter in metadata

---
 action.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/action.yml b/action.yml
index 2c14a21e..13f8c8e1 100644
--- a/action.yml
+++ b/action.yml
@@ -15,6 +15,9 @@ inputs:
   arch:
     description: "OS Architecture to match in release package. Specify this parameter if the repository releases do not follow a normal convention otherwise it will be auto-detected."
     required: false
+  cache:
+    description: "When set to 'enable', caches downloads for explicitly pinned tags (not 'latest') with actions/cache."
+    required: false
 branding:
   icon: "archive"
   color: "green"